fixes and modelgen insert
This commit is contained in:
10
api/main.py
10
api/main.py
@@ -39,11 +39,11 @@ app.add_middleware(
|
|||||||
allow_headers=["*"],
|
allow_headers=["*"],
|
||||||
)
|
)
|
||||||
|
|
||||||
# Routes
|
# Routes - all under /api prefix
|
||||||
app.include_router(system_router)
|
app.include_router(system_router, prefix="/api")
|
||||||
app.include_router(assets_router)
|
app.include_router(assets_router, prefix="/api")
|
||||||
app.include_router(presets_router)
|
app.include_router(presets_router, prefix="/api")
|
||||||
app.include_router(jobs_router)
|
app.include_router(jobs_router, prefix="/api")
|
||||||
|
|
||||||
|
|
||||||
@app.get("/")
|
@app.get("/")
|
||||||
|
|||||||
@@ -15,6 +15,27 @@ def health_check():
|
|||||||
return {"status": "healthy"}
|
return {"status": "healthy"}
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/status")
|
||||||
|
def system_status():
|
||||||
|
"""System status for UI."""
|
||||||
|
return {"status": "ok", "version": "0.1.0"}
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/worker")
|
||||||
|
def worker_status():
|
||||||
|
"""Worker status from gRPC."""
|
||||||
|
try:
|
||||||
|
from mpr.grpc.client import get_client
|
||||||
|
|
||||||
|
client = get_client()
|
||||||
|
status = client.get_worker_status()
|
||||||
|
if status:
|
||||||
|
return status
|
||||||
|
return {"available": False, "error": "No response from worker"}
|
||||||
|
except Exception as e:
|
||||||
|
return {"available": False, "error": str(e)}
|
||||||
|
|
||||||
|
|
||||||
@router.get("/ffmpeg/codecs")
|
@router.get("/ffmpeg/codecs")
|
||||||
def ffmpeg_codecs():
|
def ffmpeg_codecs():
|
||||||
"""Get available FFmpeg encoders and decoders."""
|
"""Get available FFmpeg encoders and decoders."""
|
||||||
|
|||||||
89
api/schemas/models.py
Normal file
89
api/schemas/models.py
Normal file
@@ -0,0 +1,89 @@
|
|||||||
|
"""
|
||||||
|
Pydantic Models - GENERATED FILE
|
||||||
|
|
||||||
|
Do not edit directly. Regenerate using modelgen.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from datetime import datetime
|
||||||
|
from enum import Enum
|
||||||
|
from typing import Any, Dict, List, Optional
|
||||||
|
from uuid import UUID
|
||||||
|
|
||||||
|
from pydantic import BaseModel, Field
|
||||||
|
|
||||||
|
class AssetStatus(str, Enum):
|
||||||
|
PENDING = "pending"
|
||||||
|
READY = "ready"
|
||||||
|
ERROR = "error"
|
||||||
|
|
||||||
|
class JobStatus(str, Enum):
|
||||||
|
PENDING = "pending"
|
||||||
|
PROCESSING = "processing"
|
||||||
|
COMPLETED = "completed"
|
||||||
|
FAILED = "failed"
|
||||||
|
CANCELLED = "cancelled"
|
||||||
|
|
||||||
|
class MediaAsset(BaseModel):
|
||||||
|
"""A video/audio file registered in the system."""
|
||||||
|
id: UUID
|
||||||
|
filename: str
|
||||||
|
file_path: str
|
||||||
|
status: AssetStatus = "AssetStatus.PENDING"
|
||||||
|
error_message: Optional[str] = None
|
||||||
|
file_size: Optional[int] = None
|
||||||
|
duration: Optional[float] = None
|
||||||
|
video_codec: Optional[str] = None
|
||||||
|
audio_codec: Optional[str] = None
|
||||||
|
width: Optional[int] = None
|
||||||
|
height: Optional[int] = None
|
||||||
|
framerate: Optional[float] = None
|
||||||
|
bitrate: Optional[int] = None
|
||||||
|
properties: Dict[str, Any]
|
||||||
|
comments: str = ""
|
||||||
|
tags: List[str] = Field(default_factory=list)
|
||||||
|
created_at: Optional[datetime] = None
|
||||||
|
updated_at: Optional[datetime] = None
|
||||||
|
|
||||||
|
class TranscodePreset(BaseModel):
|
||||||
|
"""A reusable transcoding configuration (like Handbrake presets)."""
|
||||||
|
id: UUID
|
||||||
|
name: str
|
||||||
|
description: str = ""
|
||||||
|
is_builtin: bool = False
|
||||||
|
container: str = "mp4"
|
||||||
|
video_codec: str = "libx264"
|
||||||
|
video_bitrate: Optional[str] = None
|
||||||
|
video_crf: Optional[int] = None
|
||||||
|
video_preset: Optional[str] = None
|
||||||
|
resolution: Optional[str] = None
|
||||||
|
framerate: Optional[float] = None
|
||||||
|
audio_codec: str = "aac"
|
||||||
|
audio_bitrate: Optional[str] = None
|
||||||
|
audio_channels: Optional[int] = None
|
||||||
|
audio_samplerate: Optional[int] = None
|
||||||
|
extra_args: List[str] = Field(default_factory=list)
|
||||||
|
created_at: Optional[datetime] = None
|
||||||
|
updated_at: Optional[datetime] = None
|
||||||
|
|
||||||
|
class TranscodeJob(BaseModel):
|
||||||
|
"""A transcoding or trimming job in the queue."""
|
||||||
|
id: UUID
|
||||||
|
source_asset_id: UUID
|
||||||
|
preset_id: Optional[UUID] = None
|
||||||
|
preset_snapshot: Dict[str, Any]
|
||||||
|
trim_start: Optional[float] = None
|
||||||
|
trim_end: Optional[float] = None
|
||||||
|
output_filename: str = ""
|
||||||
|
output_path: Optional[str] = None
|
||||||
|
output_asset_id: Optional[UUID] = None
|
||||||
|
status: JobStatus = "JobStatus.PENDING"
|
||||||
|
progress: float = 0.0
|
||||||
|
current_frame: Optional[int] = None
|
||||||
|
current_time: Optional[float] = None
|
||||||
|
speed: Optional[str] = None
|
||||||
|
error_message: Optional[str] = None
|
||||||
|
celery_task_id: Optional[str] = None
|
||||||
|
priority: int = 0
|
||||||
|
created_at: Optional[datetime] = None
|
||||||
|
started_at: Optional[datetime] = None
|
||||||
|
completed_at: Optional[datetime] = None
|
||||||
@@ -23,7 +23,7 @@ services:
|
|||||||
POSTGRES_USER: mpr_user
|
POSTGRES_USER: mpr_user
|
||||||
POSTGRES_PASSWORD: mpr_pass
|
POSTGRES_PASSWORD: mpr_pass
|
||||||
ports:
|
ports:
|
||||||
- "5435:5432"
|
- "5436:5432"
|
||||||
volumes:
|
volumes:
|
||||||
- postgres-data:/var/lib/postgresql/data
|
- postgres-data:/var/lib/postgresql/data
|
||||||
healthcheck:
|
healthcheck:
|
||||||
@@ -33,7 +33,7 @@ services:
|
|||||||
redis:
|
redis:
|
||||||
image: redis:7-alpine
|
image: redis:7-alpine
|
||||||
ports:
|
ports:
|
||||||
- "6380:6379"
|
- "6381:6379"
|
||||||
volumes:
|
volumes:
|
||||||
- redis-data:/data
|
- redis-data:/data
|
||||||
healthcheck:
|
healthcheck:
|
||||||
@@ -101,7 +101,7 @@ services:
|
|||||||
dockerfile: ctrl/Dockerfile
|
dockerfile: ctrl/Dockerfile
|
||||||
command: python -m mpr.grpc.server
|
command: python -m mpr.grpc.server
|
||||||
ports:
|
ports:
|
||||||
- "50051:50051"
|
- "50052:50051"
|
||||||
environment:
|
environment:
|
||||||
<<: *common-env
|
<<: *common-env
|
||||||
GRPC_PORT: 50051
|
GRPC_PORT: 50051
|
||||||
|
|||||||
46
ctrl/generate.sh
Executable file
46
ctrl/generate.sh
Executable file
@@ -0,0 +1,46 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
# Model generation script for MPR
|
||||||
|
# Generates Django, Pydantic, TypeScript, and Protobuf from schema/models
|
||||||
|
|
||||||
|
set -e
|
||||||
|
|
||||||
|
cd "$(dirname "$0")/.."
|
||||||
|
|
||||||
|
echo "Generating models from schema/models..."
|
||||||
|
|
||||||
|
# Django ORM models
|
||||||
|
python -m tools.modelgen from-schema \
|
||||||
|
--schema schema/models \
|
||||||
|
--output mpr/media_assets/models.py \
|
||||||
|
--targets django
|
||||||
|
|
||||||
|
# Pydantic schemas for FastAPI
|
||||||
|
python -m tools.modelgen from-schema \
|
||||||
|
--schema schema/models \
|
||||||
|
--output api/schemas/models.py \
|
||||||
|
--targets pydantic
|
||||||
|
|
||||||
|
# TypeScript types for Timeline UI
|
||||||
|
python -m tools.modelgen from-schema \
|
||||||
|
--schema schema/models \
|
||||||
|
--output ui/timeline/src/types.ts \
|
||||||
|
--targets typescript
|
||||||
|
|
||||||
|
# Protobuf for gRPC
|
||||||
|
python -m tools.modelgen from-schema \
|
||||||
|
--schema schema/models \
|
||||||
|
--output mpr/grpc/protos/worker.proto \
|
||||||
|
--targets proto
|
||||||
|
|
||||||
|
# Generate gRPC stubs from proto
|
||||||
|
echo "Generating gRPC stubs..."
|
||||||
|
python -m grpc_tools.protoc \
|
||||||
|
-I mpr/grpc/protos \
|
||||||
|
--python_out=mpr/grpc \
|
||||||
|
--grpc_python_out=mpr/grpc \
|
||||||
|
mpr/grpc/protos/worker.proto
|
||||||
|
|
||||||
|
# Fix relative import in generated grpc stub
|
||||||
|
sed -i 's/^import worker_pb2/from . import worker_pb2/' mpr/grpc/worker_pb2_grpc.py
|
||||||
|
|
||||||
|
echo "Done!"
|
||||||
@@ -41,7 +41,7 @@ http {
|
|||||||
}
|
}
|
||||||
|
|
||||||
# FastAPI
|
# FastAPI
|
||||||
location /api {
|
location /api/ {
|
||||||
proxy_pass http://fastapi;
|
proxy_pass http://fastapi;
|
||||||
proxy_set_header Host $host;
|
proxy_set_header Host $host;
|
||||||
proxy_set_header X-Real-IP $remote_addr;
|
proxy_set_header X-Real-IP $remote_addr;
|
||||||
|
|||||||
@@ -1,7 +1,6 @@
|
|||||||
// MPR Worker Service - GENERATED FILE
|
// Protocol Buffer Definitions - GENERATED FILE
|
||||||
//
|
//
|
||||||
// Do not edit directly. Modify schema/models/grpc.py and run:
|
// Do not edit directly. Regenerate using modelgen.
|
||||||
// python schema/generate.py --proto
|
|
||||||
|
|
||||||
syntax = "proto3";
|
syntax = "proto3";
|
||||||
|
|
||||||
|
|||||||
@@ -1,32 +1,27 @@
|
|||||||
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
|
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
|
||||||
"""Client and server classes corresponding to protobuf-defined services."""
|
"""Client and server classes corresponding to protobuf-defined services."""
|
||||||
|
|
||||||
import warnings
|
|
||||||
|
|
||||||
import grpc
|
import grpc
|
||||||
|
import warnings
|
||||||
|
|
||||||
from . import worker_pb2 as worker__pb2
|
from . import worker_pb2 as worker__pb2
|
||||||
|
|
||||||
GRPC_GENERATED_VERSION = "1.76.0"
|
GRPC_GENERATED_VERSION = '1.76.0'
|
||||||
GRPC_VERSION = grpc.__version__
|
GRPC_VERSION = grpc.__version__
|
||||||
_version_not_supported = False
|
_version_not_supported = False
|
||||||
|
|
||||||
try:
|
try:
|
||||||
from grpc._utilities import first_version_is_lower
|
from grpc._utilities import first_version_is_lower
|
||||||
|
_version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION)
|
||||||
_version_not_supported = first_version_is_lower(
|
|
||||||
GRPC_VERSION, GRPC_GENERATED_VERSION
|
|
||||||
)
|
|
||||||
except ImportError:
|
except ImportError:
|
||||||
_version_not_supported = True
|
_version_not_supported = True
|
||||||
|
|
||||||
if _version_not_supported:
|
if _version_not_supported:
|
||||||
raise RuntimeError(
|
raise RuntimeError(
|
||||||
f"The grpc package installed is at version {GRPC_VERSION},"
|
f'The grpc package installed is at version {GRPC_VERSION},'
|
||||||
+ " but the generated code in worker_pb2_grpc.py depends on"
|
+ ' but the generated code in worker_pb2_grpc.py depends on'
|
||||||
+ f" grpcio>={GRPC_GENERATED_VERSION}."
|
+ f' grpcio>={GRPC_GENERATED_VERSION}.'
|
||||||
+ f" Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}"
|
+ f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}'
|
||||||
+ f" or downgrade your generated code using grpcio-tools<={GRPC_VERSION}."
|
+ f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.'
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@@ -40,29 +35,25 @@ class WorkerServiceStub(object):
|
|||||||
channel: A grpc.Channel.
|
channel: A grpc.Channel.
|
||||||
"""
|
"""
|
||||||
self.SubmitJob = channel.unary_unary(
|
self.SubmitJob = channel.unary_unary(
|
||||||
"/mpr.worker.WorkerService/SubmitJob",
|
'/mpr.worker.WorkerService/SubmitJob',
|
||||||
request_serializer=worker__pb2.JobRequest.SerializeToString,
|
request_serializer=worker__pb2.JobRequest.SerializeToString,
|
||||||
response_deserializer=worker__pb2.JobResponse.FromString,
|
response_deserializer=worker__pb2.JobResponse.FromString,
|
||||||
_registered_method=True,
|
_registered_method=True)
|
||||||
)
|
|
||||||
self.StreamProgress = channel.unary_stream(
|
self.StreamProgress = channel.unary_stream(
|
||||||
"/mpr.worker.WorkerService/StreamProgress",
|
'/mpr.worker.WorkerService/StreamProgress',
|
||||||
request_serializer=worker__pb2.ProgressRequest.SerializeToString,
|
request_serializer=worker__pb2.ProgressRequest.SerializeToString,
|
||||||
response_deserializer=worker__pb2.ProgressUpdate.FromString,
|
response_deserializer=worker__pb2.ProgressUpdate.FromString,
|
||||||
_registered_method=True,
|
_registered_method=True)
|
||||||
)
|
|
||||||
self.CancelJob = channel.unary_unary(
|
self.CancelJob = channel.unary_unary(
|
||||||
"/mpr.worker.WorkerService/CancelJob",
|
'/mpr.worker.WorkerService/CancelJob',
|
||||||
request_serializer=worker__pb2.CancelRequest.SerializeToString,
|
request_serializer=worker__pb2.CancelRequest.SerializeToString,
|
||||||
response_deserializer=worker__pb2.CancelResponse.FromString,
|
response_deserializer=worker__pb2.CancelResponse.FromString,
|
||||||
_registered_method=True,
|
_registered_method=True)
|
||||||
)
|
|
||||||
self.GetWorkerStatus = channel.unary_unary(
|
self.GetWorkerStatus = channel.unary_unary(
|
||||||
"/mpr.worker.WorkerService/GetWorkerStatus",
|
'/mpr.worker.WorkerService/GetWorkerStatus',
|
||||||
request_serializer=worker__pb2.Empty.SerializeToString,
|
request_serializer=worker__pb2.Empty.SerializeToString,
|
||||||
response_deserializer=worker__pb2.WorkerStatus.FromString,
|
response_deserializer=worker__pb2.WorkerStatus.FromString,
|
||||||
_registered_method=True,
|
_registered_method=True)
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class WorkerServiceServicer(object):
|
class WorkerServiceServicer(object):
|
||||||
@@ -71,81 +62,76 @@ class WorkerServiceServicer(object):
|
|||||||
def SubmitJob(self, request, context):
|
def SubmitJob(self, request, context):
|
||||||
"""Missing associated documentation comment in .proto file."""
|
"""Missing associated documentation comment in .proto file."""
|
||||||
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
|
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
|
||||||
context.set_details("Method not implemented!")
|
context.set_details('Method not implemented!')
|
||||||
raise NotImplementedError("Method not implemented!")
|
raise NotImplementedError('Method not implemented!')
|
||||||
|
|
||||||
def StreamProgress(self, request, context):
|
def StreamProgress(self, request, context):
|
||||||
"""Missing associated documentation comment in .proto file."""
|
"""Missing associated documentation comment in .proto file."""
|
||||||
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
|
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
|
||||||
context.set_details("Method not implemented!")
|
context.set_details('Method not implemented!')
|
||||||
raise NotImplementedError("Method not implemented!")
|
raise NotImplementedError('Method not implemented!')
|
||||||
|
|
||||||
def CancelJob(self, request, context):
|
def CancelJob(self, request, context):
|
||||||
"""Missing associated documentation comment in .proto file."""
|
"""Missing associated documentation comment in .proto file."""
|
||||||
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
|
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
|
||||||
context.set_details("Method not implemented!")
|
context.set_details('Method not implemented!')
|
||||||
raise NotImplementedError("Method not implemented!")
|
raise NotImplementedError('Method not implemented!')
|
||||||
|
|
||||||
def GetWorkerStatus(self, request, context):
|
def GetWorkerStatus(self, request, context):
|
||||||
"""Missing associated documentation comment in .proto file."""
|
"""Missing associated documentation comment in .proto file."""
|
||||||
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
|
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
|
||||||
context.set_details("Method not implemented!")
|
context.set_details('Method not implemented!')
|
||||||
raise NotImplementedError("Method not implemented!")
|
raise NotImplementedError('Method not implemented!')
|
||||||
|
|
||||||
|
|
||||||
def add_WorkerServiceServicer_to_server(servicer, server):
|
def add_WorkerServiceServicer_to_server(servicer, server):
|
||||||
rpc_method_handlers = {
|
rpc_method_handlers = {
|
||||||
"SubmitJob": grpc.unary_unary_rpc_method_handler(
|
'SubmitJob': grpc.unary_unary_rpc_method_handler(
|
||||||
servicer.SubmitJob,
|
servicer.SubmitJob,
|
||||||
request_deserializer=worker__pb2.JobRequest.FromString,
|
request_deserializer=worker__pb2.JobRequest.FromString,
|
||||||
response_serializer=worker__pb2.JobResponse.SerializeToString,
|
response_serializer=worker__pb2.JobResponse.SerializeToString,
|
||||||
),
|
),
|
||||||
"StreamProgress": grpc.unary_stream_rpc_method_handler(
|
'StreamProgress': grpc.unary_stream_rpc_method_handler(
|
||||||
servicer.StreamProgress,
|
servicer.StreamProgress,
|
||||||
request_deserializer=worker__pb2.ProgressRequest.FromString,
|
request_deserializer=worker__pb2.ProgressRequest.FromString,
|
||||||
response_serializer=worker__pb2.ProgressUpdate.SerializeToString,
|
response_serializer=worker__pb2.ProgressUpdate.SerializeToString,
|
||||||
),
|
),
|
||||||
"CancelJob": grpc.unary_unary_rpc_method_handler(
|
'CancelJob': grpc.unary_unary_rpc_method_handler(
|
||||||
servicer.CancelJob,
|
servicer.CancelJob,
|
||||||
request_deserializer=worker__pb2.CancelRequest.FromString,
|
request_deserializer=worker__pb2.CancelRequest.FromString,
|
||||||
response_serializer=worker__pb2.CancelResponse.SerializeToString,
|
response_serializer=worker__pb2.CancelResponse.SerializeToString,
|
||||||
),
|
),
|
||||||
"GetWorkerStatus": grpc.unary_unary_rpc_method_handler(
|
'GetWorkerStatus': grpc.unary_unary_rpc_method_handler(
|
||||||
servicer.GetWorkerStatus,
|
servicer.GetWorkerStatus,
|
||||||
request_deserializer=worker__pb2.Empty.FromString,
|
request_deserializer=worker__pb2.Empty.FromString,
|
||||||
response_serializer=worker__pb2.WorkerStatus.SerializeToString,
|
response_serializer=worker__pb2.WorkerStatus.SerializeToString,
|
||||||
),
|
),
|
||||||
}
|
}
|
||||||
generic_handler = grpc.method_handlers_generic_handler(
|
generic_handler = grpc.method_handlers_generic_handler(
|
||||||
"mpr.worker.WorkerService", rpc_method_handlers
|
'mpr.worker.WorkerService', rpc_method_handlers)
|
||||||
)
|
|
||||||
server.add_generic_rpc_handlers((generic_handler,))
|
server.add_generic_rpc_handlers((generic_handler,))
|
||||||
server.add_registered_method_handlers(
|
server.add_registered_method_handlers('mpr.worker.WorkerService', rpc_method_handlers)
|
||||||
"mpr.worker.WorkerService", rpc_method_handlers
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
# This class is part of an EXPERIMENTAL API.
|
# This class is part of an EXPERIMENTAL API.
|
||||||
class WorkerService(object):
|
class WorkerService(object):
|
||||||
"""Missing associated documentation comment in .proto file."""
|
"""Missing associated documentation comment in .proto file."""
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def SubmitJob(
|
def SubmitJob(request,
|
||||||
request,
|
target,
|
||||||
target,
|
options=(),
|
||||||
options=(),
|
channel_credentials=None,
|
||||||
channel_credentials=None,
|
call_credentials=None,
|
||||||
call_credentials=None,
|
insecure=False,
|
||||||
insecure=False,
|
compression=None,
|
||||||
compression=None,
|
wait_for_ready=None,
|
||||||
wait_for_ready=None,
|
timeout=None,
|
||||||
timeout=None,
|
metadata=None):
|
||||||
metadata=None,
|
|
||||||
):
|
|
||||||
return grpc.experimental.unary_unary(
|
return grpc.experimental.unary_unary(
|
||||||
request,
|
request,
|
||||||
target,
|
target,
|
||||||
"/mpr.worker.WorkerService/SubmitJob",
|
'/mpr.worker.WorkerService/SubmitJob',
|
||||||
worker__pb2.JobRequest.SerializeToString,
|
worker__pb2.JobRequest.SerializeToString,
|
||||||
worker__pb2.JobResponse.FromString,
|
worker__pb2.JobResponse.FromString,
|
||||||
options,
|
options,
|
||||||
@@ -156,26 +142,23 @@ class WorkerService(object):
|
|||||||
wait_for_ready,
|
wait_for_ready,
|
||||||
timeout,
|
timeout,
|
||||||
metadata,
|
metadata,
|
||||||
_registered_method=True,
|
_registered_method=True)
|
||||||
)
|
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def StreamProgress(
|
def StreamProgress(request,
|
||||||
request,
|
target,
|
||||||
target,
|
options=(),
|
||||||
options=(),
|
channel_credentials=None,
|
||||||
channel_credentials=None,
|
call_credentials=None,
|
||||||
call_credentials=None,
|
insecure=False,
|
||||||
insecure=False,
|
compression=None,
|
||||||
compression=None,
|
wait_for_ready=None,
|
||||||
wait_for_ready=None,
|
timeout=None,
|
||||||
timeout=None,
|
metadata=None):
|
||||||
metadata=None,
|
|
||||||
):
|
|
||||||
return grpc.experimental.unary_stream(
|
return grpc.experimental.unary_stream(
|
||||||
request,
|
request,
|
||||||
target,
|
target,
|
||||||
"/mpr.worker.WorkerService/StreamProgress",
|
'/mpr.worker.WorkerService/StreamProgress',
|
||||||
worker__pb2.ProgressRequest.SerializeToString,
|
worker__pb2.ProgressRequest.SerializeToString,
|
||||||
worker__pb2.ProgressUpdate.FromString,
|
worker__pb2.ProgressUpdate.FromString,
|
||||||
options,
|
options,
|
||||||
@@ -186,26 +169,23 @@ class WorkerService(object):
|
|||||||
wait_for_ready,
|
wait_for_ready,
|
||||||
timeout,
|
timeout,
|
||||||
metadata,
|
metadata,
|
||||||
_registered_method=True,
|
_registered_method=True)
|
||||||
)
|
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def CancelJob(
|
def CancelJob(request,
|
||||||
request,
|
target,
|
||||||
target,
|
options=(),
|
||||||
options=(),
|
channel_credentials=None,
|
||||||
channel_credentials=None,
|
call_credentials=None,
|
||||||
call_credentials=None,
|
insecure=False,
|
||||||
insecure=False,
|
compression=None,
|
||||||
compression=None,
|
wait_for_ready=None,
|
||||||
wait_for_ready=None,
|
timeout=None,
|
||||||
timeout=None,
|
metadata=None):
|
||||||
metadata=None,
|
|
||||||
):
|
|
||||||
return grpc.experimental.unary_unary(
|
return grpc.experimental.unary_unary(
|
||||||
request,
|
request,
|
||||||
target,
|
target,
|
||||||
"/mpr.worker.WorkerService/CancelJob",
|
'/mpr.worker.WorkerService/CancelJob',
|
||||||
worker__pb2.CancelRequest.SerializeToString,
|
worker__pb2.CancelRequest.SerializeToString,
|
||||||
worker__pb2.CancelResponse.FromString,
|
worker__pb2.CancelResponse.FromString,
|
||||||
options,
|
options,
|
||||||
@@ -216,26 +196,23 @@ class WorkerService(object):
|
|||||||
wait_for_ready,
|
wait_for_ready,
|
||||||
timeout,
|
timeout,
|
||||||
metadata,
|
metadata,
|
||||||
_registered_method=True,
|
_registered_method=True)
|
||||||
)
|
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def GetWorkerStatus(
|
def GetWorkerStatus(request,
|
||||||
request,
|
target,
|
||||||
target,
|
options=(),
|
||||||
options=(),
|
channel_credentials=None,
|
||||||
channel_credentials=None,
|
call_credentials=None,
|
||||||
call_credentials=None,
|
insecure=False,
|
||||||
insecure=False,
|
compression=None,
|
||||||
compression=None,
|
wait_for_ready=None,
|
||||||
wait_for_ready=None,
|
timeout=None,
|
||||||
timeout=None,
|
metadata=None):
|
||||||
metadata=None,
|
|
||||||
):
|
|
||||||
return grpc.experimental.unary_unary(
|
return grpc.experimental.unary_unary(
|
||||||
request,
|
request,
|
||||||
target,
|
target,
|
||||||
"/mpr.worker.WorkerService/GetWorkerStatus",
|
'/mpr.worker.WorkerService/GetWorkerStatus',
|
||||||
worker__pb2.Empty.SerializeToString,
|
worker__pb2.Empty.SerializeToString,
|
||||||
worker__pb2.WorkerStatus.FromString,
|
worker__pb2.WorkerStatus.FromString,
|
||||||
options,
|
options,
|
||||||
@@ -246,5 +223,4 @@ class WorkerService(object):
|
|||||||
wait_for_ready,
|
wait_for_ready,
|
||||||
timeout,
|
timeout,
|
||||||
metadata,
|
metadata,
|
||||||
_registered_method=True,
|
_registered_method=True)
|
||||||
)
|
|
||||||
|
|||||||
@@ -1,21 +1,27 @@
|
|||||||
"""
|
"""
|
||||||
Django ORM Models - GENERATED FILE
|
Django ORM Models - GENERATED FILE
|
||||||
|
|
||||||
Do not edit directly. Modify schema/models/*.py and run:
|
Do not edit directly. Regenerate using modelgen.
|
||||||
python schema/generate.py --django
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
import uuid
|
import uuid
|
||||||
from django.db import models
|
from django.db import models
|
||||||
|
|
||||||
|
class AssetStatus(models.TextChoices):
|
||||||
|
PENDING = "pending", "Pending"
|
||||||
|
READY = "ready", "Ready"
|
||||||
|
ERROR = "error", "Error"
|
||||||
|
|
||||||
|
class JobStatus(models.TextChoices):
|
||||||
|
PENDING = "pending", "Pending"
|
||||||
|
PROCESSING = "processing", "Processing"
|
||||||
|
COMPLETED = "completed", "Completed"
|
||||||
|
FAILED = "failed", "Failed"
|
||||||
|
CANCELLED = "cancelled", "Cancelled"
|
||||||
|
|
||||||
class MediaAsset(models.Model):
|
class MediaAsset(models.Model):
|
||||||
"""A video/audio file registered in the system."""
|
"""A video/audio file registered in the system."""
|
||||||
|
|
||||||
class Status(models.TextChoices):
|
|
||||||
PENDING = "pending", "Pending"
|
|
||||||
READY = "ready", "Ready"
|
|
||||||
ERROR = "error", "Error"
|
|
||||||
|
|
||||||
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
|
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
|
||||||
filename = models.CharField(max_length=500)
|
filename = models.CharField(max_length=500)
|
||||||
file_path = models.CharField(max_length=1000)
|
file_path = models.CharField(max_length=1000)
|
||||||
@@ -74,13 +80,6 @@ class TranscodePreset(models.Model):
|
|||||||
class TranscodeJob(models.Model):
|
class TranscodeJob(models.Model):
|
||||||
"""A transcoding or trimming job in the queue."""
|
"""A transcoding or trimming job in the queue."""
|
||||||
|
|
||||||
class Status(models.TextChoices):
|
|
||||||
PENDING = "pending", "Pending"
|
|
||||||
PROCESSING = "processing", "Processing"
|
|
||||||
COMPLETED = "completed", "Completed"
|
|
||||||
FAILED = "failed", "Failed"
|
|
||||||
CANCELLED = "cancelled", "Cancelled"
|
|
||||||
|
|
||||||
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
|
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
|
||||||
source_asset_id = models.UUIDField()
|
source_asset_id = models.UUIDField()
|
||||||
preset_id = models.UUIDField(null=True, blank=True)
|
preset_id = models.UUIDField(null=True, blank=True)
|
||||||
|
|||||||
41
tools/__init__.py
Normal file
41
tools/__init__.py
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
"""
|
||||||
|
Modelgen - Generic Model Generation Tool
|
||||||
|
|
||||||
|
Generates typed models from various sources to various output formats.
|
||||||
|
|
||||||
|
Input sources:
|
||||||
|
- Configuration files (soleprint config.json style)
|
||||||
|
- Python dataclasses in schema/ folder
|
||||||
|
- Existing codebases: Django, SQLAlchemy, Prisma (for extraction)
|
||||||
|
|
||||||
|
Output formats:
|
||||||
|
- pydantic: Pydantic BaseModel classes
|
||||||
|
- django: Django ORM models
|
||||||
|
- typescript: TypeScript interfaces
|
||||||
|
- protobuf: Protocol Buffer definitions
|
||||||
|
- prisma: Prisma schema
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
python -m soleprint.station.tools.modelgen from-config -c config.json -o models.py
|
||||||
|
python -m soleprint.station.tools.modelgen from-schema -o models/ --targets pydantic,typescript
|
||||||
|
python -m soleprint.station.tools.modelgen extract --source /path/to/django --targets pydantic
|
||||||
|
python -m soleprint.station.tools.modelgen list-formats
|
||||||
|
"""
|
||||||
|
|
||||||
|
__version__ = "0.2.0"
|
||||||
|
|
||||||
|
from .generator import GENERATORS, BaseGenerator
|
||||||
|
from .loader import ConfigLoader, load_config
|
||||||
|
from .model_generator import ModelGenerator
|
||||||
|
|
||||||
|
# Backwards compatibility
|
||||||
|
WRITERS = GENERATORS
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"ModelGenerator",
|
||||||
|
"ConfigLoader",
|
||||||
|
"load_config",
|
||||||
|
"GENERATORS",
|
||||||
|
"WRITERS",
|
||||||
|
"BaseGenerator",
|
||||||
|
]
|
||||||
290
tools/__main__.py
Normal file
290
tools/__main__.py
Normal file
@@ -0,0 +1,290 @@
|
|||||||
|
"""
|
||||||
|
Modelgen - Generic Model Generation Tool
|
||||||
|
|
||||||
|
Generates typed models from various sources to various formats.
|
||||||
|
|
||||||
|
Input sources:
|
||||||
|
- from-config: Configuration files (soleprint config.json style)
|
||||||
|
- from-schema: Python dataclasses in schema/ folder
|
||||||
|
- extract: Existing codebases (Django, SQLAlchemy, Prisma)
|
||||||
|
|
||||||
|
Output formats:
|
||||||
|
- pydantic: Pydantic BaseModel classes
|
||||||
|
- django: Django ORM models
|
||||||
|
- typescript: TypeScript interfaces
|
||||||
|
- protobuf: Protocol Buffer definitions
|
||||||
|
- prisma: Prisma schema
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
python -m soleprint.station.tools.modelgen --help
|
||||||
|
python -m soleprint.station.tools.modelgen from-config -c config.json -o models.py
|
||||||
|
python -m soleprint.station.tools.modelgen from-schema -o models/ --targets pydantic,typescript
|
||||||
|
python -m soleprint.station.tools.modelgen extract --source /path/to/django --targets pydantic
|
||||||
|
"""
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
import sys
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from .generator import GENERATORS
|
||||||
|
|
||||||
|
|
||||||
|
def cmd_from_config(args):
|
||||||
|
"""Generate models from a configuration file (soleprint config.json style)."""
|
||||||
|
from .loader import load_config
|
||||||
|
from .model_generator import ModelGenerator
|
||||||
|
|
||||||
|
config_path = Path(args.config)
|
||||||
|
if not config_path.exists():
|
||||||
|
print(f"Error: Config file not found: {config_path}", file=sys.stderr)
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
output_path = Path(args.output)
|
||||||
|
|
||||||
|
print(f"Loading config: {config_path}")
|
||||||
|
config = load_config(config_path)
|
||||||
|
|
||||||
|
print(f"Generating {args.format} models to: {output_path}")
|
||||||
|
generator = ModelGenerator(
|
||||||
|
config=config,
|
||||||
|
output_path=output_path,
|
||||||
|
output_format=args.format,
|
||||||
|
)
|
||||||
|
result_path = generator.generate()
|
||||||
|
|
||||||
|
print(f"Models generated: {result_path}")
|
||||||
|
|
||||||
|
|
||||||
|
def cmd_from_schema(args):
|
||||||
|
"""Generate models from Python dataclasses in schema/ folder."""
|
||||||
|
from .loader import load_schema
|
||||||
|
from .writer import write_file
|
||||||
|
|
||||||
|
# Determine schema path
|
||||||
|
schema_path = Path(args.schema) if args.schema else Path.cwd() / "schema"
|
||||||
|
|
||||||
|
if not schema_path.exists():
|
||||||
|
print(f"Error: Schema folder not found: {schema_path}", file=sys.stderr)
|
||||||
|
print(
|
||||||
|
"Create a schema/ folder with Python dataclasses and an __init__.py",
|
||||||
|
file=sys.stderr,
|
||||||
|
)
|
||||||
|
print("that exports DATACLASSES and ENUMS lists.", file=sys.stderr)
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
print(f"Loading schema: {schema_path}")
|
||||||
|
schema = load_schema(schema_path)
|
||||||
|
|
||||||
|
print(f"Found {len(schema.models)} models, {len(schema.enums)} enums")
|
||||||
|
|
||||||
|
# Parse targets
|
||||||
|
targets = [t.strip() for t in args.targets.split(",")]
|
||||||
|
output_dir = Path(args.output)
|
||||||
|
|
||||||
|
for target in targets:
|
||||||
|
if target not in GENERATORS:
|
||||||
|
print(f"Warning: Unknown target '{target}', skipping", file=sys.stderr)
|
||||||
|
continue
|
||||||
|
|
||||||
|
generator = GENERATORS[target]()
|
||||||
|
ext = generator.file_extension()
|
||||||
|
|
||||||
|
# Determine output filename (use target name to avoid overwrites)
|
||||||
|
if len(targets) == 1 and args.output.endswith(ext):
|
||||||
|
output_file = output_dir
|
||||||
|
else:
|
||||||
|
output_file = output_dir / f"models_{target}{ext}"
|
||||||
|
|
||||||
|
print(f"Generating {target} to: {output_file}")
|
||||||
|
generator.generate(schema, output_file)
|
||||||
|
|
||||||
|
print("Done!")
|
||||||
|
|
||||||
|
|
||||||
|
def cmd_extract(args):
    """Extract models from existing codebase."""
    from .loader.extract import EXTRACTORS

    source_path = Path(args.source)
    if not source_path.exists():
        print(f"Error: Source path not found: {source_path}", file=sys.stderr)
        sys.exit(1)

    framework = args.framework
    extractor = None

    if framework == "auto":
        # Probe each registered extractor until one recognizes the codebase.
        for candidate_name, candidate_cls in EXTRACTORS.items():
            candidate = candidate_cls(source_path)
            if candidate.detect():
                framework, extractor = candidate_name, candidate
                print(f"Detected framework: {framework}")
                break

        if not extractor:
            print("Error: Could not auto-detect framework", file=sys.stderr)
            print(f"Available frameworks: {list(EXTRACTORS.keys())}", file=sys.stderr)
            sys.exit(1)
    elif framework in EXTRACTORS:
        extractor = EXTRACTORS[framework](source_path)
    else:
        print(f"Error: Unknown framework: {framework}", file=sys.stderr)
        print(f"Available: {list(EXTRACTORS.keys())}", file=sys.stderr)
        sys.exit(1)

    print(f"Extracting from: {source_path}")
    models, enums = extractor.extract()
    print(f"Extracted {len(models)} models, {len(enums)} enums")

    # Parse the comma-separated target list and fan out one file per target.
    targets = [t.strip() for t in args.targets.split(",")]
    output_dir = Path(args.output)

    for target in targets:
        if target not in GENERATORS:
            print(f"Warning: Unknown target '{target}', skipping", file=sys.stderr)
            continue

        generator = GENERATORS[target]()
        suffix = generator.file_extension()

        # A single target whose output already carries the right extension is
        # written verbatim; otherwise derive a per-target filename so multiple
        # targets cannot overwrite each other.
        if len(targets) == 1 and args.output.endswith(suffix):
            output_file = output_dir
        else:
            output_file = output_dir / f"models_{target}{suffix}"

        print(f"Generating {target} to: {output_file}")
        generator.generate((models, enums), output_file)

    print("Done!")
||||||
|
|
||||||
|
|
||||||
|
def cmd_list_formats(args):
    """Print the names of all registered output formats."""
    entries = [f"  - {fmt}" for fmt in GENERATORS]
    print("\n".join(["Available output formats:", *entries]))
||||||
|
|
||||||
|
|
||||||
|
def main():
    """CLI entry point: build the argument parser and dispatch to a subcommand.

    Subcommands:
        from-config   — generate models from a soleprint configuration file.
        from-schema   — generate models from dataclasses in a schema/ folder.
        extract       — extract models from an existing codebase.
        list-formats  — list available output formats.

    Each subcommand registers its handler via ``set_defaults(func=...)`` and
    is invoked through ``args.func(args)``.
    """
    parser = argparse.ArgumentParser(
        description="Modelgen - Generic Model Generation Tool",
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )

    # required=True makes argparse error out (and print usage) when no
    # subcommand is given, instead of leaving args.func unset.
    subparsers = parser.add_subparsers(dest="command", required=True)

    # Available formats for help text
    formats = list(GENERATORS.keys())
    formats_str = ", ".join(formats)

    # from-config command
    config_parser = subparsers.add_parser(
        "from-config",
        help="Generate models from soleprint configuration file",
    )
    config_parser.add_argument(
        "--config",
        "-c",
        type=str,
        required=True,
        help="Path to configuration file (e.g., config.json)",
    )
    config_parser.add_argument(
        "--output",
        "-o",
        type=str,
        required=True,
        help="Output path (file or directory)",
    )
    config_parser.add_argument(
        "--format",
        "-f",
        type=str,
        default="pydantic",
        choices=["pydantic"],  # Only pydantic for config mode
        help="Output format (default: pydantic)",
    )
    config_parser.set_defaults(func=cmd_from_config)

    # from-schema command
    schema_parser = subparsers.add_parser(
        "from-schema",
        help="Generate models from Python dataclasses in schema/ folder",
    )
    schema_parser.add_argument(
        "--schema",
        "-s",
        type=str,
        default=None,
        help="Path to schema folder (default: ./schema)",
    )
    schema_parser.add_argument(
        "--output",
        "-o",
        type=str,
        required=True,
        help="Output path (file or directory)",
    )
    schema_parser.add_argument(
        "--targets",
        "-t",
        type=str,
        default="pydantic",
        help=f"Comma-separated output targets ({formats_str})",
    )
    schema_parser.set_defaults(func=cmd_from_schema)

    # extract command
    extract_parser = subparsers.add_parser(
        "extract",
        help="Extract models from existing codebase",
    )
    extract_parser.add_argument(
        "--source",
        "-s",
        type=str,
        required=True,
        help="Path to source codebase",
    )
    extract_parser.add_argument(
        "--framework",
        "-f",
        type=str,
        choices=["django", "sqlalchemy", "prisma", "auto"],
        default="auto",
        help="Source framework (default: auto-detect)",
    )
    extract_parser.add_argument(
        "--output",
        "-o",
        type=str,
        required=True,
        help="Output path (file or directory)",
    )
    extract_parser.add_argument(
        "--targets",
        "-t",
        type=str,
        default="pydantic",
        help=f"Comma-separated output targets ({formats_str})",
    )
    extract_parser.set_defaults(func=cmd_extract)

    # list-formats command
    formats_parser = subparsers.add_parser(
        "list-formats",
        help="List available output formats",
    )
    formats_parser.set_defaults(func=cmd_list_formats)

    args = parser.parse_args()
    # Dispatch to the handler registered by the chosen subcommand.
    args.func(args)


if __name__ == "__main__":
    main()
|
||||||
40
tools/generator/__init__.py
Normal file
40
tools/generator/__init__.py
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
"""
Generator - Stack-specific code generators for modelgen.

Supported generators:
- PydanticGenerator: Pydantic BaseModel classes
- DjangoGenerator: Django ORM models
- TypeScriptGenerator: TypeScript interfaces
- ProtobufGenerator: Protocol Buffer definitions
- PrismaGenerator: Prisma schema

The GENERATORS mapping below is the single source of truth for the
``--targets`` CLI flag: a target name is valid iff it is a key here.
"""

from typing import Dict, Type

from .base import BaseGenerator
from .django import DjangoGenerator
from .prisma import PrismaGenerator
from .protobuf import ProtobufGenerator
from .pydantic import PydanticGenerator
from .typescript import TypeScriptGenerator

# Registry of available generators. Values are the generator *classes*;
# callers instantiate them (GENERATORS[name]()) before use.
GENERATORS: Dict[str, Type[BaseGenerator]] = {
    "pydantic": PydanticGenerator,
    "django": DjangoGenerator,
    "typescript": TypeScriptGenerator,
    "ts": TypeScriptGenerator,  # Alias
    "protobuf": ProtobufGenerator,
    "proto": ProtobufGenerator,  # Alias
    "prisma": PrismaGenerator,
}

__all__ = [
    "BaseGenerator",
    "PydanticGenerator",
    "DjangoGenerator",
    "TypeScriptGenerator",
    "ProtobufGenerator",
    "PrismaGenerator",
    "GENERATORS",
]
|
||||||
23
tools/generator/base.py
Normal file
23
tools/generator/base.py
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
"""
|
||||||
|
Base Generator
|
||||||
|
|
||||||
|
Abstract base class for all code generators.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from abc import ABC, abstractmethod
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
|
||||||
|
class BaseGenerator(ABC):
    """Abstract base for code generators.

    Concrete subclasses (Pydantic, Django, TypeScript, Protobuf, Prisma)
    implement ``generate`` to render model definitions to a file and
    ``file_extension`` to report the output suffix used when deriving
    per-target filenames.
    """

    @abstractmethod
    def generate(self, models: Any, output_path: Path) -> None:
        """Generate code for the given models to the specified path.

        ``models`` is intentionally loosely typed: concrete generators
        accept several input shapes (a loader object, a tuple of
        definitions, or a list of dataclasses) and dispatch internally.
        """
        pass

    @abstractmethod
    def file_extension(self) -> str:
        """Return the file extension for this format (including the dot)."""
        pass
||||||
268
tools/generator/django.py
Normal file
268
tools/generator/django.py
Normal file
@@ -0,0 +1,268 @@
|
|||||||
|
"""
|
||||||
|
Django Generator
|
||||||
|
|
||||||
|
Generates Django ORM models from model definitions.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import dataclasses as dc
|
||||||
|
from enum import Enum
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any, List, get_type_hints
|
||||||
|
|
||||||
|
from ..helpers import format_opts, get_origin_name, get_type_name, unwrap_optional
|
||||||
|
from ..loader.schema import EnumDefinition, ModelDefinition
|
||||||
|
from ..types import DJANGO_SPECIAL, DJANGO_TYPES
|
||||||
|
from .base import BaseGenerator
|
||||||
|
|
||||||
|
|
||||||
|
class DjangoGenerator(BaseGenerator):
    """Generates Django ORM model files.

    Accepts three input shapes (see ``generate``) and renders a single
    ``models.py``-style file: enums become ``models.TextChoices`` classes
    and each model becomes a ``models.Model`` subclass. Field rendering is
    driven by the DJANGO_SPECIAL / DJANGO_TYPES templates imported from
    ``..types`` (templates not visible here — field option formatting
    depends on their exact placeholders).
    """

    def file_extension(self) -> str:
        # Django models are plain Python modules.
        return ".py"

    def generate(self, models, output_path: Path) -> None:
        """Generate Django models to output_path.

        ``models`` may be:
        - an object with a ``.models`` attribute (SchemaLoader-like),
        - a ``(models, enums)`` tuple of definition lists,
        - a plain list of dataclasses (MPR style).

        Raises ValueError for any other input type.
        """
        output_path.parent.mkdir(parents=True, exist_ok=True)

        # Handle different input types
        if hasattr(models, "models"):
            # SchemaLoader or similar
            content = self._generate_from_definitions(
                models.models, getattr(models, "enums", [])
            )
        elif isinstance(models, tuple):
            # (models, enums) tuple
            content = self._generate_from_definitions(models[0], models[1])
        elif isinstance(models, list):
            # List of dataclasses (MPR style)
            content = self._generate_from_dataclasses(models)
        else:
            raise ValueError(f"Unsupported input type: {type(models)}")

        output_path.write_text(content)

    def _generate_from_definitions(
        self, models: List[ModelDefinition], enums: List[EnumDefinition]
    ) -> str:
        """Generate from ModelDefinition objects: header, enums, then models."""
        lines = self._generate_header()

        # Generate enums as TextChoices
        for enum_def in enums:
            lines.extend(self._generate_text_choices(enum_def))
            lines.append("")

        # Generate models
        for model_def in models:
            lines.extend(self._generate_model_from_definition(model_def))
            lines.extend(["", ""])

        return "\n".join(lines)

    def _generate_from_dataclasses(self, dataclasses: List[type]) -> str:
        """Generate from Python dataclasses (MPR style)."""
        lines = self._generate_header()

        for cls in dataclasses:
            lines.extend(self._generate_model_from_dataclass(cls))
            lines.extend(["", ""])

        return "\n".join(lines)

    def _generate_header(self) -> List[str]:
        """Generate file header: banner docstring plus the imports the
        emitted models need (uuid for primary keys, django.db.models)."""
        return [
            '"""',
            "Django ORM Models - GENERATED FILE",
            "",
            "Do not edit directly. Regenerate using modelgen.",
            '"""',
            "",
            "import uuid",
            "from django.db import models",
            "",
        ]

    def _generate_text_choices(self, enum_def: EnumDefinition) -> List[str]:
        """Generate Django TextChoices from EnumDefinition.

        Member labels are derived from the member name (underscores to
        spaces, title-cased).
        """
        lines = [
            f"class {enum_def.name}(models.TextChoices):",
        ]
        for name, value in enum_def.values:
            label = name.replace("_", " ").title()
            lines.append(f'    {name} = "{value}", "{label}"')
        return lines

    def _generate_model_from_definition(self, model_def: ModelDefinition) -> List[str]:
        """Generate Django model from ModelDefinition."""
        docstring = model_def.docstring or model_def.name
        lines = [
            f"class {model_def.name}(models.Model):",
            # chr(10) is "\n": keep only the first line of the docstring.
            f'    """{docstring.strip().split(chr(10))[0]}"""',
            "",
        ]

        for field in model_def.fields:
            django_field = self._resolve_field_type(
                field.name, field.type_hint, field.default, field.optional
            )
            lines.append(f"    {field.name} = {django_field}")

        # Add Meta and __str__. Models with a created_at field get
        # newest-first default ordering.
        lines.extend(
            [
                "",
                "    class Meta:",
                '        ordering = ["-created_at"]'
                if any(f.name == "created_at" for f in model_def.fields)
                else "        pass",
                "",
                "    def __str__(self):",
            ]
        )

        # Determine __str__ return: prefer filename, then name, then pk.
        field_names = [f.name for f in model_def.fields]
        if "filename" in field_names:
            lines.append("        return self.filename")
        elif "name" in field_names:
            lines.append("        return self.name")
        else:
            lines.append("        return str(self.id)")

        return lines

    def _generate_model_from_dataclass(self, cls: type) -> List[str]:
        """Generate Django model from a dataclass (MPR style)."""
        docstring = cls.__doc__ or cls.__name__
        lines = [
            f"class {cls.__name__}(models.Model):",
            f'    """{docstring.strip().split(chr(10))[0]}"""',
            "",
        ]

        hints = get_type_hints(cls)
        fields = {f.name: f for f in dc.fields(cls)}

        # Check for enums and add Status inner class if needed.
        # NOTE(review): only the FIRST enum-typed field produces an inner
        # class, always named "Status", and the loop breaks — a dataclass
        # with two distinct enum fields would lose the second. Confirm this
        # single-enum assumption holds for all MPR dataclasses.
        for type_hint in hints.values():
            base, _ = unwrap_optional(type_hint)
            if isinstance(base, type) and issubclass(base, Enum):
                lines.append("    class Status(models.TextChoices):")
                for member in base:
                    label = member.name.replace("_", " ").title()
                    lines.append(f'        {member.name} = "{member.value}", "{label}"')
                lines.append("")
                break

        # Generate fields (private/underscored attributes are skipped).
        for name, type_hint in hints.items():
            if name.startswith("_"):
                continue
            field = fields.get(name)
            default = dc.MISSING
            if field and field.default is not dc.MISSING:
                default = field.default
            django_field = self._resolve_field_type(name, type_hint, default, False)
            lines.append(f"    {name} = {django_field}")

        # Add Meta and __str__
        lines.extend(
            [
                "",
                "    class Meta:",
                '        ordering = ["-created_at"]'
                if "created_at" in hints
                else "        pass",
                "",
                "    def __str__(self):",
            ]
        )

        if "filename" in hints:
            lines.append("        return self.filename")
        elif "name" in hints:
            lines.append("        return self.name")
        else:
            lines.append("        return str(self.id)")

        return lines

    def _resolve_field_type(
        self, name: str, type_hint: Any, default: Any, optional: bool
    ) -> str:
        """Resolve Python type to a Django field declaration string.

        Resolution order: special field names, containers, UUID/datetime,
        enums, name-based text/bigint heuristics, then plain scalars with
        CharField as the final fallback. ``opts`` / template placeholders
        come from the DJANGO_TYPES templates (not visible here — assumed
        to accept the argument shapes used below; TODO confirm).
        """
        # Special fields (e.g. primary keys / timestamps) use fixed templates.
        if name in DJANGO_SPECIAL:
            return DJANGO_SPECIAL[name]

        base, is_optional = unwrap_optional(type_hint)
        optional = optional or is_optional
        origin = get_origin_name(base)
        type_name = get_type_name(base)
        opts = format_opts(optional)

        # Container types
        if origin == "dict":
            return DJANGO_TYPES["dict"]
        if origin == "list":
            return DJANGO_TYPES["list"]

        # UUID / datetime
        if type_name == "UUID":
            return DJANGO_TYPES["UUID"].format(opts=opts)
        if type_name == "datetime":
            return DJANGO_TYPES["datetime"].format(opts=opts)

        # Enum
        # NOTE(review): the default references "Status.<member>", i.e. the
        # inner class emitted by _generate_model_from_dataclass. Models
        # produced via _generate_model_from_definition never emit an inner
        # Status class, so an enum default on that path would reference an
        # undefined name in the generated file — verify.
        if isinstance(base, type) and issubclass(base, Enum):
            extra = []
            if optional:
                extra.append("null=True, blank=True")
            if default is not dc.MISSING and isinstance(default, Enum):
                extra.append(f"default=Status.{default.name}")
            return DJANGO_TYPES["enum"].format(
                opts=", " + ", ".join(extra) if extra else ""
            )

        # Text fields (based on name heuristics)
        if base is str and any(
            x in name for x in ("message", "comments", "description")
        ):
            return DJANGO_TYPES["text"]

        # BigInt fields (known large-valued names)
        if base is int and name in ("file_size", "bitrate"):
            return DJANGO_TYPES["bigint"].format(opts=opts)

        # String with max_length chosen by name heuristic:
        # paths get 1000, filenames 500, everything else 255.
        if base is str:
            max_length = 1000 if "path" in name else 500 if "filename" in name else 255
            return DJANGO_TYPES[str].format(
                max_length=max_length, opts=", " + opts if opts else ""
            )

        # Integer (callable defaults are factories, not literal values —
        # they cannot be inlined into the field declaration).
        if base is int:
            extra = [opts] if opts else []
            if default is not dc.MISSING and not callable(default):
                extra.append(f"default={default}")
            return DJANGO_TYPES[int].format(opts=", ".join(extra))

        # Float
        if base is float:
            extra = [opts] if opts else []
            if default is not dc.MISSING and not callable(default):
                extra.append(f"default={default}")
            return DJANGO_TYPES[float].format(opts=", ".join(extra))

        # Boolean (BooleanField requires an explicit default; False if unset)
        if base is bool:
            default_val = default if default is not dc.MISSING else False
            return DJANGO_TYPES[bool].format(default=default_val)

        # Fallback to CharField
        return DJANGO_TYPES[str].format(
            max_length=255, opts=", " + opts if opts else ""
        )
||||||
173
tools/generator/prisma.py
Normal file
173
tools/generator/prisma.py
Normal file
@@ -0,0 +1,173 @@
|
|||||||
|
"""
|
||||||
|
Prisma Generator
|
||||||
|
|
||||||
|
Generates Prisma schema from model definitions.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from enum import Enum
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any, List, get_type_hints
|
||||||
|
|
||||||
|
from ..helpers import get_origin_name, get_type_name, unwrap_optional
|
||||||
|
from ..loader.schema import EnumDefinition, ModelDefinition
|
||||||
|
from ..types import PRISMA_SPECIAL, PRISMA_TYPES
|
||||||
|
from .base import BaseGenerator
|
||||||
|
|
||||||
|
|
||||||
|
class PrismaGenerator(BaseGenerator):
    """Generates Prisma schema files (datasource, generator, enums, models)."""

    def file_extension(self) -> str:
        return ".prisma"

    def generate(self, models, output_path: Path) -> None:
        """Render the given models to *output_path* as a Prisma schema.

        Accepts a SchemaLoader-like object, a (models, enums) tuple, or a
        list of dataclasses; raises ValueError otherwise.
        """
        output_path.parent.mkdir(parents=True, exist_ok=True)

        # Dispatch on the shape of the input.
        if hasattr(models, "models"):
            rendered = self._generate_from_definitions(
                models.models, getattr(models, "enums", [])
            )
        elif isinstance(models, tuple):
            rendered = self._generate_from_definitions(models[0], models[1])
        elif isinstance(models, list):
            rendered = self._generate_from_dataclasses(models)
        else:
            raise ValueError(f"Unsupported input type: {type(models)}")

        output_path.write_text(rendered)

    def _generate_from_definitions(
        self, models: List[ModelDefinition], enums: List[EnumDefinition]
    ) -> str:
        """Render ModelDefinition/EnumDefinition objects: enums first."""
        out = self._generate_header()

        for enum_def in enums:
            out.extend(self._generate_enum(enum_def))
            out.append("")

        for model_def in models:
            out.extend(self._generate_model_from_definition(model_def))
            out.append("")

        return "\n".join(out)

    def _generate_from_dataclasses(self, dataclasses: List[type]) -> str:
        """Render plain Python dataclasses (MPR style)."""
        out = self._generate_header()

        # Emit each distinct enum exactly once, before any model that uses it.
        seen_enums = set()
        for cls in dataclasses:
            for hint in get_type_hints(cls).values():
                base, _ = unwrap_optional(hint)
                if (
                    isinstance(base, type)
                    and issubclass(base, Enum)
                    and base.__name__ not in seen_enums
                ):
                    out.extend(self._generate_enum_from_python(base))
                    out.append("")
                    seen_enums.add(base.__name__)

        for cls in dataclasses:
            out.extend(self._generate_model_from_dataclass(cls))
            out.append("")

        return "\n".join(out)

    def _generate_header(self) -> List[str]:
        """Schema preamble: banner comment, client generator, datasource."""
        return [
            "// Prisma Schema - GENERATED FILE",
            "//",
            "// Do not edit directly. Regenerate using modelgen.",
            "",
            "generator client {",
            '  provider = "prisma-client-py"',
            "}",
            "",
            "datasource db {",
            '  provider = "postgresql"',
            '  url      = env("DATABASE_URL")',
            "}",
            "",
        ]

    def _generate_enum(self, enum_def: EnumDefinition) -> List[str]:
        """Render a Prisma enum from an EnumDefinition (names only)."""
        members = [f"  {name}" for name, _ in enum_def.values]
        return [f"enum {enum_def.name} {{", *members, "}"]

    def _generate_enum_from_python(self, enum_cls: type) -> List[str]:
        """Render a Prisma enum from a Python Enum class."""
        members = [f"  {member.name}" for member in enum_cls]
        return [f"enum {enum_cls.__name__} {{", *members, "}"]

    def _generate_model_from_definition(self, model_def: ModelDefinition) -> List[str]:
        """Render a Prisma model block from a ModelDefinition."""
        rows = [
            f"  {field.name} "
            f"{self._resolve_type(field.name, field.type_hint, field.optional)}"
            for field in model_def.fields
        ]
        return [f"model {model_def.name} {{", *rows, "}"]

    def _generate_model_from_dataclass(self, cls: type) -> List[str]:
        """Render a Prisma model block from a dataclass (skips _private)."""
        rows = []
        for name, hint in get_type_hints(cls).items():
            if name.startswith("_"):
                continue
            rows.append(f"  {name} {self._resolve_type(name, hint, False)}")
        return [f"model {cls.__name__} {{", *rows, "}"]

    def _resolve_type(self, name: str, type_hint: Any, optional: bool) -> str:
        """Map a Python type hint to a Prisma type string.

        Checks special field names first, then containers, UUID/datetime,
        enums, and finally plain scalars (String fallback). Nullable fields
        get Prisma's trailing "?" marker.
        """
        # Fields with reserved semantics use fixed declarations.
        if name in PRISMA_SPECIAL:
            return PRISMA_SPECIAL[name]

        base, is_optional = unwrap_optional(type_hint)
        optional = optional or is_optional
        origin = get_origin_name(base)
        type_name = get_type_name(base)

        if origin in ("dict", "list"):
            resolved = PRISMA_TYPES.get(origin, "Json")
        elif type_name in ("UUID", "datetime"):
            resolved = PRISMA_TYPES.get(type_name, "String")
        elif isinstance(base, type) and issubclass(base, Enum):
            resolved = base.__name__
        else:
            resolved = PRISMA_TYPES.get(base, "String")

        return f"{resolved}?" if optional else resolved
||||||
168
tools/generator/protobuf.py
Normal file
168
tools/generator/protobuf.py
Normal file
@@ -0,0 +1,168 @@
|
|||||||
|
"""
|
||||||
|
Protobuf Generator
|
||||||
|
|
||||||
|
Generates Protocol Buffer definitions from model definitions.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any, List, get_type_hints
|
||||||
|
|
||||||
|
from ..helpers import get_origin_name, unwrap_optional
|
||||||
|
from ..loader.schema import GrpcServiceDefinition, ModelDefinition
|
||||||
|
from ..types import PROTO_RESOLVERS
|
||||||
|
from .base import BaseGenerator
|
||||||
|
|
||||||
|
|
||||||
|
class ProtobufGenerator(BaseGenerator):
    """Generates Protocol Buffer (proto3) definition files.

    Messages come from ModelDefinitions, dataclasses, or a SchemaLoader
    carrying gRPC message/service definitions; field types are resolved
    via the PROTO_RESOLVERS table from ``..types``.
    """

    def file_extension(self) -> str:
        return ".proto"

    def generate(self, models, output_path: Path) -> None:
        """Generate protobuf definitions to output_path.

        ``models`` may be:
        - an object with ``grpc_messages`` (SchemaLoader with gRPC defs),
        - a ``(messages, service_def)`` tuple,
        - a list of dataclasses (MPR style).

        Raises ValueError for any other input type.
        """
        output_path.parent.mkdir(parents=True, exist_ok=True)

        # Handle different input types
        if hasattr(models, "grpc_messages"):
            # SchemaLoader with gRPC definitions
            content = self._generate_from_loader(models)
        elif isinstance(models, tuple) and len(models) >= 2:
            # (messages, service_def) tuple.
            # BUG FIX: this previously required len(models) >= 3, so the
            # documented 2-tuple fell through to the ValueError below even
            # though only models[0] and models[1] are ever used.
            content = self._generate_from_definitions(models[0], models[1])
        elif isinstance(models, list):
            # List of dataclasses (MPR style)
            content = self._generate_from_dataclasses(models)
        else:
            raise ValueError(f"Unsupported input type: {type(models)}")

        output_path.write_text(content)

    def _generate_from_loader(self, loader) -> str:
        """Generate from SchemaLoader; falls back to generic package/service
        names when the loader carries no service definition."""
        messages = loader.grpc_messages
        service = loader.grpc_service

        lines = self._generate_header(
            service.package if service else "service",
            service.name if service else "Service",
            service.methods if service else [],
        )

        for model_def in messages:
            lines.extend(self._generate_message_from_definition(model_def))
            lines.append("")

        return "\n".join(lines)

    def _generate_from_definitions(
        self, messages: List[ModelDefinition], service: GrpcServiceDefinition
    ) -> str:
        """Generate from ModelDefinition objects plus a service definition."""
        lines = self._generate_header(service.package, service.name, service.methods)

        for model_def in messages:
            lines.extend(self._generate_message_from_definition(model_def))
            lines.append("")

        return "\n".join(lines)

    def _generate_from_dataclasses(self, dataclasses: List[type]) -> str:
        """Generate from Python dataclasses (MPR style); no service block."""
        lines = self._generate_header("service", "Service", [])

        for cls in dataclasses:
            lines.extend(self._generate_message_from_dataclass(cls))
            lines.append("")

        return "\n".join(lines)

    def _generate_header(
        self, package: str, service_name: str, methods: List[dict]
    ) -> List[str]:
        """Generate the file header (syntax/package) and, when ``methods``
        is non-empty, the ``service`` block with one rpc per method."""
        lines = [
            "// Protocol Buffer Definitions - GENERATED FILE",
            "//",
            "// Do not edit directly. Regenerate using modelgen.",
            "",
            'syntax = "proto3";',
            "",
            f"package {package};",
            "",
        ]

        if methods:
            lines.append(f"service {service_name} {{")
            for m in methods:
                # request/response entries may be classes or plain names.
                req = (
                    m["request"].__name__
                    if hasattr(m["request"], "__name__")
                    else str(m["request"])
                )
                resp = (
                    m["response"].__name__
                    if hasattr(m["response"], "__name__")
                    else str(m["response"])
                )
                # Server-streaming methods are marked with "stream".
                returns = f"stream {resp}" if m.get("stream_response") else resp
                lines.append(f"  rpc {m['name']}({req}) returns ({returns});")
            lines.extend(["}", ""])

        return lines

    def _generate_message_from_definition(
        self, model_def: ModelDefinition
    ) -> List[str]:
        """Generate a proto message from a ModelDefinition; field numbers
        are assigned sequentially from 1 in declaration order."""
        lines = [f"message {model_def.name} {{"]

        if not model_def.fields:
            lines.append("  // Empty")
        else:
            for i, field in enumerate(model_def.fields, 1):
                proto_type, optional = self._resolve_type(field.type_hint)
                # "repeated" fields cannot also carry "optional" in proto3.
                prefix = (
                    "optional "
                    if optional and not proto_type.startswith("repeated")
                    else ""
                )
                lines.append(f"  {prefix}{proto_type} {field.name} = {i};")

        lines.append("}")
        return lines

    def _generate_message_from_dataclass(self, cls: type) -> List[str]:
        """Generate a proto message from a dataclass (same numbering rule)."""
        lines = [f"message {cls.__name__} {{"]

        hints = get_type_hints(cls)
        if not hints:
            lines.append("  // Empty")
        else:
            for i, (name, type_hint) in enumerate(hints.items(), 1):
                proto_type, optional = self._resolve_type(type_hint)
                prefix = (
                    "optional "
                    if optional and not proto_type.startswith("repeated")
                    else ""
                )
                lines.append(f"  {prefix}{proto_type} {name} = {i};")

        lines.append("}")
        return lines

    def _resolve_type(self, type_hint: Any) -> tuple[str, bool]:
        """Resolve Python type to a proto type. Returns (type, is_optional).

        Looks up a resolver by the type's origin name first, then by the
        type itself; unknown types fall back to "string". ``repeated``
        results are never reported optional.
        """
        base, optional = unwrap_optional(type_hint)
        origin = get_origin_name(base)

        # Look up resolver
        resolver = PROTO_RESOLVERS.get(origin) or PROTO_RESOLVERS.get(base)

        if resolver:
            result = resolver(base)
            is_repeated = result.startswith("repeated")
            return result, optional and not is_repeated

        return "string", optional
||||||
427
tools/generator/pydantic.py
Normal file
427
tools/generator/pydantic.py
Normal file
@@ -0,0 +1,427 @@
|
|||||||
|
"""
|
||||||
|
Pydantic Generator
|
||||||
|
|
||||||
|
Generates Pydantic BaseModel classes from model definitions.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from enum import Enum
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any, List, get_type_hints
|
||||||
|
|
||||||
|
from ..helpers import get_origin_name, get_type_name, unwrap_optional
|
||||||
|
from ..loader.schema import EnumDefinition, FieldDefinition, ModelDefinition
|
||||||
|
from ..types import PYDANTIC_RESOLVERS
|
||||||
|
from .base import BaseGenerator
|
||||||
|
|
||||||
|
|
||||||
|
class PydanticGenerator(BaseGenerator):
|
||||||
|
"""Generates Pydantic model files."""
|
||||||
|
|
||||||
|
def file_extension(self) -> str:
    """Generated Pydantic modules are plain Python files."""
    return ".py"
|
||||||
|
|
||||||
|
def generate(self, models, output_path: Path) -> None:
    """Render Pydantic models for *models* and write them to *output_path*.

    Accepts a ConfigLoader, a SchemaLoader, a ``(models, enums)`` tuple
    from an extractor, or a plain list of dataclasses.

    Raises:
        ValueError: if *models* matches none of the supported shapes.
    """
    output_path.parent.mkdir(parents=True, exist_ok=True)

    # Dispatch on input flavour; the duck-typed loader checks must come
    # before the isinstance checks so loader objects are not misrouted.
    if hasattr(models, "get_shared_component"):
        rendered = self._generate_from_config(models)  # soleprint config
    elif hasattr(models, "models"):
        rendered = self._generate_from_definitions(
            models.models, getattr(models, "enums", [])
        )  # SchemaLoader
    elif isinstance(models, tuple):
        rendered = self._generate_from_definitions(*models[:2])  # extractor output
    elif isinstance(models, list):
        rendered = self._generate_from_dataclasses(models)  # MPR dataclasses
    else:
        raise ValueError(f"Unsupported input type: {type(models)}")

    output_path.write_text(rendered)
|
||||||
|
|
||||||
|
def _generate_from_definitions(
    self, models: List[ModelDefinition], enums: List[EnumDefinition]
) -> str:
    """Render the header, then enums, then models (schema/extract mode)."""
    chunks = self._generate_header()
    # Enums first so model fields can reference them by name.
    for definition in enums:
        chunks += self._generate_enum(definition)
        chunks.append("")
    for definition in models:
        chunks += self._generate_model_from_definition(definition)
        chunks.append("")
    return "\n".join(chunks)
|
||||||
|
|
||||||
|
def _generate_from_dataclasses(self, dataclasses: List[type]) -> str:
    """Render models from live dataclasses, emitting referenced enums first."""
    out = self._generate_header()

    # Walk every field annotation and emit each Enum exactly once,
    # before any model that might reference it.
    seen_enums: set = set()
    for cls in dataclasses:
        for annotation in get_type_hints(cls).values():
            core, _ = unwrap_optional(annotation)
            is_new_enum = (
                isinstance(core, type)
                and issubclass(core, Enum)
                and core.__name__ not in seen_enums
            )
            if is_new_enum:
                out.extend(self._generate_enum_from_python(core))
                out.append("")
                seen_enums.add(core.__name__)

    for cls in dataclasses:
        out.extend(self._generate_model_from_dataclass(cls))
        out.append("")

    return "\n".join(out)
|
||||||
|
|
||||||
|
def _generate_header(self) -> List[str]:
    """Return the generated-file banner and common imports, line by line."""
    banner = [
        '"""',
        "Pydantic Models - GENERATED FILE",
        "",
        "Do not edit directly. Regenerate using modelgen.",
        '"""',
        "",
    ]
    # Imports cover every type the field resolvers may emit.
    common_imports = [
        "from datetime import datetime",
        "from enum import Enum",
        "from typing import Any, Dict, List, Optional",
        "from uuid import UUID",
        "",
        "from pydantic import BaseModel, Field",
        "",
    ]
    return banner + common_imports
|
||||||
|
|
||||||
|
def _generate_enum(self, enum_def: EnumDefinition) -> List[str]:
    """Render one str-valued Enum class from an EnumDefinition."""
    members = [f'    {name} = "{value}"' for name, value in enum_def.values]
    return [f"class {enum_def.name}(str, Enum):", *members]
|
||||||
|
|
||||||
|
def _generate_enum_from_python(self, enum_cls: type) -> List[str]:
    """Render a str-valued Enum mirroring an existing Python Enum."""
    body = [f'    {member.name} = "{member.value}"' for member in enum_cls]
    return [f"class {enum_cls.__name__}(str, Enum):", *body]
|
||||||
|
|
||||||
|
def _generate_model_from_definition(self, model_def: ModelDefinition) -> List[str]:
    """Render one BaseModel class from a ModelDefinition."""
    doc = model_def.docstring or model_def.name
    # Only the first docstring line is carried into the generated model.
    first_doc_line = doc.strip().split(chr(10))[0]
    rendered = [
        f"class {model_def.name}(BaseModel):",
        f'    """{first_doc_line}"""',
    ]
    if model_def.fields:
        for field in model_def.fields:
            annotation = self._resolve_type(field.type_hint, field.optional)
            suffix = self._format_default(field.default, field.optional)
            rendered.append(f"    {field.name}: {annotation}{suffix}")
    else:
        rendered.append("    pass")
    return rendered
|
||||||
|
|
||||||
|
def _generate_model_from_dataclass(self, cls: type) -> List[str]:
    """Generate a Pydantic model from a dataclass.

    Emits one field line per public annotation. Optional fields default to
    None; fields declared with ``default_factory`` are forwarded so
    ``_format_default`` can render ``Field(default_factory=list)``.
    """
    import dataclasses as dc

    docstring = cls.__doc__ or cls.__name__
    lines = [
        f"class {cls.__name__}(BaseModel):",
        f'    """{docstring.strip().split(chr(10))[0]}"""',
    ]

    hints = get_type_hints(cls)
    fields = {f.name: f for f in dc.fields(cls)}

    for name, type_hint in hints.items():
        if name.startswith("_"):
            # Private attributes are not part of the generated API surface.
            continue

        field = fields.get(name)
        default_val = dc.MISSING
        if field:
            if field.default is not dc.MISSING:
                default_val = field.default
            elif field.default_factory is not dc.MISSING:
                # Forward the factory callable itself; previously factories
                # were dropped, leaving _format_default's callable branch
                # dead and rendering such fields as required.
                default_val = field.default_factory

        py_type = self._resolve_type(type_hint, False)
        default = self._format_default(default_val, "Optional" in py_type)
        lines.append(f"    {name}: {py_type}{default}")

    return lines
|
||||||
|
|
||||||
|
def _resolve_type(self, type_hint: Any, optional: bool) -> str:
    """Resolve a Python annotation to a Pydantic type expression."""
    core, inner_optional = unwrap_optional(type_hint)
    optional = optional or inner_optional

    # Progressively less specific lookups: container origin, type name,
    # then the type object itself; Enum subclasses use the enum handler.
    resolver = PYDANTIC_RESOLVERS.get(get_origin_name(core))
    if resolver is None:
        resolver = PYDANTIC_RESOLVERS.get(get_type_name(core))
    if resolver is None:
        resolver = PYDANTIC_RESOLVERS.get(core)
    if resolver is None and isinstance(core, type) and issubclass(core, Enum):
        resolver = PYDANTIC_RESOLVERS["enum"]

    rendered = resolver(core) if resolver else "str"
    return f"Optional[{rendered}]" if optional else rendered
|
||||||
|
|
||||||
|
def _format_default(self, default: Any, optional: bool) -> str:
    """Render the ' = ...' default suffix for a generated field line.

    Returns an empty string when the field should have no default.
    Branch order is significant: str before callable (str is not callable,
    but Enum members can be), Enum before the generic repr fallback.
    """
    import dataclasses as dc

    # Optional fields always default to None, even if a concrete
    # default was supplied.
    if optional:
        return " = None"
    if default is dc.MISSING or default is None:
        return ""
    if isinstance(default, str):
        return f' = "{default}"'
    if isinstance(default, Enum):
        return f" = {default.__class__.__name__}.{default.name}"
    if callable(default):
        # Only list factories are supported; other callables get no default.
        # NOTE(review): the "list" in str(default) check is name-based and
        # would also match e.g. a function named "build_list" — confirm.
        return " = Field(default_factory=list)" if "list" in str(default) else ""
    return f" = {default!r}"
|
||||||
|
|
||||||
|
def _generate_from_config(self, config) -> str:
    """Generate from ConfigLoader (soleprint config.json mode).

    Builds the entire generated module as one f-string template: fixed
    enums, shared components, system-specific components, composed types,
    and list-wrapper collections. Component/system names come from the
    loaded config, so the template must not be edited without checking
    every interpolation.
    """
    # Get component names from config.
    # Shared components appear in several systems.
    config_comp = config.get_shared_component("config")
    data_comp = config.get_shared_component("data")

    # The three systems the template hard-codes below.
    data_flow_sys = config.get_system("data_flow")
    doc_sys = config.get_system("documentation")
    exec_sys = config.get_system("execution")

    connector_comp = config.get_component("data_flow", "connector")
    pulse_comp = config.get_component("data_flow", "composed")

    pattern_comp = config.get_component("documentation", "pattern")
    doc_composed = config.get_component("documentation", "composed")

    tool_comp = config.get_component("execution", "utility")
    monitor_comp = config.get_component("execution", "watcher")
    cabinet_comp = config.get_component("execution", "container")
    exec_composed = config.get_component("execution", "composed")

    # NOTE(review): assumes every lookup above succeeded; a missing
    # component would surface as AttributeError during formatting — confirm
    # the config schema guarantees these keys.
    return f'''"""
Pydantic models - Generated from {config.framework.name}.config.json

DO NOT EDIT MANUALLY - Regenerate from config
"""

from enum import Enum
from typing import List, Literal, Optional

from pydantic import BaseModel, Field


class Status(str, Enum):
    PENDING = "pending"
    PLANNED = "planned"
    BUILDING = "building"
    DEV = "dev"
    LIVE = "live"
    READY = "ready"


class System(str, Enum):
    {data_flow_sys.name.upper()} = "{data_flow_sys.name}"
    {doc_sys.name.upper()} = "{doc_sys.name}"
    {exec_sys.name.upper()} = "{exec_sys.name}"


class ToolType(str, Enum):
    APP = "app"
    CLI = "cli"


# === Shared Components ===


class {config_comp.title}(BaseModel):
    """{config_comp.description}. Shared across {data_flow_sys.name}, {exec_sys.name}."""

    name: str  # Unique identifier
    slug: str  # URL-friendly identifier
    title: str  # Display title for UI
    status: Optional[Status] = None
    config_path: Optional[str] = None


class {data_comp.title}(BaseModel):
    """{data_comp.description}. Shared across all systems."""

    name: str  # Unique identifier
    slug: str  # URL-friendly identifier
    title: str  # Display title for UI
    status: Optional[Status] = None
    source_template: Optional[str] = None
    data_path: Optional[str] = None


# === System-Specific Components ===


class {connector_comp.title}(BaseModel):
    """{connector_comp.description} ({data_flow_sys.name})."""

    name: str  # Unique identifier
    slug: str  # URL-friendly identifier
    title: str  # Display title for UI
    status: Optional[Status] = None
    system: Literal["{data_flow_sys.name}"] = "{data_flow_sys.name}"
    mock: Optional[bool] = None
    description: Optional[str] = None


class {pattern_comp.title}(BaseModel):
    """{pattern_comp.description} ({doc_sys.name})."""

    name: str  # Unique identifier
    slug: str  # URL-friendly identifier
    title: str  # Display title for UI
    status: Optional[Status] = None
    template_path: Optional[str] = None
    system: Literal["{doc_sys.name}"] = "{doc_sys.name}"


class {tool_comp.title}(BaseModel):
    """{tool_comp.description} ({exec_sys.name})."""

    name: str  # Unique identifier
    slug: str  # URL-friendly identifier
    title: str  # Display title for UI
    status: Optional[Status] = None
    system: Literal["{exec_sys.name}"] = "{exec_sys.name}"
    type: Optional[ToolType] = None
    description: Optional[str] = None
    path: Optional[str] = None
    url: Optional[str] = None
    cli: Optional[str] = None


class {monitor_comp.title}(BaseModel):
    """{monitor_comp.description} ({exec_sys.name})."""

    name: str  # Unique identifier
    slug: str  # URL-friendly identifier
    title: str  # Display title for UI
    status: Optional[Status] = None
    system: Literal["{exec_sys.name}"] = "{exec_sys.name}"


class {cabinet_comp.title}(BaseModel):
    """{cabinet_comp.description} ({exec_sys.name})."""

    name: str  # Unique identifier
    slug: str  # URL-friendly identifier
    title: str  # Display title for UI
    status: Optional[Status] = None
    tools: List[{tool_comp.title}] = Field(default_factory=list)
    system: Literal["{exec_sys.name}"] = "{exec_sys.name}"


# === Composed Types ===


class {pulse_comp.title}(BaseModel):
    """{pulse_comp.description} ({data_flow_sys.name}). Formula: {pulse_comp.formula}."""

    name: str  # Unique identifier
    slug: str  # URL-friendly identifier
    title: str  # Display title for UI
    status: Optional[Status] = None
    {connector_comp.name}: Optional[{connector_comp.title}] = None
    {config_comp.name}: Optional[{config_comp.title}] = None
    {data_comp.name}: Optional[{data_comp.title}] = None
    system: Literal["{data_flow_sys.name}"] = "{data_flow_sys.name}"


class {doc_composed.title}(BaseModel):
    """{doc_composed.description} ({doc_sys.name}). Formula: {doc_composed.formula}."""

    name: str  # Unique identifier
    slug: str  # URL-friendly identifier
    title: str  # Display title for UI
    status: Optional[Status] = None
    template: Optional[{pattern_comp.title}] = None
    {data_comp.name}: Optional[{data_comp.title}] = None
    output_{data_comp.name}: Optional[{data_comp.title}] = None
    system: Literal["{doc_sys.name}"] = "{doc_sys.name}"


class {exec_composed.title}(BaseModel):
    """{exec_composed.description} ({exec_sys.name}). Formula: {exec_composed.formula}."""

    name: str  # Unique identifier
    slug: str  # URL-friendly identifier
    title: str  # Display title for UI
    status: Optional[Status] = None
    cabinet: Optional[{cabinet_comp.title}] = None
    {config_comp.name}: Optional[{config_comp.title}] = None
    {data_comp.plural}: List[{data_comp.title}] = Field(default_factory=list)
    system: Literal["{exec_sys.name}"] = "{exec_sys.name}"


# === Collection wrappers for JSON files ===


class {config_comp.title}Collection(BaseModel):
    items: List[{config_comp.title}] = Field(default_factory=list)


class {data_comp.title}Collection(BaseModel):
    items: List[{data_comp.title}] = Field(default_factory=list)


class {connector_comp.title}Collection(BaseModel):
    items: List[{connector_comp.title}] = Field(default_factory=list)


class {pattern_comp.title}Collection(BaseModel):
    items: List[{pattern_comp.title}] = Field(default_factory=list)


class {tool_comp.title}Collection(BaseModel):
    items: List[{tool_comp.title}] = Field(default_factory=list)


class {monitor_comp.title}Collection(BaseModel):
    items: List[{monitor_comp.title}] = Field(default_factory=list)


class {cabinet_comp.title}Collection(BaseModel):
    items: List[{cabinet_comp.title}] = Field(default_factory=list)


class {pulse_comp.title}Collection(BaseModel):
    items: List[{pulse_comp.title}] = Field(default_factory=list)


class {doc_composed.title}Collection(BaseModel):
    items: List[{doc_composed.title}] = Field(default_factory=list)


class {exec_composed.title}Collection(BaseModel):
    items: List[{exec_composed.title}] = Field(default_factory=list)
'''
|
||||||
144
tools/generator/typescript.py
Normal file
144
tools/generator/typescript.py
Normal file
@@ -0,0 +1,144 @@
|
|||||||
|
"""
|
||||||
|
TypeScript Generator
|
||||||
|
|
||||||
|
Generates TypeScript interfaces from model definitions.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from enum import Enum
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any, List, get_type_hints
|
||||||
|
|
||||||
|
from ..helpers import get_origin_name, get_type_name, unwrap_optional
|
||||||
|
from ..loader.schema import EnumDefinition, FieldDefinition, ModelDefinition
|
||||||
|
from ..types import TS_RESOLVERS
|
||||||
|
from .base import BaseGenerator
|
||||||
|
|
||||||
|
|
||||||
|
class TypeScriptGenerator(BaseGenerator):
|
||||||
|
"""Generates TypeScript interface files."""
|
||||||
|
|
||||||
|
def file_extension(self) -> str:
    """Generated TypeScript modules use the .ts extension."""
    return ".ts"
|
||||||
|
|
||||||
|
def generate(self, models, output_path: Path) -> None:
    """Render TypeScript types for *models* and write them to *output_path*.

    Accepts a SchemaLoader, a ``(models, enums)`` tuple from an extractor,
    or a plain list of dataclasses.

    Raises:
        ValueError: if *models* matches none of the supported shapes.
    """
    output_path.parent.mkdir(parents=True, exist_ok=True)

    # Duck-typed SchemaLoader check first, then extractor tuple,
    # then raw dataclasses.
    if hasattr(models, "models"):
        rendered = self._generate_from_definitions(
            models.models, getattr(models, "enums", [])
        )
    elif isinstance(models, tuple):
        rendered = self._generate_from_definitions(*models[:2])
    elif isinstance(models, list):
        rendered = self._generate_from_dataclasses(models)
    else:
        raise ValueError(f"Unsupported input type: {type(models)}")

    output_path.write_text(rendered)
|
||||||
|
|
||||||
|
def _generate_from_definitions(
    self, models: List[ModelDefinition], enums: List[EnumDefinition]
) -> str:
    """Render the header, enum union types, then interfaces."""
    out = self._generate_header()
    # Enums become string-literal union types, one per definition.
    for definition in enums:
        union = " | ".join(f'"{value}"' for _, value in definition.values)
        out += [f"export type {definition.name} = {union};", ""]
    for definition in models:
        out += self._generate_interface_from_definition(definition)
        out.append("")
    return "\n".join(out)
|
||||||
|
|
||||||
|
def _generate_from_dataclasses(self, dataclasses: List[type]) -> str:
    """Render interfaces from live dataclasses, emitting referenced enums first."""
    out = self._generate_header()

    # Emit each referenced Enum exactly once, as a string-literal union,
    # before any interface that might reference it.
    seen: set = set()
    for cls in dataclasses:
        for annotation in get_type_hints(cls).values():
            core, _ = unwrap_optional(annotation)
            if not (isinstance(core, type) and issubclass(core, Enum)):
                continue
            if core.__name__ in seen:
                continue
            union = " | ".join(f'"{member.value}"' for member in core)
            out.append(f"export type {core.__name__} = {union};")
            seen.add(core.__name__)
            out.append("")

    for cls in dataclasses:
        out.extend(self._generate_interface_from_dataclass(cls))
        out.append("")

    return "\n".join(out)
|
||||||
|
|
||||||
|
def _generate_header(self) -> List[str]:
    """Generate file header.

    Returns the JSDoc-style banner marking the file as machine-generated,
    one output line per list element, ending with a blank separator line.
    """
    return [
        "/**",
        " * TypeScript Types - GENERATED FILE",
        " *",
        " * Do not edit directly. Regenerate using modelgen.",
        " */",
        "",
    ]
|
||||||
|
|
||||||
|
def _generate_interface_from_definition(
    self, model_def: ModelDefinition
) -> List[str]:
    """Render one exported interface from a ModelDefinition."""
    body = [
        f"  {field.name}: {self._resolve_type(field.type_hint, field.optional)};"
        for field in model_def.fields
    ]
    return [f"export interface {model_def.name} {{", *body, "}"]
|
||||||
|
|
||||||
|
def _generate_interface_from_dataclass(self, cls: type) -> List[str]:
    """Render one exported interface mirroring a dataclass."""
    rows = [f"export interface {cls.__name__} {{"]
    for attr, annotation in get_type_hints(cls).items():
        if attr.startswith("_"):
            # Private attributes stay out of the public TS surface.
            continue
        rows.append(f"  {attr}: {self._resolve_type(annotation, False)};")
    rows.append("}")
    return rows
|
||||||
|
|
||||||
|
def _resolve_type(self, type_hint: Any, optional: bool) -> str:
    """Resolve a Python annotation to a TypeScript type expression."""
    core, inner_optional = unwrap_optional(type_hint)
    optional = optional or inner_optional

    # Progressively less specific lookups: container origin, type name,
    # then the type object itself; Enum subclasses use the enum handler.
    resolver = TS_RESOLVERS.get(get_origin_name(core))
    if resolver is None:
        resolver = TS_RESOLVERS.get(get_type_name(core))
    if resolver is None:
        resolver = TS_RESOLVERS.get(core)
    if resolver is None and isinstance(core, type) and issubclass(core, Enum):
        resolver = TS_RESOLVERS["enum"]

    rendered = resolver(core) if resolver else "string"
    # Optionality is expressed as a null union rather than a `?` modifier.
    return f"{rendered} | null" if optional else rendered
|
||||||
72
tools/helpers.py
Normal file
72
tools/helpers.py
Normal file
@@ -0,0 +1,72 @@
|
|||||||
|
"""
|
||||||
|
Type Helpers
|
||||||
|
|
||||||
|
Utilities for type introspection and resolution.
|
||||||
|
Used by generators and loaders.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import dataclasses as dc
|
||||||
|
from enum import Enum
|
||||||
|
from typing import Any, Union, get_args, get_origin
|
||||||
|
|
||||||
|
|
||||||
|
def unwrap_optional(type_hint: Any) -> tuple[Any, bool]:
    """Unwrap Optional[T] -> (T, True); non-optional hints -> (hint, False).

    Only unions that actually contain None are reported optional; a plain
    Union[int, str] was previously misreported as (int, True). For
    Optional[Union[...]] the first non-None member is returned.
    """
    if get_origin(type_hint) is Union:
        args = get_args(type_hint)
        non_none = [a for a in args if a is not type(None)]
        # Optional only when NoneType was actually one of the members.
        has_none = len(non_none) != len(args)
        # Degenerate Union[None] falls back to str, matching prior behavior.
        return (non_none[0] if non_none else str, has_none)
    return (type_hint, False)
|
||||||
|
|
||||||
|
|
||||||
|
def get_origin_name(type_hint: Any) -> str | None:
|
||||||
|
"""Get origin type name: 'dict', 'list', or None."""
|
||||||
|
origin = get_origin(type_hint)
|
||||||
|
if origin is dict:
|
||||||
|
return "dict"
|
||||||
|
if origin is list:
|
||||||
|
return "list"
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def get_type_name(type_hint: Any) -> str | None:
|
||||||
|
"""Get type name for special types like UUID, datetime."""
|
||||||
|
if hasattr(type_hint, "__name__"):
|
||||||
|
return type_hint.__name__
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def get_list_inner(type_hint: Any) -> str:
    """Name the element type of List[T] for scalar T; fall back to 'str'."""
    args = get_args(type_hint)
    head = args[0] if args else None
    if head in (str, int, float, bool):
        return {str: "str", int: "int", float: "float", bool: "bool"}[head]
    # Bare list, or a non-scalar element type: degrade to "str".
    return "str"
|
||||||
|
|
||||||
|
|
||||||
|
def get_field_default(field: dc.Field) -> Any:
    """Return the field's declared default, or dc.MISSING when it has none.

    Equivalent to reading field.default directly: dataclasses already use
    dc.MISSING as the no-default sentinel. default_factory is deliberately
    not consulted here.
    """
    return field.default
|
||||||
|
|
||||||
|
|
||||||
|
def format_opts(optional: bool, extra: list[str] | None = None) -> str:
|
||||||
|
"""Format field options string for Django."""
|
||||||
|
parts = []
|
||||||
|
if optional:
|
||||||
|
parts.append("null=True, blank=True")
|
||||||
|
if extra:
|
||||||
|
parts.extend(extra)
|
||||||
|
return ", ".join(parts)
|
||||||
|
|
||||||
|
|
||||||
|
def is_enum(type_hint: Any) -> bool:
    """True when the (possibly Optional-wrapped) hint is an Enum subclass."""
    core, _ = unwrap_optional(type_hint)
    return isinstance(core, type) and issubclass(core, Enum)
|
||||||
|
|
||||||
|
|
||||||
|
def get_enum_values(enum_class: type) -> list[tuple[str, str]]:
    """Return (member name, member value) pairs in definition order."""
    return [(member.name, member.value) for member in enum_class]
|
||||||
37
tools/loader/__init__.py
Normal file
37
tools/loader/__init__.py
Normal file
@@ -0,0 +1,37 @@
|
|||||||
|
"""
|
||||||
|
Loader - Input source handlers for modelgen.
|
||||||
|
|
||||||
|
Supported loaders:
|
||||||
|
- ConfigLoader: Load from soleprint config.json
|
||||||
|
- SchemaLoader: Load from Python dataclasses in schema/ folder
|
||||||
|
- Extractors: Extract from existing codebases (Django, SQLAlchemy, Prisma)
|
||||||
|
"""
|
||||||
|
|
||||||
|
from .config import ConfigLoader, load_config
|
||||||
|
from .extract import EXTRACTORS, BaseExtractor, DjangoExtractor
|
||||||
|
from .schema import (
|
||||||
|
EnumDefinition,
|
||||||
|
FieldDefinition,
|
||||||
|
GrpcServiceDefinition,
|
||||||
|
ModelDefinition,
|
||||||
|
SchemaLoader,
|
||||||
|
load_schema,
|
||||||
|
)
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
# Config loader
|
||||||
|
"ConfigLoader",
|
||||||
|
"load_config",
|
||||||
|
# Schema loader
|
||||||
|
"SchemaLoader",
|
||||||
|
"load_schema",
|
||||||
|
# Model definitions
|
||||||
|
"ModelDefinition",
|
||||||
|
"FieldDefinition",
|
||||||
|
"EnumDefinition",
|
||||||
|
"GrpcServiceDefinition",
|
||||||
|
# Extractors
|
||||||
|
"BaseExtractor",
|
||||||
|
"DjangoExtractor",
|
||||||
|
"EXTRACTORS",
|
||||||
|
]
|
||||||
116
tools/loader/config.py
Normal file
116
tools/loader/config.py
Normal file
@@ -0,0 +1,116 @@
|
|||||||
|
"""
|
||||||
|
Configuration Loader
|
||||||
|
|
||||||
|
Loads and validates framework configuration files (soleprint config.json style).
|
||||||
|
"""
|
||||||
|
|
||||||
|
import json
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any, Dict, List, Optional
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class FrameworkConfig:
    """Framework metadata parsed from the config's top-level "framework" key."""

    name: str  # Human-readable framework name
    slug: str  # URL-friendly identifier
    version: str  # Version string (format not validated here)
    description: str  # Long-form description
    tagline: str  # Short one-line pitch
    icon: str  # Icon identifier (presumably a name or emoji -- verify against config)
    hub_port: int  # Port the hub service listens on
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class SystemConfig:
    """One entry of the config's "systems" list."""

    key: str  # Stable lookup key (e.g. "data_flow"), used by get_system()
    name: str  # System name
    slug: str = ""  # URL-friendly identifier (optional in config)
    title: str = ""  # Display title
    tagline: str = ""  # Short description line
    icon: str = ""  # Icon identifier
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class ComponentConfig:
    """One entry of the config's "components" mapping."""

    name: str  # Component name (used as attribute name in generated models)
    title: str  # Display/class title (used as class name in generated models)
    description: str  # One-line description
    plural: Optional[str] = None  # Plural form, used for list-valued fields
    formula: Optional[str] = None  # Composition formula, set on composed components
|
||||||
|
|
||||||
|
|
||||||
|
class ConfigLoader:
    """Loads and parses framework configuration.

    Usage: ``ConfigLoader(path).load()`` — ``load()`` returns self so the
    loader can be built and populated in one expression. Parsed data is
    exposed via ``framework``, ``systems``, and the nested ``components``
    mapping (``components[system_key][component_key]``).
    """

    def __init__(self, config_path: Path):
        # Raw JSON is kept around for callers that need unparsed access.
        self.config_path = Path(config_path)
        self.raw_config: Dict[str, Any] = {}
        self.framework: Optional[FrameworkConfig] = None
        self.systems: List[SystemConfig] = []
        # Keyed first by "shared" or a system key, then by component key.
        self.components: Dict[str, Dict[str, ComponentConfig]] = {}

    def load(self) -> "ConfigLoader":
        """Load configuration from file and parse all sections.

        Returns self to allow fluent construction. Raises OSError if the
        file is missing and json.JSONDecodeError on malformed JSON.
        """
        with open(self.config_path) as f:
            self.raw_config = json.load(f)

        self._parse_framework()
        self._parse_systems()
        self._parse_components()

        return self

    def _parse_framework(self):
        """Parse framework metadata (raises KeyError if "framework" is absent)."""
        fw = self.raw_config["framework"]
        self.framework = FrameworkConfig(**fw)

    def _parse_systems(self):
        """Parse system configurations from the required "systems" list."""
        # NOTE: the loop variable shadows the stdlib `sys` module name
        # locally; no stdlib usage occurs inside this method.
        for sys in self.raw_config["systems"]:
            self.systems.append(SystemConfig(**sys))

    def _parse_components(self):
        """Parse component configurations into the nested components mapping."""
        comps = self.raw_config["components"]

        # Shared components
        self.components["shared"] = {}
        for key, value in comps.get("shared", {}).items():
            self.components["shared"][key] = ComponentConfig(**value)

        # System-specific components; the system keys are hard-coded to the
        # three systems this framework defines.
        for system_key in ["data_flow", "documentation", "execution"]:
            self.components[system_key] = {}
            for comp_key, comp_value in comps.get(system_key, {}).items():
                self.components[system_key][comp_key] = ComponentConfig(**comp_value)

    def get_system(self, key: str) -> Optional[SystemConfig]:
        """Get system config by key, or None when no system matches."""
        for sys in self.systems:
            if sys.key == key:
                return sys
        return None

    def get_component(
        self, system_key: str, component_key: str
    ) -> Optional[ComponentConfig]:
        """Get a system-specific component config, or None if absent."""
        return self.components.get(system_key, {}).get(component_key)

    def get_shared_component(self, key: str) -> Optional[ComponentConfig]:
        """Get a shared component config, or None if absent."""
        return self.components.get("shared", {}).get(key)
|
||||||
|
|
||||||
|
|
||||||
|
def load_config(config_path: str | Path) -> ConfigLoader:
    """Build a ConfigLoader for *config_path* and load it immediately."""
    return ConfigLoader(config_path).load()
|
||||||
20
tools/loader/extract/__init__.py
Normal file
20
tools/loader/extract/__init__.py
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
"""
|
||||||
|
Extractors - Extract model definitions from existing codebases.
|
||||||
|
|
||||||
|
Supported frameworks:
|
||||||
|
- Django: Extract from Django ORM models
|
||||||
|
- SQLAlchemy: Extract from SQLAlchemy models (planned)
|
||||||
|
- Prisma: Extract from Prisma schema (planned)
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Dict, Type
|
||||||
|
|
||||||
|
from .base import BaseExtractor
|
||||||
|
from .django import DjangoExtractor
|
||||||
|
|
||||||
|
# Registry of available extractors
|
||||||
|
EXTRACTORS: Dict[str, Type[BaseExtractor]] = {
|
||||||
|
"django": DjangoExtractor,
|
||||||
|
}
|
||||||
|
|
||||||
|
__all__ = ["BaseExtractor", "DjangoExtractor", "EXTRACTORS"]
|
||||||
38
tools/loader/extract/base.py
Normal file
38
tools/loader/extract/base.py
Normal file
@@ -0,0 +1,38 @@
|
|||||||
|
"""
|
||||||
|
Base Extractor
|
||||||
|
|
||||||
|
Abstract base class for model extractors.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from abc import ABC, abstractmethod
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import List
|
||||||
|
|
||||||
|
from ..schema import EnumDefinition, ModelDefinition
|
||||||
|
|
||||||
|
|
||||||
|
class BaseExtractor(ABC):
    """Abstract base for codebase model extractors.

    Subclasses inspect an existing codebase (e.g. Django models) at
    `source_path` and convert it into framework-neutral ModelDefinition /
    EnumDefinition objects that the generators consume.
    """

    def __init__(self, source_path: Path):
        # Root of the codebase to scan; normalized to a Path.
        self.source_path = Path(source_path)

    @abstractmethod
    def extract(self) -> tuple[List[ModelDefinition], List[EnumDefinition]]:
        """
        Extract model definitions from source codebase.

        Returns:
            Tuple of (models, enums)
        """
        pass

    @abstractmethod
    def detect(self) -> bool:
        """
        Detect if this extractor can handle the source path.

        Returns:
            True if this extractor can handle the source
        """
        pass
|
||||||
237
tools/loader/extract/django.py
Normal file
237
tools/loader/extract/django.py
Normal file
@@ -0,0 +1,237 @@
|
|||||||
|
"""
|
||||||
|
Django Extractor
|
||||||
|
|
||||||
|
Extracts model definitions from Django ORM models.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import ast
|
||||||
|
import sys
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any, List, Optional
|
||||||
|
|
||||||
|
from ..schema import EnumDefinition, FieldDefinition, ModelDefinition
|
||||||
|
from .base import BaseExtractor
|
||||||
|
|
||||||
|
# Django field type mappings to Python types
|
||||||
|
DJANGO_FIELD_TYPES = {
|
||||||
|
"CharField": str,
|
||||||
|
"TextField": str,
|
||||||
|
"EmailField": str,
|
||||||
|
"URLField": str,
|
||||||
|
"SlugField": str,
|
||||||
|
"UUIDField": "UUID",
|
||||||
|
"IntegerField": int,
|
||||||
|
"BigIntegerField": "bigint",
|
||||||
|
"SmallIntegerField": int,
|
||||||
|
"PositiveIntegerField": int,
|
||||||
|
"FloatField": float,
|
||||||
|
"DecimalField": float,
|
||||||
|
"BooleanField": bool,
|
||||||
|
"NullBooleanField": bool,
|
||||||
|
"DateField": "datetime",
|
||||||
|
"DateTimeField": "datetime",
|
||||||
|
"TimeField": "datetime",
|
||||||
|
"JSONField": "dict",
|
||||||
|
"ForeignKey": "FK",
|
||||||
|
"OneToOneField": "FK",
|
||||||
|
"ManyToManyField": "M2M",
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
class DjangoExtractor(BaseExtractor):
|
||||||
|
"""Extracts models from Django ORM."""
|
||||||
|
|
||||||
|
def detect(self) -> bool:
|
||||||
|
"""Check if this is a Django project."""
|
||||||
|
# Look for manage.py or settings.py
|
||||||
|
manage_py = self.source_path / "manage.py"
|
||||||
|
settings_py = self.source_path / "settings.py"
|
||||||
|
|
||||||
|
if manage_py.exists():
|
||||||
|
return True
|
||||||
|
|
||||||
|
# Check for Django imports in any models.py
|
||||||
|
for models_file in self.source_path.rglob("models.py"):
|
||||||
|
content = models_file.read_text()
|
||||||
|
if "from django.db import models" in content:
|
||||||
|
return True
|
||||||
|
|
||||||
|
return settings_py.exists()
|
||||||
|
|
||||||
|
def extract(self) -> tuple[List[ModelDefinition], List[EnumDefinition]]:
|
||||||
|
"""Extract Django models using AST parsing."""
|
||||||
|
models = []
|
||||||
|
enums = []
|
||||||
|
|
||||||
|
# Find all models.py files
|
||||||
|
for models_file in self.source_path.rglob("models.py"):
|
||||||
|
file_models, file_enums = self._extract_from_file(models_file)
|
||||||
|
models.extend(file_models)
|
||||||
|
enums.extend(file_enums)
|
||||||
|
|
||||||
|
return models, enums
|
||||||
|
|
||||||
|
def _extract_from_file(
|
||||||
|
self, file_path: Path
|
||||||
|
) -> tuple[List[ModelDefinition], List[EnumDefinition]]:
|
||||||
|
"""Extract models from a single models.py file."""
|
||||||
|
models = []
|
||||||
|
enums = []
|
||||||
|
|
||||||
|
content = file_path.read_text()
|
||||||
|
tree = ast.parse(content)
|
||||||
|
|
||||||
|
for node in ast.walk(tree):
|
||||||
|
if isinstance(node, ast.ClassDef):
|
||||||
|
# Check if it inherits from models.Model
|
||||||
|
if self._is_django_model(node):
|
||||||
|
model_def = self._parse_model_class(node)
|
||||||
|
if model_def:
|
||||||
|
models.append(model_def)
|
||||||
|
# Check if it's a TextChoices/IntegerChoices enum
|
||||||
|
elif self._is_django_choices(node):
|
||||||
|
enum_def = self._parse_choices_class(node)
|
||||||
|
if enum_def:
|
||||||
|
enums.append(enum_def)
|
||||||
|
|
||||||
|
return models, enums
|
||||||
|
|
||||||
|
def _is_django_model(self, node: ast.ClassDef) -> bool:
|
||||||
|
"""Check if class inherits from models.Model."""
|
||||||
|
for base in node.bases:
|
||||||
|
if isinstance(base, ast.Attribute):
|
||||||
|
if base.attr == "Model":
|
||||||
|
return True
|
||||||
|
elif isinstance(base, ast.Name):
|
||||||
|
if base.id in ("Model", "AbstractUser", "AbstractBaseUser"):
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
def _is_django_choices(self, node: ast.ClassDef) -> bool:
|
||||||
|
"""Check if class is a Django TextChoices/IntegerChoices."""
|
||||||
|
for base in node.bases:
|
||||||
|
if isinstance(base, ast.Attribute):
|
||||||
|
if base.attr in ("TextChoices", "IntegerChoices"):
|
||||||
|
return True
|
||||||
|
elif isinstance(base, ast.Name):
|
||||||
|
if base.id in ("TextChoices", "IntegerChoices"):
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
def _parse_model_class(self, node: ast.ClassDef) -> Optional[ModelDefinition]:
|
||||||
|
"""Parse a Django model class into ModelDefinition."""
|
||||||
|
fields = []
|
||||||
|
|
||||||
|
for item in node.body:
|
||||||
|
if isinstance(item, ast.Assign):
|
||||||
|
field_def = self._parse_field_assignment(item)
|
||||||
|
if field_def:
|
||||||
|
fields.append(field_def)
|
||||||
|
elif isinstance(item, ast.AnnAssign):
|
||||||
|
# Handle annotated assignments (Django 4.0+ style)
|
||||||
|
field_def = self._parse_annotated_field(item)
|
||||||
|
if field_def:
|
||||||
|
fields.append(field_def)
|
||||||
|
|
||||||
|
# Get docstring
|
||||||
|
docstring = ast.get_docstring(node)
|
||||||
|
|
||||||
|
return ModelDefinition(
|
||||||
|
name=node.name,
|
||||||
|
fields=fields,
|
||||||
|
docstring=docstring,
|
||||||
|
)
|
||||||
|
|
||||||
|
def _parse_field_assignment(self, node: ast.Assign) -> Optional[FieldDefinition]:
|
||||||
|
"""Parse a field assignment like: name = models.CharField(...)"""
|
||||||
|
if not node.targets or not isinstance(node.targets[0], ast.Name):
|
||||||
|
return None
|
||||||
|
|
||||||
|
field_name = node.targets[0].id
|
||||||
|
|
||||||
|
# Skip private fields and Meta class
|
||||||
|
if field_name.startswith("_") or field_name == "Meta":
|
||||||
|
return None
|
||||||
|
|
||||||
|
# Parse the field call
|
||||||
|
if isinstance(node.value, ast.Call):
|
||||||
|
return self._parse_field_call(field_name, node.value)
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
def _parse_annotated_field(self, node: ast.AnnAssign) -> Optional[FieldDefinition]:
|
||||||
|
"""Parse an annotated field assignment."""
|
||||||
|
if not isinstance(node.target, ast.Name):
|
||||||
|
return None
|
||||||
|
|
||||||
|
field_name = node.target.id
|
||||||
|
|
||||||
|
if field_name.startswith("_"):
|
||||||
|
return None
|
||||||
|
|
||||||
|
if node.value and isinstance(node.value, ast.Call):
|
||||||
|
return self._parse_field_call(field_name, node.value)
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
def _parse_field_call(
|
||||||
|
self, field_name: str, call: ast.Call
|
||||||
|
) -> Optional[FieldDefinition]:
|
||||||
|
"""Parse a Django field call like models.CharField(max_length=100)."""
|
||||||
|
# Get field type name
|
||||||
|
field_type_name = None
|
||||||
|
|
||||||
|
if isinstance(call.func, ast.Attribute):
|
||||||
|
field_type_name = call.func.attr
|
||||||
|
elif isinstance(call.func, ast.Name):
|
||||||
|
field_type_name = call.func.id
|
||||||
|
|
||||||
|
if not field_type_name:
|
||||||
|
return None
|
||||||
|
|
||||||
|
# Map to Python type
|
||||||
|
python_type = DJANGO_FIELD_TYPES.get(field_type_name, str)
|
||||||
|
|
||||||
|
# Check for null=True
|
||||||
|
optional = False
|
||||||
|
default = None
|
||||||
|
|
||||||
|
for keyword in call.keywords:
|
||||||
|
if keyword.arg == "null":
|
||||||
|
if isinstance(keyword.value, ast.Constant):
|
||||||
|
optional = keyword.value.value is True
|
||||||
|
elif keyword.arg == "default":
|
||||||
|
if isinstance(keyword.value, ast.Constant):
|
||||||
|
default = keyword.value.value
|
||||||
|
|
||||||
|
return FieldDefinition(
|
||||||
|
name=field_name,
|
||||||
|
type_hint=python_type,
|
||||||
|
default=default if default is not None else None,
|
||||||
|
optional=optional,
|
||||||
|
)
|
||||||
|
|
||||||
|
def _parse_choices_class(self, node: ast.ClassDef) -> Optional[EnumDefinition]:
|
||||||
|
"""Parse a Django TextChoices/IntegerChoices class."""
|
||||||
|
values = []
|
||||||
|
|
||||||
|
for item in node.body:
|
||||||
|
if isinstance(item, ast.Assign):
|
||||||
|
if item.targets and isinstance(item.targets[0], ast.Name):
|
||||||
|
name = item.targets[0].id
|
||||||
|
if name.isupper(): # Enum values are typically uppercase
|
||||||
|
# Get the value
|
||||||
|
value = name.lower() # Default to lowercase name
|
||||||
|
if isinstance(item.value, ast.Constant):
|
||||||
|
value = str(item.value.value)
|
||||||
|
elif isinstance(item.value, ast.Tuple) and item.value.elts:
|
||||||
|
# TextChoices: NAME = "value", "Label"
|
||||||
|
if isinstance(item.value.elts[0], ast.Constant):
|
||||||
|
value = str(item.value.elts[0].value)
|
||||||
|
|
||||||
|
values.append((name, value))
|
||||||
|
|
||||||
|
if not values:
|
||||||
|
return None
|
||||||
|
|
||||||
|
return EnumDefinition(name=node.name, values=values)
|
||||||
169
tools/loader/schema.py
Normal file
169
tools/loader/schema.py
Normal file
@@ -0,0 +1,169 @@
|
|||||||
|
"""
|
||||||
|
Schema Loader
|
||||||
|
|
||||||
|
Loads Python dataclasses from a schema/ folder.
|
||||||
|
Expects the folder to have an __init__.py that exports:
|
||||||
|
- DATACLASSES: List of dataclass types to generate
|
||||||
|
- ENUMS: List of Enum types to include
|
||||||
|
- GRPC_MESSAGES: (optional) List of gRPC message types
|
||||||
|
- GRPC_SERVICE: (optional) gRPC service definition dict
|
||||||
|
"""
|
||||||
|
|
||||||
|
import dataclasses as dc
|
||||||
|
import importlib.util
|
||||||
|
import sys
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from enum import Enum
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any, Dict, List, Optional, Type, get_type_hints
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class FieldDefinition:
|
||||||
|
"""Represents a model field."""
|
||||||
|
|
||||||
|
name: str
|
||||||
|
type_hint: Any
|
||||||
|
default: Any = dc.MISSING
|
||||||
|
optional: bool = False
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class ModelDefinition:
|
||||||
|
"""Represents a model/dataclass."""
|
||||||
|
|
||||||
|
name: str
|
||||||
|
fields: List[FieldDefinition]
|
||||||
|
docstring: Optional[str] = None
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class EnumDefinition:
|
||||||
|
"""Represents an enum."""
|
||||||
|
|
||||||
|
name: str
|
||||||
|
values: List[tuple[str, str]] # (name, value) pairs
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class GrpcServiceDefinition:
|
||||||
|
"""Represents a gRPC service."""
|
||||||
|
|
||||||
|
package: str
|
||||||
|
name: str
|
||||||
|
methods: List[Dict[str, Any]]
|
||||||
|
|
||||||
|
|
||||||
|
class SchemaLoader:
|
||||||
|
"""Loads model definitions from Python dataclasses in schema/ folder."""
|
||||||
|
|
||||||
|
def __init__(self, schema_path: Path):
|
||||||
|
self.schema_path = Path(schema_path)
|
||||||
|
self.models: List[ModelDefinition] = []
|
||||||
|
self.enums: List[EnumDefinition] = []
|
||||||
|
self.grpc_messages: List[ModelDefinition] = []
|
||||||
|
self.grpc_service: Optional[GrpcServiceDefinition] = None
|
||||||
|
|
||||||
|
def load(self) -> "SchemaLoader":
|
||||||
|
"""Load schema definitions from the schema folder."""
|
||||||
|
init_path = self.schema_path / "__init__.py"
|
||||||
|
|
||||||
|
if not init_path.exists():
|
||||||
|
raise FileNotFoundError(f"Schema folder must have __init__.py: {init_path}")
|
||||||
|
|
||||||
|
# Import the schema module
|
||||||
|
module = self._import_module(init_path)
|
||||||
|
|
||||||
|
# Extract DATACLASSES
|
||||||
|
dataclasses = getattr(module, "DATACLASSES", [])
|
||||||
|
for cls in dataclasses:
|
||||||
|
self.models.append(self._parse_dataclass(cls))
|
||||||
|
|
||||||
|
# Extract ENUMS
|
||||||
|
enums = getattr(module, "ENUMS", [])
|
||||||
|
for enum_cls in enums:
|
||||||
|
self.enums.append(self._parse_enum(enum_cls))
|
||||||
|
|
||||||
|
# Extract GRPC_MESSAGES (optional)
|
||||||
|
grpc_messages = getattr(module, "GRPC_MESSAGES", [])
|
||||||
|
for cls in grpc_messages:
|
||||||
|
self.grpc_messages.append(self._parse_dataclass(cls))
|
||||||
|
|
||||||
|
# Extract GRPC_SERVICE (optional)
|
||||||
|
grpc_service = getattr(module, "GRPC_SERVICE", None)
|
||||||
|
if grpc_service:
|
||||||
|
self.grpc_service = GrpcServiceDefinition(
|
||||||
|
package=grpc_service.get("package", "service"),
|
||||||
|
name=grpc_service.get("name", "Service"),
|
||||||
|
methods=grpc_service.get("methods", []),
|
||||||
|
)
|
||||||
|
|
||||||
|
return self
|
||||||
|
|
||||||
|
def _import_module(self, path: Path):
|
||||||
|
"""Import a Python module from a file path."""
|
||||||
|
spec = importlib.util.spec_from_file_location("schema", path)
|
||||||
|
if spec is None or spec.loader is None:
|
||||||
|
raise ImportError(f"Could not load module from {path}")
|
||||||
|
|
||||||
|
module = importlib.util.module_from_spec(spec)
|
||||||
|
sys.modules["schema"] = module
|
||||||
|
spec.loader.exec_module(module)
|
||||||
|
return module
|
||||||
|
|
||||||
|
def _parse_dataclass(self, cls: Type) -> ModelDefinition:
|
||||||
|
"""Parse a dataclass into a ModelDefinition."""
|
||||||
|
hints = get_type_hints(cls)
|
||||||
|
fields_info = {f.name: f for f in dc.fields(cls)}
|
||||||
|
|
||||||
|
fields = []
|
||||||
|
for name, type_hint in hints.items():
|
||||||
|
if name.startswith("_"):
|
||||||
|
continue
|
||||||
|
|
||||||
|
field_info = fields_info.get(name)
|
||||||
|
default = dc.MISSING
|
||||||
|
if field_info:
|
||||||
|
if field_info.default is not dc.MISSING:
|
||||||
|
default = field_info.default
|
||||||
|
elif field_info.default_factory is not dc.MISSING:
|
||||||
|
default = field_info.default_factory
|
||||||
|
|
||||||
|
# Check if optional (Union with None)
|
||||||
|
optional = self._is_optional(type_hint)
|
||||||
|
|
||||||
|
fields.append(
|
||||||
|
FieldDefinition(
|
||||||
|
name=name,
|
||||||
|
type_hint=type_hint,
|
||||||
|
default=default,
|
||||||
|
optional=optional,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
return ModelDefinition(
|
||||||
|
name=cls.__name__,
|
||||||
|
fields=fields,
|
||||||
|
docstring=cls.__doc__,
|
||||||
|
)
|
||||||
|
|
||||||
|
def _parse_enum(self, enum_cls: Type[Enum]) -> EnumDefinition:
|
||||||
|
"""Parse an Enum into an EnumDefinition."""
|
||||||
|
values = [(m.name, m.value) for m in enum_cls]
|
||||||
|
return EnumDefinition(name=enum_cls.__name__, values=values)
|
||||||
|
|
||||||
|
def _is_optional(self, type_hint: Any) -> bool:
|
||||||
|
"""Check if a type hint is Optional (Union with None)."""
|
||||||
|
from typing import Union, get_args, get_origin
|
||||||
|
|
||||||
|
origin = get_origin(type_hint)
|
||||||
|
if origin is Union:
|
||||||
|
args = get_args(type_hint)
|
||||||
|
return type(None) in args
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def load_schema(schema_path: str | Path) -> SchemaLoader:
|
||||||
|
"""Load schema definitions from folder."""
|
||||||
|
loader = SchemaLoader(schema_path)
|
||||||
|
return loader.load()
|
||||||
77
tools/model_generator.py
Normal file
77
tools/model_generator.py
Normal file
@@ -0,0 +1,77 @@
|
|||||||
|
"""
|
||||||
|
Model Generator
|
||||||
|
|
||||||
|
Orchestrates model generation from various sources to various formats.
|
||||||
|
Delegates to loaders for input and generators for output.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Dict, Type
|
||||||
|
|
||||||
|
from .generator import GENERATORS, BaseGenerator
|
||||||
|
from .loader import ConfigLoader
|
||||||
|
|
||||||
|
|
||||||
|
class ModelGenerator:
|
||||||
|
"""
|
||||||
|
Generates typed models from configuration.
|
||||||
|
|
||||||
|
This is the main entry point for model generation.
|
||||||
|
Delegates to format-specific generators.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
config: ConfigLoader,
|
||||||
|
output_path: Path,
|
||||||
|
output_format: str = "pydantic",
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Initialize the generator.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
config: Loaded configuration
|
||||||
|
output_path: Exact path where to write (file or directory depending on format)
|
||||||
|
output_format: Output format (pydantic, django, prisma, typescript, protobuf)
|
||||||
|
"""
|
||||||
|
self.config = config
|
||||||
|
self.output_path = Path(output_path)
|
||||||
|
self.output_format = output_format
|
||||||
|
|
||||||
|
if output_format not in GENERATORS:
|
||||||
|
raise ValueError(
|
||||||
|
f"Unknown output format: {output_format}. "
|
||||||
|
f"Available: {list(GENERATORS.keys())}"
|
||||||
|
)
|
||||||
|
|
||||||
|
self.generator = GENERATORS[output_format]()
|
||||||
|
|
||||||
|
def generate(self) -> Path:
|
||||||
|
"""
|
||||||
|
Generate models to the specified output path.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Path to the generated file/directory
|
||||||
|
"""
|
||||||
|
# Determine output file path
|
||||||
|
if self.output_path.suffix:
|
||||||
|
# User specified a file path
|
||||||
|
output_file = self.output_path
|
||||||
|
else:
|
||||||
|
# User specified a directory, add default filename
|
||||||
|
output_file = (
|
||||||
|
self.output_path / f"__init__{self.generator.file_extension()}"
|
||||||
|
)
|
||||||
|
|
||||||
|
self.generator.generate(self.config, output_file)
|
||||||
|
print(f"Generated {self.output_format} models: {output_file}")
|
||||||
|
return output_file
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def available_formats(cls) -> list:
|
||||||
|
"""Return list of available output formats."""
|
||||||
|
return list(GENERATORS.keys())
|
||||||
|
|
||||||
|
|
||||||
|
# Re-export for backwards compatibility
|
||||||
|
WRITERS = GENERATORS
|
||||||
41
tools/modelgen/__init__.py
Normal file
41
tools/modelgen/__init__.py
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
"""
|
||||||
|
Modelgen - Generic Model Generation Tool
|
||||||
|
|
||||||
|
Generates typed models from various sources to various output formats.
|
||||||
|
|
||||||
|
Input sources:
|
||||||
|
- Configuration files (soleprint config.json style)
|
||||||
|
- Python dataclasses in schema/ folder
|
||||||
|
- Existing codebases: Django, SQLAlchemy, Prisma (for extraction)
|
||||||
|
|
||||||
|
Output formats:
|
||||||
|
- pydantic: Pydantic BaseModel classes
|
||||||
|
- django: Django ORM models
|
||||||
|
- typescript: TypeScript interfaces
|
||||||
|
- protobuf: Protocol Buffer definitions
|
||||||
|
- prisma: Prisma schema
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
python -m soleprint.station.tools.modelgen from-config -c config.json -o models.py
|
||||||
|
python -m soleprint.station.tools.modelgen from-schema -o models/ --targets pydantic,typescript
|
||||||
|
python -m soleprint.station.tools.modelgen extract --source /path/to/django --targets pydantic
|
||||||
|
python -m soleprint.station.tools.modelgen list-formats
|
||||||
|
"""
|
||||||
|
|
||||||
|
__version__ = "0.2.0"
|
||||||
|
|
||||||
|
from .generator import GENERATORS, BaseGenerator
|
||||||
|
from .loader import ConfigLoader, load_config
|
||||||
|
from .model_generator import ModelGenerator
|
||||||
|
|
||||||
|
# Backwards compatibility
|
||||||
|
WRITERS = GENERATORS
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"ModelGenerator",
|
||||||
|
"ConfigLoader",
|
||||||
|
"load_config",
|
||||||
|
"GENERATORS",
|
||||||
|
"WRITERS",
|
||||||
|
"BaseGenerator",
|
||||||
|
]
|
||||||
290
tools/modelgen/__main__.py
Normal file
290
tools/modelgen/__main__.py
Normal file
@@ -0,0 +1,290 @@
|
|||||||
|
"""
|
||||||
|
Modelgen - Generic Model Generation Tool
|
||||||
|
|
||||||
|
Generates typed models from various sources to various formats.
|
||||||
|
|
||||||
|
Input sources:
|
||||||
|
- from-config: Configuration files (soleprint config.json style)
|
||||||
|
- from-schema: Python dataclasses in schema/ folder
|
||||||
|
- extract: Existing codebases (Django, SQLAlchemy, Prisma)
|
||||||
|
|
||||||
|
Output formats:
|
||||||
|
- pydantic: Pydantic BaseModel classes
|
||||||
|
- django: Django ORM models
|
||||||
|
- typescript: TypeScript interfaces
|
||||||
|
- protobuf: Protocol Buffer definitions
|
||||||
|
- prisma: Prisma schema
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
python -m soleprint.station.tools.modelgen --help
|
||||||
|
python -m soleprint.station.tools.modelgen from-config -c config.json -o models.py
|
||||||
|
python -m soleprint.station.tools.modelgen from-schema -o models/ --targets pydantic,typescript
|
||||||
|
python -m soleprint.station.tools.modelgen extract --source /path/to/django --targets pydantic
|
||||||
|
"""
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
import sys
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from .generator import GENERATORS
|
||||||
|
|
||||||
|
|
||||||
|
def cmd_from_config(args):
|
||||||
|
"""Generate models from a configuration file (soleprint config.json style)."""
|
||||||
|
from .loader import load_config
|
||||||
|
from .model_generator import ModelGenerator
|
||||||
|
|
||||||
|
config_path = Path(args.config)
|
||||||
|
if not config_path.exists():
|
||||||
|
print(f"Error: Config file not found: {config_path}", file=sys.stderr)
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
output_path = Path(args.output)
|
||||||
|
|
||||||
|
print(f"Loading config: {config_path}")
|
||||||
|
config = load_config(config_path)
|
||||||
|
|
||||||
|
print(f"Generating {args.format} models to: {output_path}")
|
||||||
|
generator = ModelGenerator(
|
||||||
|
config=config,
|
||||||
|
output_path=output_path,
|
||||||
|
output_format=args.format,
|
||||||
|
)
|
||||||
|
result_path = generator.generate()
|
||||||
|
|
||||||
|
print(f"Models generated: {result_path}")
|
||||||
|
|
||||||
|
|
||||||
|
def cmd_from_schema(args):
|
||||||
|
"""Generate models from Python dataclasses in schema/ folder."""
|
||||||
|
from .loader import load_schema
|
||||||
|
from .writer import write_file
|
||||||
|
|
||||||
|
# Determine schema path
|
||||||
|
schema_path = Path(args.schema) if args.schema else Path.cwd() / "schema"
|
||||||
|
|
||||||
|
if not schema_path.exists():
|
||||||
|
print(f"Error: Schema folder not found: {schema_path}", file=sys.stderr)
|
||||||
|
print(
|
||||||
|
"Create a schema/ folder with Python dataclasses and an __init__.py",
|
||||||
|
file=sys.stderr,
|
||||||
|
)
|
||||||
|
print("that exports DATACLASSES and ENUMS lists.", file=sys.stderr)
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
print(f"Loading schema: {schema_path}")
|
||||||
|
schema = load_schema(schema_path)
|
||||||
|
|
||||||
|
print(f"Found {len(schema.models)} models, {len(schema.enums)} enums")
|
||||||
|
|
||||||
|
# Parse targets
|
||||||
|
targets = [t.strip() for t in args.targets.split(",")]
|
||||||
|
output_dir = Path(args.output)
|
||||||
|
|
||||||
|
for target in targets:
|
||||||
|
if target not in GENERATORS:
|
||||||
|
print(f"Warning: Unknown target '{target}', skipping", file=sys.stderr)
|
||||||
|
continue
|
||||||
|
|
||||||
|
generator = GENERATORS[target]()
|
||||||
|
ext = generator.file_extension()
|
||||||
|
|
||||||
|
# Determine output filename (use target name to avoid overwrites)
|
||||||
|
if len(targets) == 1 and args.output.endswith(ext):
|
||||||
|
output_file = output_dir
|
||||||
|
else:
|
||||||
|
output_file = output_dir / f"models_{target}{ext}"
|
||||||
|
|
||||||
|
print(f"Generating {target} to: {output_file}")
|
||||||
|
generator.generate(schema, output_file)
|
||||||
|
|
||||||
|
print("Done!")
|
||||||
|
|
||||||
|
|
||||||
|
def cmd_extract(args):
|
||||||
|
"""Extract models from existing codebase."""
|
||||||
|
from .loader.extract import EXTRACTORS
|
||||||
|
|
||||||
|
source_path = Path(args.source)
|
||||||
|
if not source_path.exists():
|
||||||
|
print(f"Error: Source path not found: {source_path}", file=sys.stderr)
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
# Auto-detect or use specified framework
|
||||||
|
framework = args.framework
|
||||||
|
extractor = None
|
||||||
|
|
||||||
|
if framework == "auto":
|
||||||
|
for name, extractor_cls in EXTRACTORS.items():
|
||||||
|
ext = extractor_cls(source_path)
|
||||||
|
if ext.detect():
|
||||||
|
framework = name
|
||||||
|
extractor = ext
|
||||||
|
print(f"Detected framework: {framework}")
|
||||||
|
break
|
||||||
|
|
||||||
|
if not extractor:
|
||||||
|
print("Error: Could not auto-detect framework", file=sys.stderr)
|
||||||
|
print(f"Available frameworks: {list(EXTRACTORS.keys())}", file=sys.stderr)
|
||||||
|
sys.exit(1)
|
||||||
|
else:
|
||||||
|
if framework not in EXTRACTORS:
|
||||||
|
print(f"Error: Unknown framework: {framework}", file=sys.stderr)
|
||||||
|
print(f"Available: {list(EXTRACTORS.keys())}", file=sys.stderr)
|
||||||
|
sys.exit(1)
|
||||||
|
extractor = EXTRACTORS[framework](source_path)
|
||||||
|
|
||||||
|
print(f"Extracting from: {source_path}")
|
||||||
|
models, enums = extractor.extract()
|
||||||
|
|
||||||
|
print(f"Extracted {len(models)} models, {len(enums)} enums")
|
||||||
|
|
||||||
|
# Parse targets
|
||||||
|
targets = [t.strip() for t in args.targets.split(",")]
|
||||||
|
output_dir = Path(args.output)
|
||||||
|
|
||||||
|
for target in targets:
|
||||||
|
if target not in GENERATORS:
|
||||||
|
print(f"Warning: Unknown target '{target}', skipping", file=sys.stderr)
|
||||||
|
continue
|
||||||
|
|
||||||
|
generator = GENERATORS[target]()
|
||||||
|
ext = generator.file_extension()
|
||||||
|
|
||||||
|
# Determine output filename (use target name to avoid overwrites)
|
||||||
|
if len(targets) == 1 and args.output.endswith(ext):
|
||||||
|
output_file = output_dir
|
||||||
|
else:
|
||||||
|
output_file = output_dir / f"models_{target}{ext}"
|
||||||
|
|
||||||
|
print(f"Generating {target} to: {output_file}")
|
||||||
|
generator.generate((models, enums), output_file)
|
||||||
|
|
||||||
|
print("Done!")
|
||||||
|
|
||||||
|
|
||||||
|
def cmd_list_formats(args):
|
||||||
|
"""List available output formats."""
|
||||||
|
print("Available output formats:")
|
||||||
|
for fmt in GENERATORS.keys():
|
||||||
|
print(f" - {fmt}")
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
parser = argparse.ArgumentParser(
|
||||||
|
description="Modelgen - Generic Model Generation Tool",
|
||||||
|
formatter_class=argparse.RawDescriptionHelpFormatter,
|
||||||
|
)
|
||||||
|
|
||||||
|
subparsers = parser.add_subparsers(dest="command", required=True)
|
||||||
|
|
||||||
|
# Available formats for help text
|
||||||
|
formats = list(GENERATORS.keys())
|
||||||
|
formats_str = ", ".join(formats)
|
||||||
|
|
||||||
|
# from-config command
|
||||||
|
config_parser = subparsers.add_parser(
|
||||||
|
"from-config",
|
||||||
|
help="Generate models from soleprint configuration file",
|
||||||
|
)
|
||||||
|
config_parser.add_argument(
|
||||||
|
"--config",
|
||||||
|
"-c",
|
||||||
|
type=str,
|
||||||
|
required=True,
|
||||||
|
help="Path to configuration file (e.g., config.json)",
|
||||||
|
)
|
||||||
|
config_parser.add_argument(
|
||||||
|
"--output",
|
||||||
|
"-o",
|
||||||
|
type=str,
|
||||||
|
required=True,
|
||||||
|
help="Output path (file or directory)",
|
||||||
|
)
|
||||||
|
config_parser.add_argument(
|
||||||
|
"--format",
|
||||||
|
"-f",
|
||||||
|
type=str,
|
||||||
|
default="pydantic",
|
||||||
|
choices=["pydantic"], # Only pydantic for config mode
|
||||||
|
help="Output format (default: pydantic)",
|
||||||
|
)
|
||||||
|
config_parser.set_defaults(func=cmd_from_config)
|
||||||
|
|
||||||
|
# from-schema command
|
||||||
|
schema_parser = subparsers.add_parser(
|
||||||
|
"from-schema",
|
||||||
|
help="Generate models from Python dataclasses in schema/ folder",
|
||||||
|
)
|
||||||
|
schema_parser.add_argument(
|
||||||
|
"--schema",
|
||||||
|
"-s",
|
||||||
|
type=str,
|
||||||
|
default=None,
|
||||||
|
help="Path to schema folder (default: ./schema)",
|
||||||
|
)
|
||||||
|
schema_parser.add_argument(
|
||||||
|
"--output",
|
||||||
|
"-o",
|
||||||
|
type=str,
|
||||||
|
required=True,
|
||||||
|
help="Output path (file or directory)",
|
||||||
|
)
|
||||||
|
schema_parser.add_argument(
|
||||||
|
"--targets",
|
||||||
|
"-t",
|
||||||
|
type=str,
|
||||||
|
default="pydantic",
|
||||||
|
help=f"Comma-separated output targets ({formats_str})",
|
||||||
|
)
|
||||||
|
schema_parser.set_defaults(func=cmd_from_schema)
|
||||||
|
|
||||||
|
# extract command
|
||||||
|
extract_parser = subparsers.add_parser(
|
||||||
|
"extract",
|
||||||
|
help="Extract models from existing codebase",
|
||||||
|
)
|
||||||
|
extract_parser.add_argument(
|
||||||
|
"--source",
|
||||||
|
"-s",
|
||||||
|
type=str,
|
||||||
|
required=True,
|
||||||
|
help="Path to source codebase",
|
||||||
|
)
|
||||||
|
extract_parser.add_argument(
|
||||||
|
"--framework",
|
||||||
|
"-f",
|
||||||
|
type=str,
|
||||||
|
choices=["django", "sqlalchemy", "prisma", "auto"],
|
||||||
|
default="auto",
|
||||||
|
help="Source framework (default: auto-detect)",
|
||||||
|
)
|
||||||
|
extract_parser.add_argument(
|
||||||
|
"--output",
|
||||||
|
"-o",
|
||||||
|
type=str,
|
||||||
|
required=True,
|
||||||
|
help="Output path (file or directory)",
|
||||||
|
)
|
||||||
|
extract_parser.add_argument(
|
||||||
|
"--targets",
|
||||||
|
"-t",
|
||||||
|
type=str,
|
||||||
|
default="pydantic",
|
||||||
|
help=f"Comma-separated output targets ({formats_str})",
|
||||||
|
)
|
||||||
|
extract_parser.set_defaults(func=cmd_extract)
|
||||||
|
|
||||||
|
# list-formats command
|
||||||
|
formats_parser = subparsers.add_parser(
|
||||||
|
"list-formats",
|
||||||
|
help="List available output formats",
|
||||||
|
)
|
||||||
|
formats_parser.set_defaults(func=cmd_list_formats)
|
||||||
|
|
||||||
|
args = parser.parse_args()
|
||||||
|
args.func(args)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
40
tools/modelgen/generator/__init__.py
Normal file
40
tools/modelgen/generator/__init__.py
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
"""
|
||||||
|
Generator - Stack-specific code generators for modelgen.
|
||||||
|
|
||||||
|
Supported generators:
|
||||||
|
- PydanticGenerator: Pydantic BaseModel classes
|
||||||
|
- DjangoGenerator: Django ORM models
|
||||||
|
- TypeScriptGenerator: TypeScript interfaces
|
||||||
|
- ProtobufGenerator: Protocol Buffer definitions
|
||||||
|
- PrismaGenerator: Prisma schema
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Dict, Type
|
||||||
|
|
||||||
|
from .base import BaseGenerator
|
||||||
|
from .django import DjangoGenerator
|
||||||
|
from .prisma import PrismaGenerator
|
||||||
|
from .protobuf import ProtobufGenerator
|
||||||
|
from .pydantic import PydanticGenerator
|
||||||
|
from .typescript import TypeScriptGenerator
|
||||||
|
|
||||||
|
# Registry of available generators
|
||||||
|
GENERATORS: Dict[str, Type[BaseGenerator]] = {
|
||||||
|
"pydantic": PydanticGenerator,
|
||||||
|
"django": DjangoGenerator,
|
||||||
|
"typescript": TypeScriptGenerator,
|
||||||
|
"ts": TypeScriptGenerator, # Alias
|
||||||
|
"protobuf": ProtobufGenerator,
|
||||||
|
"proto": ProtobufGenerator, # Alias
|
||||||
|
"prisma": PrismaGenerator,
|
||||||
|
}
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"BaseGenerator",
|
||||||
|
"PydanticGenerator",
|
||||||
|
"DjangoGenerator",
|
||||||
|
"TypeScriptGenerator",
|
||||||
|
"ProtobufGenerator",
|
||||||
|
"PrismaGenerator",
|
||||||
|
"GENERATORS",
|
||||||
|
]
|
||||||
23
tools/modelgen/generator/base.py
Normal file
23
tools/modelgen/generator/base.py
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
"""
|
||||||
|
Base Generator
|
||||||
|
|
||||||
|
Abstract base class for all code generators.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from abc import ABC, abstractmethod
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
|
||||||
|
class BaseGenerator(ABC):
    """Common interface implemented by every modelgen code generator.

    Concrete generators (Pydantic, Django, TypeScript, Protobuf, Prisma)
    render model definitions into source text for their target stack.
    """

    @abstractmethod
    def file_extension(self) -> str:
        """Return the file-name suffix (e.g. ``".py"``) for this format."""
        ...

    @abstractmethod
    def generate(self, models: Any, output_path: Path) -> None:
        """Render *models* as source text and write it to *output_path*."""
        ...
|
||||||
268
tools/modelgen/generator/django.py
Normal file
268
tools/modelgen/generator/django.py
Normal file
@@ -0,0 +1,268 @@
|
|||||||
|
"""
|
||||||
|
Django Generator
|
||||||
|
|
||||||
|
Generates Django ORM models from model definitions.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import dataclasses as dc
|
||||||
|
from enum import Enum
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any, List, get_type_hints
|
||||||
|
|
||||||
|
from ..helpers import format_opts, get_origin_name, get_type_name, unwrap_optional
|
||||||
|
from ..loader.schema import EnumDefinition, ModelDefinition
|
||||||
|
from ..types import DJANGO_SPECIAL, DJANGO_TYPES
|
||||||
|
from .base import BaseGenerator
|
||||||
|
|
||||||
|
|
||||||
|
class DjangoGenerator(BaseGenerator):
    """Generates Django ORM model files.

    Accepts three input shapes (see ``generate``): a loader object exposing
    ``.models``/``.enums``, a ``(models, enums)`` tuple of definitions, or a
    plain list of Python dataclasses.
    """

    def file_extension(self) -> str:
        # Django models are ordinary Python modules.
        return ".py"

    def generate(self, models, output_path: Path) -> None:
        """Generate Django models to output_path.

        Args:
            models: SchemaLoader-like object, ``(models, enums)`` tuple,
                or a list of dataclasses.
            output_path: Destination file; parent directories are created.

        Raises:
            ValueError: If ``models`` is none of the supported shapes.
        """
        output_path.parent.mkdir(parents=True, exist_ok=True)

        # Handle different input types
        if hasattr(models, "models"):
            # SchemaLoader or similar
            content = self._generate_from_definitions(
                models.models, getattr(models, "enums", [])
            )
        elif isinstance(models, tuple):
            # (models, enums) tuple
            content = self._generate_from_definitions(models[0], models[1])
        elif isinstance(models, list):
            # List of dataclasses (MPR style)
            content = self._generate_from_dataclasses(models)
        else:
            raise ValueError(f"Unsupported input type: {type(models)}")

        output_path.write_text(content)

    def _generate_from_definitions(
        self, models: List[ModelDefinition], enums: List[EnumDefinition]
    ) -> str:
        """Generate from ModelDefinition objects.

        Enums are emitted first (as module-level TextChoices classes) so the
        model classes that follow can reference them.
        """
        lines = self._generate_header()

        # Generate enums as TextChoices
        for enum_def in enums:
            lines.extend(self._generate_text_choices(enum_def))
            lines.append("")

        # Generate models
        for model_def in models:
            lines.extend(self._generate_model_from_definition(model_def))
            lines.extend(["", ""])

        return "\n".join(lines)

    def _generate_from_dataclasses(self, dataclasses: List[type]) -> str:
        """Generate from Python dataclasses (MPR style)."""
        lines = self._generate_header()

        for cls in dataclasses:
            lines.extend(self._generate_model_from_dataclass(cls))
            lines.extend(["", ""])

        return "\n".join(lines)

    def _generate_header(self) -> List[str]:
        """Generate file header: module docstring plus the imports every
        generated models module needs."""
        return [
            '"""',
            "Django ORM Models - GENERATED FILE",
            "",
            "Do not edit directly. Regenerate using modelgen.",
            '"""',
            "",
            "import uuid",
            "from django.db import models",
            "",
        ]

    def _generate_text_choices(self, enum_def: EnumDefinition) -> List[str]:
        """Generate Django TextChoices from EnumDefinition."""
        lines = [
            f"class {enum_def.name}(models.TextChoices):",
        ]
        for name, value in enum_def.values:
            # Human-readable label derived from the member name,
            # e.g. IN_PROGRESS -> "In Progress".
            label = name.replace("_", " ").title()
            lines.append(f'    {name} = "{value}", "{label}"')
        return lines

    def _generate_model_from_definition(self, model_def: ModelDefinition) -> List[str]:
        """Generate Django model from ModelDefinition."""
        docstring = model_def.docstring or model_def.name
        lines = [
            f"class {model_def.name}(models.Model):",
            # chr(10) is "\n": keep only the first line of the docstring.
            f'    """{docstring.strip().split(chr(10))[0]}"""',
            "",
        ]

        for field in model_def.fields:
            django_field = self._resolve_field_type(
                field.name, field.type_hint, field.default, field.optional
            )
            lines.append(f"    {field.name} = {django_field}")

        # Add Meta and __str__
        lines.extend(
            [
                "",
                "    class Meta:",
                # Default ordering only when the model actually has created_at.
                '        ordering = ["-created_at"]'
                if any(f.name == "created_at" for f in model_def.fields)
                else "        pass",
                "",
                "    def __str__(self):",
            ]
        )

        # Determine __str__ return: prefer filename, then name, else the pk.
        field_names = [f.name for f in model_def.fields]
        if "filename" in field_names:
            lines.append("        return self.filename")
        elif "name" in field_names:
            lines.append("        return self.name")
        else:
            lines.append("        return str(self.id)")

        return lines

    def _generate_model_from_dataclass(self, cls: type) -> List[str]:
        """Generate Django model from a dataclass (MPR style)."""
        docstring = cls.__doc__ or cls.__name__
        lines = [
            f"class {cls.__name__}(models.Model):",
            f'    """{docstring.strip().split(chr(10))[0]}"""',
            "",
        ]

        hints = get_type_hints(cls)
        fields = {f.name: f for f in dc.fields(cls)}

        # Check for enums and add Status inner class if needed.
        # NOTE(review): only the FIRST enum-typed field is rendered (the loop
        # breaks after one), and it is always named ``Status`` regardless of
        # the enum's own name — assumes at most one enum per dataclass; confirm.
        for type_hint in hints.values():
            base, _ = unwrap_optional(type_hint)
            if isinstance(base, type) and issubclass(base, Enum):
                lines.append("    class Status(models.TextChoices):")
                for member in base:
                    label = member.name.replace("_", " ").title()
                    lines.append(f'        {member.name} = "{member.value}", "{label}"')
                lines.append("")
                break

        # Generate fields
        for name, type_hint in hints.items():
            if name.startswith("_"):
                # Private attributes are not persisted.
                continue
            field = fields.get(name)
            default = dc.MISSING
            if field and field.default is not dc.MISSING:
                default = field.default
            django_field = self._resolve_field_type(name, type_hint, default, False)
            lines.append(f"    {name} = {django_field}")

        # Add Meta and __str__
        lines.extend(
            [
                "",
                "    class Meta:",
                '        ordering = ["-created_at"]'
                if "created_at" in hints
                else "        pass",
                "",
                "    def __str__(self):",
            ]
        )

        if "filename" in hints:
            lines.append("        return self.filename")
        elif "name" in hints:
            lines.append("        return self.name")
        else:
            lines.append("        return str(self.id)")

        return lines

    def _resolve_field_type(
        self, name: str, type_hint: Any, default: Any, optional: bool
    ) -> str:
        """Resolve Python type to Django field.

        Resolution order: special field names, containers, UUID/datetime,
        enums, name-based text/bigint heuristics, then the basic scalars,
        falling back to CharField.
        """
        # Special fields
        if name in DJANGO_SPECIAL:
            return DJANGO_SPECIAL[name]

        base, is_optional = unwrap_optional(type_hint)
        # Optional can come from either the caller or the Optional[...] hint.
        optional = optional or is_optional
        origin = get_origin_name(base)
        type_name = get_type_name(base)
        opts = format_opts(optional)

        # Container types
        if origin == "dict":
            return DJANGO_TYPES["dict"]
        if origin == "list":
            return DJANGO_TYPES["list"]

        # UUID / datetime
        if type_name == "UUID":
            return DJANGO_TYPES["UUID"].format(opts=opts)
        if type_name == "datetime":
            return DJANGO_TYPES["datetime"].format(opts=opts)

        # Enum
        if isinstance(base, type) and issubclass(base, Enum):
            extra = []
            if optional:
                extra.append("null=True, blank=True")
            if default is not dc.MISSING and isinstance(default, Enum):
                # NOTE(review): hard-codes the inner-class name ``Status``;
                # this matches the dataclass path above but may not match
                # module-level TextChoices emitted from EnumDefinitions — confirm.
                extra.append(f"default=Status.{default.name}")
            return DJANGO_TYPES["enum"].format(
                opts=", " + ", ".join(extra) if extra else ""
            )

        # Text fields (based on name heuristics)
        if base is str and any(
            x in name for x in ("message", "comments", "description")
        ):
            return DJANGO_TYPES["text"]

        # BigInt fields
        if base is int and name in ("file_size", "bitrate"):
            return DJANGO_TYPES["bigint"].format(opts=opts)

        # String with max_length chosen by name heuristic.
        if base is str:
            max_length = 1000 if "path" in name else 500 if "filename" in name else 255
            return DJANGO_TYPES[str].format(
                max_length=max_length, opts=", " + opts if opts else ""
            )

        # Integer
        if base is int:
            extra = [opts] if opts else []
            # Callable defaults (default_factory) can't be inlined as literals.
            if default is not dc.MISSING and not callable(default):
                extra.append(f"default={default}")
            return DJANGO_TYPES[int].format(opts=", ".join(extra))

        # Float
        if base is float:
            extra = [opts] if opts else []
            if default is not dc.MISSING and not callable(default):
                extra.append(f"default={default}")
            return DJANGO_TYPES[float].format(opts=", ".join(extra))

        # Boolean
        if base is bool:
            default_val = default if default is not dc.MISSING else False
            return DJANGO_TYPES[bool].format(default=default_val)

        # Fallback to CharField
        return DJANGO_TYPES[str].format(
            max_length=255, opts=", " + opts if opts else ""
        )
|
||||||
173
tools/modelgen/generator/prisma.py
Normal file
173
tools/modelgen/generator/prisma.py
Normal file
@@ -0,0 +1,173 @@
|
|||||||
|
"""
|
||||||
|
Prisma Generator
|
||||||
|
|
||||||
|
Generates Prisma schema from model definitions.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from enum import Enum
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any, List, get_type_hints
|
||||||
|
|
||||||
|
from ..helpers import get_origin_name, get_type_name, unwrap_optional
|
||||||
|
from ..loader.schema import EnumDefinition, ModelDefinition
|
||||||
|
from ..types import PRISMA_SPECIAL, PRISMA_TYPES
|
||||||
|
from .base import BaseGenerator
|
||||||
|
|
||||||
|
|
||||||
|
class PrismaGenerator(BaseGenerator):
    """Generates Prisma schema files.

    Accepts three input shapes (see ``generate``): a loader object exposing
    ``.models``/``.enums``, a ``(models, enums)`` tuple of definitions, or a
    plain list of Python dataclasses.
    """

    def file_extension(self) -> str:
        # Prisma schemas use the .prisma suffix.
        return ".prisma"

    def generate(self, models, output_path: Path) -> None:
        """Generate Prisma schema to output_path.

        Args:
            models: SchemaLoader-like object, ``(models, enums)`` tuple,
                or a list of dataclasses.
            output_path: Destination file; parent directories are created.

        Raises:
            ValueError: If ``models`` is none of the supported shapes.
        """
        output_path.parent.mkdir(parents=True, exist_ok=True)

        # Handle different input types
        if hasattr(models, "models"):
            # SchemaLoader
            content = self._generate_from_definitions(
                models.models, getattr(models, "enums", [])
            )
        elif isinstance(models, tuple):
            # (models, enums) tuple
            content = self._generate_from_definitions(models[0], models[1])
        elif isinstance(models, list):
            # List of dataclasses (MPR style)
            content = self._generate_from_dataclasses(models)
        else:
            raise ValueError(f"Unsupported input type: {type(models)}")

        output_path.write_text(content)

    def _generate_from_definitions(
        self, models: List[ModelDefinition], enums: List[EnumDefinition]
    ) -> str:
        """Generate from ModelDefinition objects.

        Enums are emitted before models so the model fields can reference them.
        """
        lines = self._generate_header()

        # Generate enums
        for enum_def in enums:
            lines.extend(self._generate_enum(enum_def))
            lines.append("")

        # Generate models
        for model_def in models:
            lines.extend(self._generate_model_from_definition(model_def))
            lines.append("")

        return "\n".join(lines)

    def _generate_from_dataclasses(self, dataclasses: List[type]) -> str:
        """Generate from Python dataclasses (MPR style)."""
        lines = self._generate_header()

        # Collect and generate enums first, deduplicated by enum class name.
        enums_generated = set()
        for cls in dataclasses:
            hints = get_type_hints(cls)
            for type_hint in hints.values():
                base, _ = unwrap_optional(type_hint)
                if isinstance(base, type) and issubclass(base, Enum):
                    if base.__name__ not in enums_generated:
                        lines.extend(self._generate_enum_from_python(base))
                        lines.append("")
                        enums_generated.add(base.__name__)

        # Generate models
        for cls in dataclasses:
            lines.extend(self._generate_model_from_dataclass(cls))
            lines.append("")

        return "\n".join(lines)

    def _generate_header(self) -> List[str]:
        """Generate file header with datasource and generator blocks.

        The generated schema targets prisma-client-py against PostgreSQL,
        with the connection string read from DATABASE_URL at runtime.
        """
        return [
            "// Prisma Schema - GENERATED FILE",
            "//",
            "// Do not edit directly. Regenerate using modelgen.",
            "",
            "generator client {",
            '  provider = "prisma-client-py"',
            "}",
            "",
            "datasource db {",
            '  provider = "postgresql"',
            '  url = env("DATABASE_URL")',
            "}",
            "",
        ]

    def _generate_enum(self, enum_def: EnumDefinition) -> List[str]:
        """Generate Prisma enum from EnumDefinition.

        Note: only the member NAMES are emitted; Prisma enums carry no
        separate values, so the definition's values are dropped here.
        """
        lines = [f"enum {enum_def.name} {{"]
        for name, _ in enum_def.values:
            lines.append(f"  {name}")
        lines.append("}")
        return lines

    def _generate_enum_from_python(self, enum_cls: type) -> List[str]:
        """Generate Prisma enum from Python Enum (member names only)."""
        lines = [f"enum {enum_cls.__name__} {{"]
        for member in enum_cls:
            lines.append(f"  {member.name}")
        lines.append("}")
        return lines

    def _generate_model_from_definition(self, model_def: ModelDefinition) -> List[str]:
        """Generate Prisma model from ModelDefinition."""
        lines = [f"model {model_def.name} {{"]

        for field in model_def.fields:
            prisma_type = self._resolve_type(
                field.name, field.type_hint, field.optional
            )
            lines.append(f"  {field.name} {prisma_type}")

        lines.append("}")
        return lines

    def _generate_model_from_dataclass(self, cls: type) -> List[str]:
        """Generate Prisma model from a dataclass."""
        lines = [f"model {cls.__name__} {{"]

        for name, type_hint in get_type_hints(cls).items():
            if name.startswith("_"):
                # Private attributes are not persisted.
                continue
            prisma_type = self._resolve_type(name, type_hint, False)
            lines.append(f"  {name} {prisma_type}")

        lines.append("}")
        return lines

    def _resolve_type(self, name: str, type_hint: Any, optional: bool) -> str:
        """Resolve Python type to Prisma type string.

        Optional types are suffixed with ``?`` per Prisma syntax.
        """
        # Special fields
        if name in PRISMA_SPECIAL:
            return PRISMA_SPECIAL[name]

        base, is_optional = unwrap_optional(type_hint)
        # Optional can come from either the caller or the Optional[...] hint.
        optional = optional or is_optional
        origin = get_origin_name(base)
        type_name = get_type_name(base)

        # Container types map to Json (or whatever PRISMA_TYPES declares).
        if origin == "dict" or origin == "list":
            result = PRISMA_TYPES.get(origin, "Json")
            return f"{result}?" if optional else result

        # UUID / datetime
        if type_name in ("UUID", "datetime"):
            result = PRISMA_TYPES.get(type_name, "String")
            return f"{result}?" if optional else result

        # Enum fields use the enum's own name as the Prisma type.
        if isinstance(base, type) and issubclass(base, Enum):
            result = base.__name__
            return f"{result}?" if optional else result

        # Basic types, falling back to String for anything unknown.
        result = PRISMA_TYPES.get(base, "String")
        return f"{result}?" if optional else result
|
||||||
168
tools/modelgen/generator/protobuf.py
Normal file
168
tools/modelgen/generator/protobuf.py
Normal file
@@ -0,0 +1,168 @@
|
|||||||
|
"""
|
||||||
|
Protobuf Generator
|
||||||
|
|
||||||
|
Generates Protocol Buffer definitions from model definitions.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any, List, get_type_hints
|
||||||
|
|
||||||
|
from ..helpers import get_origin_name, unwrap_optional
|
||||||
|
from ..loader.schema import GrpcServiceDefinition, ModelDefinition
|
||||||
|
from ..types import PROTO_RESOLVERS
|
||||||
|
from .base import BaseGenerator
|
||||||
|
|
||||||
|
|
||||||
|
class ProtobufGenerator(BaseGenerator):
    """Generates Protocol Buffer definition files.

    Accepts three input shapes (see ``generate``): a loader object exposing
    ``.grpc_messages``/``.grpc_service``, a ``(messages, service_def)``
    tuple, or a plain list of Python dataclasses.
    """

    def file_extension(self) -> str:
        # Protocol Buffer definitions use the .proto suffix.
        return ".proto"

    def generate(self, models, output_path: Path) -> None:
        """Generate protobuf definitions to output_path.

        Args:
            models: SchemaLoader-like object with gRPC definitions, a
                (messages, service_def) tuple, or a list of dataclasses.
            output_path: Destination file; parent directories are created.

        Raises:
            ValueError: If ``models`` is none of the supported shapes.
        """
        output_path.parent.mkdir(parents=True, exist_ok=True)

        # Handle different input types
        if hasattr(models, "grpc_messages"):
            # SchemaLoader with gRPC definitions
            content = self._generate_from_loader(models)
        elif isinstance(models, tuple) and len(models) >= 2:
            # (messages, service_def) tuple.
            # BUGFIX: previously required len(models) >= 3, which rejected
            # the documented 2-tuple shape with ValueError even though only
            # models[0] and models[1] are ever used. >= 2 also matches the
            # tuple handling in the Django/Prisma/Pydantic generators.
            content = self._generate_from_definitions(models[0], models[1])
        elif isinstance(models, list):
            # List of dataclasses (MPR style)
            content = self._generate_from_dataclasses(models)
        else:
            raise ValueError(f"Unsupported input type: {type(models)}")

        output_path.write_text(content)

    def _generate_from_loader(self, loader) -> str:
        """Generate from SchemaLoader."""
        messages = loader.grpc_messages
        service = loader.grpc_service

        # The service is optional: fall back to generic package/service
        # names and no RPC methods when the loader carries none.
        lines = self._generate_header(
            service.package if service else "service",
            service.name if service else "Service",
            service.methods if service else [],
        )

        for model_def in messages:
            lines.extend(self._generate_message_from_definition(model_def))
            lines.append("")

        return "\n".join(lines)

    def _generate_from_definitions(
        self, messages: List[ModelDefinition], service: GrpcServiceDefinition
    ) -> str:
        """Generate from ModelDefinition objects."""
        lines = self._generate_header(service.package, service.name, service.methods)

        for model_def in messages:
            lines.extend(self._generate_message_from_definition(model_def))
            lines.append("")

        return "\n".join(lines)

    def _generate_from_dataclasses(self, dataclasses: List[type]) -> str:
        """Generate from Python dataclasses (MPR style)."""
        # No service metadata is available for plain dataclasses.
        lines = self._generate_header("service", "Service", [])

        for cls in dataclasses:
            lines.extend(self._generate_message_from_dataclass(cls))
            lines.append("")

        return "\n".join(lines)

    def _generate_header(
        self, package: str, service_name: str, methods: List[dict]
    ) -> List[str]:
        """Generate file header with optional service definition.

        Args:
            package: proto package name.
            service_name: gRPC service name (used only if methods is non-empty).
            methods: method dicts with "name", "request", "response" and an
                optional "stream_response" flag.
        """
        lines = [
            "// Protocol Buffer Definitions - GENERATED FILE",
            "//",
            "// Do not edit directly. Regenerate using modelgen.",
            "",
            'syntax = "proto3";',
            "",
            f"package {package};",
            "",
        ]

        if methods:
            lines.append(f"service {service_name} {{")
            for m in methods:
                # Request/response entries may be classes or plain names.
                req = (
                    m["request"].__name__
                    if hasattr(m["request"], "__name__")
                    else str(m["request"])
                )
                resp = (
                    m["response"].__name__
                    if hasattr(m["response"], "__name__")
                    else str(m["response"])
                )
                # Server-streaming RPCs get the "stream" keyword.
                returns = f"stream {resp}" if m.get("stream_response") else resp
                lines.append(f"  rpc {m['name']}({req}) returns ({returns});")
            lines.extend(["}", ""])

        return lines

    def _generate_message_from_definition(
        self, model_def: ModelDefinition
    ) -> List[str]:
        """Generate proto message from ModelDefinition."""
        lines = [f"message {model_def.name} {{"]

        if not model_def.fields:
            lines.append("  // Empty")
        else:
            # proto3 field numbers start at 1.
            for i, field in enumerate(model_def.fields, 1):
                proto_type, optional = self._resolve_type(field.type_hint)
                # "optional" and "repeated" labels are mutually exclusive.
                prefix = (
                    "optional "
                    if optional and not proto_type.startswith("repeated")
                    else ""
                )
                lines.append(f"  {prefix}{proto_type} {field.name} = {i};")

        lines.append("}")
        return lines

    def _generate_message_from_dataclass(self, cls: type) -> List[str]:
        """Generate proto message from a dataclass."""
        lines = [f"message {cls.__name__} {{"]

        hints = get_type_hints(cls)
        if not hints:
            lines.append("  // Empty")
        else:
            for i, (name, type_hint) in enumerate(hints.items(), 1):
                proto_type, optional = self._resolve_type(type_hint)
                prefix = (
                    "optional "
                    if optional and not proto_type.startswith("repeated")
                    else ""
                )
                lines.append(f"  {prefix}{proto_type} {name} = {i};")

        lines.append("}")
        return lines

    def _resolve_type(self, type_hint: Any) -> tuple[str, bool]:
        """Resolve Python type to proto type. Returns (type, is_optional)."""
        base, optional = unwrap_optional(type_hint)
        origin = get_origin_name(base)

        # Look up resolver by container origin first, then by the bare type.
        resolver = PROTO_RESOLVERS.get(origin) or PROTO_RESOLVERS.get(base)

        if resolver:
            result = resolver(base)
            # "repeated" fields never also carry the "optional" label.
            is_repeated = result.startswith("repeated")
            return result, optional and not is_repeated

        # Unknown types default to string.
        return "string", optional
|
||||||
427
tools/modelgen/generator/pydantic.py
Normal file
427
tools/modelgen/generator/pydantic.py
Normal file
@@ -0,0 +1,427 @@
|
|||||||
|
"""
|
||||||
|
Pydantic Generator
|
||||||
|
|
||||||
|
Generates Pydantic BaseModel classes from model definitions.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from enum import Enum
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any, List, get_type_hints
|
||||||
|
|
||||||
|
from ..helpers import get_origin_name, get_type_name, unwrap_optional
|
||||||
|
from ..loader.schema import EnumDefinition, FieldDefinition, ModelDefinition
|
||||||
|
from ..types import PYDANTIC_RESOLVERS
|
||||||
|
from .base import BaseGenerator
|
||||||
|
|
||||||
|
|
||||||
|
class PydanticGenerator(BaseGenerator):
|
||||||
|
"""Generates Pydantic model files."""
|
||||||
|
|
||||||
|
def file_extension(self) -> str:
|
||||||
|
return ".py"
|
||||||
|
|
||||||
|
def generate(self, models, output_path: Path) -> None:
|
||||||
|
"""Generate Pydantic models to output_path."""
|
||||||
|
output_path.parent.mkdir(parents=True, exist_ok=True)
|
||||||
|
|
||||||
|
# Detect input type and generate accordingly
|
||||||
|
if hasattr(models, "get_shared_component"):
|
||||||
|
# ConfigLoader (soleprint config)
|
||||||
|
content = self._generate_from_config(models)
|
||||||
|
elif hasattr(models, "models"):
|
||||||
|
# SchemaLoader
|
||||||
|
content = self._generate_from_definitions(
|
||||||
|
models.models, getattr(models, "enums", [])
|
||||||
|
)
|
||||||
|
elif isinstance(models, tuple):
|
||||||
|
# (models, enums) tuple from extractor
|
||||||
|
content = self._generate_from_definitions(models[0], models[1])
|
||||||
|
elif isinstance(models, list):
|
||||||
|
# List of dataclasses (MPR style)
|
||||||
|
content = self._generate_from_dataclasses(models)
|
||||||
|
else:
|
||||||
|
raise ValueError(f"Unsupported input type: {type(models)}")
|
||||||
|
|
||||||
|
output_path.write_text(content)
|
||||||
|
|
||||||
|
def _generate_from_definitions(
|
||||||
|
self, models: List[ModelDefinition], enums: List[EnumDefinition]
|
||||||
|
) -> str:
|
||||||
|
"""Generate from ModelDefinition objects (schema/extract mode)."""
|
||||||
|
lines = self._generate_header()
|
||||||
|
|
||||||
|
# Generate enums
|
||||||
|
for enum_def in enums:
|
||||||
|
lines.extend(self._generate_enum(enum_def))
|
||||||
|
lines.append("")
|
||||||
|
|
||||||
|
# Generate models
|
||||||
|
for model_def in models:
|
||||||
|
lines.extend(self._generate_model_from_definition(model_def))
|
||||||
|
lines.append("")
|
||||||
|
|
||||||
|
return "\n".join(lines)
|
||||||
|
|
||||||
|
def _generate_from_dataclasses(self, dataclasses: List[type]) -> str:
|
||||||
|
"""Generate from Python dataclasses (MPR style)."""
|
||||||
|
lines = self._generate_header()
|
||||||
|
|
||||||
|
# Collect and generate enums first
|
||||||
|
enums_generated = set()
|
||||||
|
for cls in dataclasses:
|
||||||
|
hints = get_type_hints(cls)
|
||||||
|
for type_hint in hints.values():
|
||||||
|
base, _ = unwrap_optional(type_hint)
|
||||||
|
if isinstance(base, type) and issubclass(base, Enum):
|
||||||
|
if base.__name__ not in enums_generated:
|
||||||
|
lines.extend(self._generate_enum_from_python(base))
|
||||||
|
lines.append("")
|
||||||
|
enums_generated.add(base.__name__)
|
||||||
|
|
||||||
|
# Generate models
|
||||||
|
for cls in dataclasses:
|
||||||
|
lines.extend(self._generate_model_from_dataclass(cls))
|
||||||
|
lines.append("")
|
||||||
|
|
||||||
|
return "\n".join(lines)
|
||||||
|
|
||||||
|
def _generate_header(self) -> List[str]:
|
||||||
|
"""Generate file header."""
|
||||||
|
return [
|
||||||
|
'"""',
|
||||||
|
"Pydantic Models - GENERATED FILE",
|
||||||
|
"",
|
||||||
|
"Do not edit directly. Regenerate using modelgen.",
|
||||||
|
'"""',
|
||||||
|
"",
|
||||||
|
"from datetime import datetime",
|
||||||
|
"from enum import Enum",
|
||||||
|
"from typing import Any, Dict, List, Optional",
|
||||||
|
"from uuid import UUID",
|
||||||
|
"",
|
||||||
|
"from pydantic import BaseModel, Field",
|
||||||
|
"",
|
||||||
|
]
|
||||||
|
|
||||||
|
def _generate_enum(self, enum_def: EnumDefinition) -> List[str]:
|
||||||
|
"""Generate Pydantic enum from EnumDefinition."""
|
||||||
|
lines = [f"class {enum_def.name}(str, Enum):"]
|
||||||
|
for name, value in enum_def.values:
|
||||||
|
lines.append(f' {name} = "{value}"')
|
||||||
|
return lines
|
||||||
|
|
||||||
|
def _generate_enum_from_python(self, enum_cls: type) -> List[str]:
|
||||||
|
"""Generate Pydantic enum from Python Enum."""
|
||||||
|
lines = [f"class {enum_cls.__name__}(str, Enum):"]
|
||||||
|
for member in enum_cls:
|
||||||
|
lines.append(f' {member.name} = "{member.value}"')
|
||||||
|
return lines
|
||||||
|
|
||||||
|
def _generate_model_from_definition(self, model_def: ModelDefinition) -> List[str]:
|
||||||
|
"""Generate Pydantic model from ModelDefinition."""
|
||||||
|
docstring = model_def.docstring or model_def.name
|
||||||
|
lines = [
|
||||||
|
f"class {model_def.name}(BaseModel):",
|
||||||
|
f' """{docstring.strip().split(chr(10))[0]}"""',
|
||||||
|
]
|
||||||
|
|
||||||
|
if not model_def.fields:
|
||||||
|
lines.append(" pass")
|
||||||
|
else:
|
||||||
|
for field in model_def.fields:
|
||||||
|
py_type = self._resolve_type(field.type_hint, field.optional)
|
||||||
|
default = self._format_default(field.default, field.optional)
|
||||||
|
lines.append(f" {field.name}: {py_type}{default}")
|
||||||
|
|
||||||
|
return lines
|
||||||
|
|
||||||
|
def _generate_model_from_dataclass(self, cls: type) -> List[str]:
|
||||||
|
"""Generate Pydantic model from a dataclass."""
|
||||||
|
import dataclasses as dc
|
||||||
|
|
||||||
|
docstring = cls.__doc__ or cls.__name__
|
||||||
|
lines = [
|
||||||
|
f"class {cls.__name__}(BaseModel):",
|
||||||
|
f' """{docstring.strip().split(chr(10))[0]}"""',
|
||||||
|
]
|
||||||
|
|
||||||
|
hints = get_type_hints(cls)
|
||||||
|
fields = {f.name: f for f in dc.fields(cls)}
|
||||||
|
|
||||||
|
for name, type_hint in hints.items():
|
||||||
|
if name.startswith("_"):
|
||||||
|
continue
|
||||||
|
|
||||||
|
field = fields.get(name)
|
||||||
|
default_val = dc.MISSING
|
||||||
|
if field:
|
||||||
|
if field.default is not dc.MISSING:
|
||||||
|
default_val = field.default
|
||||||
|
|
||||||
|
py_type = self._resolve_type(type_hint, False)
|
||||||
|
default = self._format_default(default_val, "Optional" in py_type)
|
||||||
|
lines.append(f" {name}: {py_type}{default}")
|
||||||
|
|
||||||
|
return lines
|
||||||
|
|
||||||
|
def _resolve_type(self, type_hint: Any, optional: bool) -> str:
|
||||||
|
"""Resolve Python type to Pydantic type string."""
|
||||||
|
base, is_optional = unwrap_optional(type_hint)
|
||||||
|
optional = optional or is_optional
|
||||||
|
origin = get_origin_name(base)
|
||||||
|
type_name = get_type_name(base)
|
||||||
|
|
||||||
|
# Look up resolver
|
||||||
|
resolver = (
|
||||||
|
PYDANTIC_RESOLVERS.get(origin)
|
||||||
|
or PYDANTIC_RESOLVERS.get(type_name)
|
||||||
|
or PYDANTIC_RESOLVERS.get(base)
|
||||||
|
or (
|
||||||
|
PYDANTIC_RESOLVERS["enum"]
|
||||||
|
if isinstance(base, type) and issubclass(base, Enum)
|
||||||
|
else None
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
result = resolver(base) if resolver else "str"
|
||||||
|
return f"Optional[{result}]" if optional else result
|
||||||
|
|
||||||
|
def _format_default(self, default: Any, optional: bool) -> str:
|
||||||
|
"""Format default value for field."""
|
||||||
|
import dataclasses as dc
|
||||||
|
|
||||||
|
if optional:
|
||||||
|
return " = None"
|
||||||
|
if default is dc.MISSING or default is None:
|
||||||
|
return ""
|
||||||
|
if isinstance(default, str):
|
||||||
|
return f' = "{default}"'
|
||||||
|
if isinstance(default, Enum):
|
||||||
|
return f" = {default.__class__.__name__}.{default.name}"
|
||||||
|
if callable(default):
|
||||||
|
return " = Field(default_factory=list)" if "list" in str(default) else ""
|
||||||
|
return f" = {default!r}"
|
||||||
|
|
||||||
|
    def _generate_from_config(self, config) -> str:
        """Generate a complete Pydantic models module from a ConfigLoader.

        ``config`` is a loaded ConfigLoader (soleprint config.json mode); the
        class/field names in the emitted module come from the component
        ``title``/``name``/``plural`` values in the config. Returns the full
        module source as a string. Raises AttributeError if any expected
        system/component key is missing from the config (the get_* accessors
        return None in that case).
        """
        # Get component definitions from config: shared components first,
        # then the three systems and their system-specific components.
        config_comp = config.get_shared_component("config")
        data_comp = config.get_shared_component("data")

        data_flow_sys = config.get_system("data_flow")
        doc_sys = config.get_system("documentation")
        exec_sys = config.get_system("execution")

        connector_comp = config.get_component("data_flow", "connector")
        pulse_comp = config.get_component("data_flow", "composed")

        pattern_comp = config.get_component("documentation", "pattern")
        doc_composed = config.get_component("documentation", "composed")

        tool_comp = config.get_component("execution", "utility")
        monitor_comp = config.get_component("execution", "watcher")
        cabinet_comp = config.get_component("execution", "container")
        exec_composed = config.get_component("execution", "composed")

        # The template below IS the generated file; everything inside the
        # f-string is emitted verbatim (with config values interpolated).
        return f'''"""
Pydantic models - Generated from {config.framework.name}.config.json

DO NOT EDIT MANUALLY - Regenerate from config
"""

from enum import Enum
from typing import List, Literal, Optional

from pydantic import BaseModel, Field


class Status(str, Enum):
    PENDING = "pending"
    PLANNED = "planned"
    BUILDING = "building"
    DEV = "dev"
    LIVE = "live"
    READY = "ready"


class System(str, Enum):
    {data_flow_sys.name.upper()} = "{data_flow_sys.name}"
    {doc_sys.name.upper()} = "{doc_sys.name}"
    {exec_sys.name.upper()} = "{exec_sys.name}"


class ToolType(str, Enum):
    APP = "app"
    CLI = "cli"


# === Shared Components ===


class {config_comp.title}(BaseModel):
    """{config_comp.description}. Shared across {data_flow_sys.name}, {exec_sys.name}."""

    name: str  # Unique identifier
    slug: str  # URL-friendly identifier
    title: str  # Display title for UI
    status: Optional[Status] = None
    config_path: Optional[str] = None


class {data_comp.title}(BaseModel):
    """{data_comp.description}. Shared across all systems."""

    name: str  # Unique identifier
    slug: str  # URL-friendly identifier
    title: str  # Display title for UI
    status: Optional[Status] = None
    source_template: Optional[str] = None
    data_path: Optional[str] = None


# === System-Specific Components ===


class {connector_comp.title}(BaseModel):
    """{connector_comp.description} ({data_flow_sys.name})."""

    name: str  # Unique identifier
    slug: str  # URL-friendly identifier
    title: str  # Display title for UI
    status: Optional[Status] = None
    system: Literal["{data_flow_sys.name}"] = "{data_flow_sys.name}"
    mock: Optional[bool] = None
    description: Optional[str] = None


class {pattern_comp.title}(BaseModel):
    """{pattern_comp.description} ({doc_sys.name})."""

    name: str  # Unique identifier
    slug: str  # URL-friendly identifier
    title: str  # Display title for UI
    status: Optional[Status] = None
    template_path: Optional[str] = None
    system: Literal["{doc_sys.name}"] = "{doc_sys.name}"


class {tool_comp.title}(BaseModel):
    """{tool_comp.description} ({exec_sys.name})."""

    name: str  # Unique identifier
    slug: str  # URL-friendly identifier
    title: str  # Display title for UI
    status: Optional[Status] = None
    system: Literal["{exec_sys.name}"] = "{exec_sys.name}"
    type: Optional[ToolType] = None
    description: Optional[str] = None
    path: Optional[str] = None
    url: Optional[str] = None
    cli: Optional[str] = None


class {monitor_comp.title}(BaseModel):
    """{monitor_comp.description} ({exec_sys.name})."""

    name: str  # Unique identifier
    slug: str  # URL-friendly identifier
    title: str  # Display title for UI
    status: Optional[Status] = None
    system: Literal["{exec_sys.name}"] = "{exec_sys.name}"


class {cabinet_comp.title}(BaseModel):
    """{cabinet_comp.description} ({exec_sys.name})."""

    name: str  # Unique identifier
    slug: str  # URL-friendly identifier
    title: str  # Display title for UI
    status: Optional[Status] = None
    tools: List[{tool_comp.title}] = Field(default_factory=list)
    system: Literal["{exec_sys.name}"] = "{exec_sys.name}"


# === Composed Types ===


class {pulse_comp.title}(BaseModel):
    """{pulse_comp.description} ({data_flow_sys.name}). Formula: {pulse_comp.formula}."""

    name: str  # Unique identifier
    slug: str  # URL-friendly identifier
    title: str  # Display title for UI
    status: Optional[Status] = None
    {connector_comp.name}: Optional[{connector_comp.title}] = None
    {config_comp.name}: Optional[{config_comp.title}] = None
    {data_comp.name}: Optional[{data_comp.title}] = None
    system: Literal["{data_flow_sys.name}"] = "{data_flow_sys.name}"


class {doc_composed.title}(BaseModel):
    """{doc_composed.description} ({doc_sys.name}). Formula: {doc_composed.formula}."""

    name: str  # Unique identifier
    slug: str  # URL-friendly identifier
    title: str  # Display title for UI
    status: Optional[Status] = None
    template: Optional[{pattern_comp.title}] = None
    {data_comp.name}: Optional[{data_comp.title}] = None
    output_{data_comp.name}: Optional[{data_comp.title}] = None
    system: Literal["{doc_sys.name}"] = "{doc_sys.name}"


class {exec_composed.title}(BaseModel):
    """{exec_composed.description} ({exec_sys.name}). Formula: {exec_composed.formula}."""

    name: str  # Unique identifier
    slug: str  # URL-friendly identifier
    title: str  # Display title for UI
    status: Optional[Status] = None
    cabinet: Optional[{cabinet_comp.title}] = None
    {config_comp.name}: Optional[{config_comp.title}] = None
    {data_comp.plural}: List[{data_comp.title}] = Field(default_factory=list)
    system: Literal["{exec_sys.name}"] = "{exec_sys.name}"


# === Collection wrappers for JSON files ===


class {config_comp.title}Collection(BaseModel):
    items: List[{config_comp.title}] = Field(default_factory=list)


class {data_comp.title}Collection(BaseModel):
    items: List[{data_comp.title}] = Field(default_factory=list)


class {connector_comp.title}Collection(BaseModel):
    items: List[{connector_comp.title}] = Field(default_factory=list)


class {pattern_comp.title}Collection(BaseModel):
    items: List[{pattern_comp.title}] = Field(default_factory=list)


class {tool_comp.title}Collection(BaseModel):
    items: List[{tool_comp.title}] = Field(default_factory=list)


class {monitor_comp.title}Collection(BaseModel):
    items: List[{monitor_comp.title}] = Field(default_factory=list)


class {cabinet_comp.title}Collection(BaseModel):
    items: List[{cabinet_comp.title}] = Field(default_factory=list)


class {pulse_comp.title}Collection(BaseModel):
    items: List[{pulse_comp.title}] = Field(default_factory=list)


class {doc_composed.title}Collection(BaseModel):
    items: List[{doc_composed.title}] = Field(default_factory=list)


class {exec_composed.title}Collection(BaseModel):
    items: List[{exec_composed.title}] = Field(default_factory=list)
'''
|
||||||
144
tools/modelgen/generator/typescript.py
Normal file
144
tools/modelgen/generator/typescript.py
Normal file
@@ -0,0 +1,144 @@
|
|||||||
|
"""
|
||||||
|
TypeScript Generator
|
||||||
|
|
||||||
|
Generates TypeScript interfaces from model definitions.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from enum import Enum
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any, List, get_type_hints
|
||||||
|
|
||||||
|
from ..helpers import get_origin_name, get_type_name, unwrap_optional
|
||||||
|
from ..loader.schema import EnumDefinition, FieldDefinition, ModelDefinition
|
||||||
|
from ..types import TS_RESOLVERS
|
||||||
|
from .base import BaseGenerator
|
||||||
|
|
||||||
|
|
||||||
|
class TypeScriptGenerator(BaseGenerator):
    """Generates TypeScript interface files.

    generate() accepts three input shapes: a SchemaLoader-like object (has
    a .models attribute), a (models, enums) tuple, or a plain list of
    Python dataclasses (MPR style).
    """

    def file_extension(self) -> str:
        # Extension for the emitted file.
        return ".ts"

    def generate(self, models, output_path: Path) -> None:
        """Generate TypeScript types and write them to output_path.

        Creates parent directories as needed. Raises ValueError for an
        unsupported ``models`` input type.
        """
        output_path.parent.mkdir(parents=True, exist_ok=True)

        # Handle different input types
        if hasattr(models, "models"):
            # SchemaLoader
            content = self._generate_from_definitions(
                models.models, getattr(models, "enums", [])
            )
        elif isinstance(models, tuple):
            # (models, enums) tuple
            content = self._generate_from_definitions(models[0], models[1])
        elif isinstance(models, list):
            # List of dataclasses (MPR style)
            content = self._generate_from_dataclasses(models)
        else:
            raise ValueError(f"Unsupported input type: {type(models)}")

        output_path.write_text(content)

    def _generate_from_definitions(
        self, models: List[ModelDefinition], enums: List[EnumDefinition]
    ) -> str:
        """Generate TypeScript source from ModelDefinition/EnumDefinition objects."""
        lines = self._generate_header()

        # Generate enums as string-literal union types rather than TS enums.
        for enum_def in enums:
            values = " | ".join(f'"{v}"' for _, v in enum_def.values)
            lines.append(f"export type {enum_def.name} = {values};")
            lines.append("")

        # Generate interfaces
        for model_def in models:
            lines.extend(self._generate_interface_from_definition(model_def))
            lines.append("")

        return "\n".join(lines)

    def _generate_from_dataclasses(self, dataclasses: List[type]) -> str:
        """Generate TypeScript source from Python dataclasses (MPR style)."""
        lines = self._generate_header()

        # Collect and generate enums first, deduplicated by class name so a
        # type referenced by several dataclasses is emitted once.
        enums_generated = set()
        for cls in dataclasses:
            hints = get_type_hints(cls)
            for type_hint in hints.values():
                base, _ = unwrap_optional(type_hint)
                if isinstance(base, type) and issubclass(base, Enum):
                    if base.__name__ not in enums_generated:
                        values = " | ".join(f'"{m.value}"' for m in base)
                        lines.append(f"export type {base.__name__} = {values};")
                        enums_generated.add(base.__name__)
                        lines.append("")

        # Generate interfaces
        for cls in dataclasses:
            lines.extend(self._generate_interface_from_dataclass(cls))
            lines.append("")

        return "\n".join(lines)

    def _generate_header(self) -> List[str]:
        """Generate the do-not-edit file header comment."""
        return [
            "/**",
            " * TypeScript Types - GENERATED FILE",
            " *",
            " * Do not edit directly. Regenerate using modelgen.",
            " */",
            "",
        ]

    def _generate_interface_from_definition(
        self, model_def: ModelDefinition
    ) -> List[str]:
        """Generate TypeScript interface lines from a ModelDefinition."""
        lines = [f"export interface {model_def.name} {{"]

        for field in model_def.fields:
            ts_type = self._resolve_type(field.type_hint, field.optional)
            lines.append(f"  {field.name}: {ts_type};")

        lines.append("}")
        return lines

    def _generate_interface_from_dataclass(self, cls: type) -> List[str]:
        """Generate TypeScript interface lines from a dataclass."""
        lines = [f"export interface {cls.__name__} {{"]

        for name, type_hint in get_type_hints(cls).items():
            if name.startswith("_"):
                # Private attributes are not part of the public interface.
                continue
            ts_type = self._resolve_type(type_hint, False)
            lines.append(f"  {name}: {ts_type};")

        lines.append("}")
        return lines

    def _resolve_type(self, type_hint: Any, optional: bool) -> str:
        """Resolve a Python type hint to a TypeScript type string.

        Optional-ness — from the annotation (Optional[T]) or the caller —
        is rendered as a ``| null`` union.
        """
        base, is_optional = unwrap_optional(type_hint)
        optional = optional or is_optional
        origin = get_origin_name(base)
        type_name = get_type_name(base)

        # Look up resolver: container origin, then type name, then the
        # type object itself, finally a generic Enum handler.
        resolver = (
            TS_RESOLVERS.get(origin)
            or TS_RESOLVERS.get(type_name)
            or TS_RESOLVERS.get(base)
            or (
                TS_RESOLVERS["enum"]
                if isinstance(base, type) and issubclass(base, Enum)
                else None
            )
        )

        result = resolver(base) if resolver else "string"
        return f"{result} | null" if optional else result
|
||||||
72
tools/modelgen/helpers.py
Normal file
72
tools/modelgen/helpers.py
Normal file
@@ -0,0 +1,72 @@
|
|||||||
|
"""
|
||||||
|
Type Helpers
|
||||||
|
|
||||||
|
Utilities for type introspection and resolution.
|
||||||
|
Used by generators and loaders.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import dataclasses as dc
|
||||||
|
from enum import Enum
|
||||||
|
from typing import Any, Union, get_args, get_origin
|
||||||
|
|
||||||
|
|
||||||
|
def unwrap_optional(type_hint: Any) -> tuple[Any, bool]:
|
||||||
|
"""Unwrap Optional[T] -> (T, True) or (T, False) if not optional."""
|
||||||
|
origin = get_origin(type_hint)
|
||||||
|
if origin is Union:
|
||||||
|
args = [a for a in get_args(type_hint) if a is not type(None)]
|
||||||
|
return (args[0] if args else str, True)
|
||||||
|
return (type_hint, False)
|
||||||
|
|
||||||
|
|
||||||
|
def get_origin_name(type_hint: Any) -> str | None:
|
||||||
|
"""Get origin type name: 'dict', 'list', or None."""
|
||||||
|
origin = get_origin(type_hint)
|
||||||
|
if origin is dict:
|
||||||
|
return "dict"
|
||||||
|
if origin is list:
|
||||||
|
return "list"
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def get_type_name(type_hint: Any) -> str | None:
|
||||||
|
"""Get type name for special types like UUID, datetime."""
|
||||||
|
if hasattr(type_hint, "__name__"):
|
||||||
|
return type_hint.__name__
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def get_list_inner(type_hint: Any) -> str:
    """Name the element type of List[T] for primitive T; fall back to 'str'."""
    primitive_names = {str: "str", int: "int", float: "float", bool: "bool"}
    args = get_args(type_hint)
    if not args:
        return "str"
    return primitive_names.get(args[0], "str")
|
||||||
|
|
||||||
|
|
||||||
|
def get_field_default(field: dc.Field) -> Any:
    """Return the field's declared default, or dc.MISSING when absent.

    Dataclass fields store dc.MISSING themselves when no default was set,
    so this simply surfaces field.default unchanged.
    """
    return field.default
|
||||||
|
|
||||||
|
|
||||||
|
def format_opts(optional: bool, extra: list[str] | None = None) -> str:
|
||||||
|
"""Format field options string for Django."""
|
||||||
|
parts = []
|
||||||
|
if optional:
|
||||||
|
parts.append("null=True, blank=True")
|
||||||
|
if extra:
|
||||||
|
parts.extend(extra)
|
||||||
|
return ", ".join(parts)
|
||||||
|
|
||||||
|
|
||||||
|
def is_enum(type_hint: Any) -> bool:
    """True when the (possibly Optional-wrapped) hint is an Enum subclass."""
    inner = unwrap_optional(type_hint)[0]
    if not isinstance(inner, type):
        return False
    return issubclass(inner, Enum)
|
||||||
|
|
||||||
|
|
||||||
|
def get_enum_values(enum_class: type) -> list[tuple[str, str]]:
    """List (member_name, member_value) pairs in definition order."""
    pairs: list[tuple[str, str]] = []
    for member in enum_class:
        pairs.append((member.name, member.value))
    return pairs
|
||||||
37
tools/modelgen/loader/__init__.py
Normal file
37
tools/modelgen/loader/__init__.py
Normal file
@@ -0,0 +1,37 @@
|
|||||||
|
"""
|
||||||
|
Loader - Input source handlers for modelgen.
|
||||||
|
|
||||||
|
Supported loaders:
|
||||||
|
- ConfigLoader: Load from soleprint config.json
|
||||||
|
- SchemaLoader: Load from Python dataclasses in schema/ folder
|
||||||
|
- Extractors: Extract from existing codebases (Django, SQLAlchemy, Prisma)
|
||||||
|
"""
|
||||||
|
|
||||||
|
from .config import ConfigLoader, load_config
|
||||||
|
from .extract import EXTRACTORS, BaseExtractor, DjangoExtractor
|
||||||
|
from .schema import (
|
||||||
|
EnumDefinition,
|
||||||
|
FieldDefinition,
|
||||||
|
GrpcServiceDefinition,
|
||||||
|
ModelDefinition,
|
||||||
|
SchemaLoader,
|
||||||
|
load_schema,
|
||||||
|
)
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
# Config loader
|
||||||
|
"ConfigLoader",
|
||||||
|
"load_config",
|
||||||
|
# Schema loader
|
||||||
|
"SchemaLoader",
|
||||||
|
"load_schema",
|
||||||
|
# Model definitions
|
||||||
|
"ModelDefinition",
|
||||||
|
"FieldDefinition",
|
||||||
|
"EnumDefinition",
|
||||||
|
"GrpcServiceDefinition",
|
||||||
|
# Extractors
|
||||||
|
"BaseExtractor",
|
||||||
|
"DjangoExtractor",
|
||||||
|
"EXTRACTORS",
|
||||||
|
]
|
||||||
116
tools/modelgen/loader/config.py
Normal file
116
tools/modelgen/loader/config.py
Normal file
@@ -0,0 +1,116 @@
|
|||||||
|
"""
|
||||||
|
Configuration Loader
|
||||||
|
|
||||||
|
Loads and validates framework configuration files (soleprint config.json style).
|
||||||
|
"""
|
||||||
|
|
||||||
|
import json
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any, Dict, List, Optional
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class FrameworkConfig:
    """Framework metadata parsed from the top-level "framework" config key."""

    name: str  # framework name; interpolated into generated file headers
    slug: str  # URL-friendly identifier
    version: str  # framework version string
    description: str  # long-form description
    tagline: str  # short one-line description
    icon: str  # icon identifier
    hub_port: int  # presumably the port the hub UI serves on — confirm
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class SystemConfig:
    """Per-system configuration parsed from the "systems" config list."""

    key: str  # lookup key (e.g. "data_flow"), matched by ConfigLoader.get_system
    name: str  # system name, interpolated into generated code
    slug: str = ""  # URL-friendly identifier (optional in config)
    title: str = ""  # display title
    tagline: str = ""  # short description line
    icon: str = ""  # icon identifier
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class ComponentConfig:
    """Per-component configuration parsed from the "components" config map."""

    name: str  # component identifier; used as generated field names
    title: str  # class name used for the generated model
    description: str  # docstring text for the generated model
    plural: Optional[str] = None  # plural field name, used for list-valued fields
    formula: Optional[str] = None  # composition formula shown in composed-model docstrings
|
||||||
|
|
||||||
|
|
||||||
|
class ConfigLoader:
    """Loads and parses a framework configuration file.

    Usage: ``ConfigLoader(path).load()``. load() reads the JSON file and
    populates .framework, .systems and .components for lookup via the
    get_system / get_component / get_shared_component accessors.
    """

    def __init__(self, config_path: Path):
        self.config_path = Path(config_path)
        # Raw parsed JSON; populated by load().
        self.raw_config: Dict[str, Any] = {}
        # Parsed views over raw_config; populated by load().
        self.framework: Optional[FrameworkConfig] = None
        self.systems: List[SystemConfig] = []
        # components[system_key or "shared"][component_key] -> ComponentConfig
        self.components: Dict[str, Dict[str, ComponentConfig]] = {}

    def load(self) -> "ConfigLoader":
        """Load configuration from file; returns self for chaining."""
        with open(self.config_path) as f:
            self.raw_config = json.load(f)

        self._parse_framework()
        self._parse_systems()
        self._parse_components()

        return self

    def _parse_framework(self):
        """Parse framework metadata (KeyError if "framework" key is missing)."""
        fw = self.raw_config["framework"]
        self.framework = FrameworkConfig(**fw)

    def _parse_systems(self):
        """Parse system configurations from the required "systems" list."""
        for sys in self.raw_config["systems"]:
            self.systems.append(SystemConfig(**sys))

    def _parse_components(self):
        """Parse component configurations, grouped under "shared" plus each system key."""
        comps = self.raw_config["components"]

        # Shared components
        self.components["shared"] = {}
        for key, value in comps.get("shared", {}).items():
            self.components["shared"][key] = ComponentConfig(**value)

        # System-specific components.
        # NOTE(review): system keys are hard-coded here instead of being
        # derived from self.systems — confirm they can't diverge.
        for system_key in ["data_flow", "documentation", "execution"]:
            self.components[system_key] = {}
            for comp_key, comp_value in comps.get(system_key, {}).items():
                self.components[system_key][comp_key] = ComponentConfig(**comp_value)

    def get_system(self, key: str) -> Optional[SystemConfig]:
        """Get system config by key, or None when not found."""
        for sys in self.systems:
            if sys.key == key:
                return sys
        return None

    def get_component(
        self, system_key: str, component_key: str
    ) -> Optional[ComponentConfig]:
        """Get a system-specific component config, or None when not found."""
        return self.components.get(system_key, {}).get(component_key)

    def get_shared_component(self, key: str) -> Optional[ComponentConfig]:
        """Get a shared component config, or None when not found."""
        return self.components.get("shared", {}).get(key)
|
||||||
|
|
||||||
|
|
||||||
|
def load_config(config_path: str | Path) -> ConfigLoader:
    """Load and validate a configuration file, returning the ready loader."""
    return ConfigLoader(config_path).load()
|
||||||
20
tools/modelgen/loader/extract/__init__.py
Normal file
20
tools/modelgen/loader/extract/__init__.py
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
"""
|
||||||
|
Extractors - Extract model definitions from existing codebases.
|
||||||
|
|
||||||
|
Supported frameworks:
|
||||||
|
- Django: Extract from Django ORM models
|
||||||
|
- SQLAlchemy: Extract from SQLAlchemy models (planned)
|
||||||
|
- Prisma: Extract from Prisma schema (planned)
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Dict, Type
|
||||||
|
|
||||||
|
from .base import BaseExtractor
|
||||||
|
from .django import DjangoExtractor
|
||||||
|
|
||||||
|
# Registry of available extractors, keyed by framework identifier.
# Callers look up an extractor class here and instantiate it with a
# source path; new extractors register by adding an entry.
EXTRACTORS: Dict[str, Type[BaseExtractor]] = {
    "django": DjangoExtractor,
}

__all__ = ["BaseExtractor", "DjangoExtractor", "EXTRACTORS"]
|
||||||
38
tools/modelgen/loader/extract/base.py
Normal file
38
tools/modelgen/loader/extract/base.py
Normal file
@@ -0,0 +1,38 @@
|
|||||||
|
"""
|
||||||
|
Base Extractor
|
||||||
|
|
||||||
|
Abstract base class for model extractors.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from abc import ABC, abstractmethod
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import List
|
||||||
|
|
||||||
|
from ..schema import EnumDefinition, ModelDefinition
|
||||||
|
|
||||||
|
|
||||||
|
class BaseExtractor(ABC):
    """Abstract base for codebase model extractors.

    Subclasses implement detect() (can this extractor handle the tree at
    source_path?) and extract() (pull model/enum definitions out of it).
    """

    def __init__(self, source_path: Path):
        # Root of the codebase to scan; normalized to a Path.
        self.source_path = Path(source_path)

    @abstractmethod
    def extract(self) -> tuple[List[ModelDefinition], List[EnumDefinition]]:
        """
        Extract model definitions from source codebase.

        Returns:
            Tuple of (models, enums)
        """
        pass

    @abstractmethod
    def detect(self) -> bool:
        """
        Detect if this extractor can handle the source path.

        Returns:
            True if this extractor can handle the source
        """
        pass
|
||||||
237
tools/modelgen/loader/extract/django.py
Normal file
237
tools/modelgen/loader/extract/django.py
Normal file
@@ -0,0 +1,237 @@
|
|||||||
|
"""
|
||||||
|
Django Extractor
|
||||||
|
|
||||||
|
Extracts model definitions from Django ORM models.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import ast
|
||||||
|
import sys
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any, List, Optional
|
||||||
|
|
||||||
|
from ..schema import EnumDefinition, FieldDefinition, ModelDefinition
|
||||||
|
from .base import BaseExtractor
|
||||||
|
|
||||||
|
# Django field type mappings to Python types.
# NOTE: values are heterogeneous by design — either a real Python type
# (str, int, float, bool) or a sentinel string ("UUID", "datetime", "dict",
# "bigint", "FK", "M2M") that downstream type resolvers interpret by name.
DJANGO_FIELD_TYPES = {
    "CharField": str,
    "TextField": str,
    "EmailField": str,
    "URLField": str,
    "SlugField": str,
    "UUIDField": "UUID",
    "IntegerField": int,
    "BigIntegerField": "bigint",
    "SmallIntegerField": int,
    "PositiveIntegerField": int,
    "FloatField": float,
    "DecimalField": float,
    "BooleanField": bool,
    "NullBooleanField": bool,
    "DateField": "datetime",
    "DateTimeField": "datetime",
    "TimeField": "datetime",
    "JSONField": "dict",
    "ForeignKey": "FK",
    "OneToOneField": "FK",
    "ManyToManyField": "M2M",
}
|
||||||
|
|
||||||
|
|
||||||
|
class DjangoExtractor(BaseExtractor):
|
||||||
|
"""Extracts models from Django ORM."""
|
||||||
|
|
||||||
|
def detect(self) -> bool:
|
||||||
|
"""Check if this is a Django project."""
|
||||||
|
# Look for manage.py or settings.py
|
||||||
|
manage_py = self.source_path / "manage.py"
|
||||||
|
settings_py = self.source_path / "settings.py"
|
||||||
|
|
||||||
|
if manage_py.exists():
|
||||||
|
return True
|
||||||
|
|
||||||
|
# Check for Django imports in any models.py
|
||||||
|
for models_file in self.source_path.rglob("models.py"):
|
||||||
|
content = models_file.read_text()
|
||||||
|
if "from django.db import models" in content:
|
||||||
|
return True
|
||||||
|
|
||||||
|
return settings_py.exists()
|
||||||
|
|
||||||
|
def extract(self) -> tuple[List[ModelDefinition], List[EnumDefinition]]:
|
||||||
|
"""Extract Django models using AST parsing."""
|
||||||
|
models = []
|
||||||
|
enums = []
|
||||||
|
|
||||||
|
# Find all models.py files
|
||||||
|
for models_file in self.source_path.rglob("models.py"):
|
||||||
|
file_models, file_enums = self._extract_from_file(models_file)
|
||||||
|
models.extend(file_models)
|
||||||
|
enums.extend(file_enums)
|
||||||
|
|
||||||
|
return models, enums
|
||||||
|
|
||||||
|
    def _extract_from_file(
        self, file_path: Path
    ) -> tuple[List[ModelDefinition], List[EnumDefinition]]:
        """Extract models and choices-enums from a single models.py file.

        Parses the file with ast (no Django runtime required) and classifies
        every class found anywhere in the tree as either a Django model or a
        TextChoices/IntegerChoices enum.
        """
        models = []
        enums = []

        content = file_path.read_text()
        tree = ast.parse(content)

        for node in ast.walk(tree):
            if isinstance(node, ast.ClassDef):
                # Check if it inherits from models.Model
                if self._is_django_model(node):
                    model_def = self._parse_model_class(node)
                    if model_def:
                        models.append(model_def)
                # Check if it's a TextChoices/IntegerChoices enum
                elif self._is_django_choices(node):
                    enum_def = self._parse_choices_class(node)
                    if enum_def:
                        enums.append(enum_def)

        return models, enums
|
||||||
|
|
||||||
|
def _is_django_model(self, node: ast.ClassDef) -> bool:
|
||||||
|
"""Check if class inherits from models.Model."""
|
||||||
|
for base in node.bases:
|
||||||
|
if isinstance(base, ast.Attribute):
|
||||||
|
if base.attr == "Model":
|
||||||
|
return True
|
||||||
|
elif isinstance(base, ast.Name):
|
||||||
|
if base.id in ("Model", "AbstractUser", "AbstractBaseUser"):
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
def _is_django_choices(self, node: ast.ClassDef) -> bool:
|
||||||
|
"""Check if class is a Django TextChoices/IntegerChoices."""
|
||||||
|
for base in node.bases:
|
||||||
|
if isinstance(base, ast.Attribute):
|
||||||
|
if base.attr in ("TextChoices", "IntegerChoices"):
|
||||||
|
return True
|
||||||
|
elif isinstance(base, ast.Name):
|
||||||
|
if base.id in ("TextChoices", "IntegerChoices"):
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
    def _parse_model_class(self, node: ast.ClassDef) -> Optional[ModelDefinition]:
        """Parse a Django model class AST node into a ModelDefinition.

        Collects fields from plain assignments (name = models.CharField(...))
        and annotated assignments, and picks up the class docstring. Always
        returns a ModelDefinition (never None for the inputs seen here),
        even when no fields were recognized.
        """
        fields = []

        for item in node.body:
            if isinstance(item, ast.Assign):
                field_def = self._parse_field_assignment(item)
                if field_def:
                    fields.append(field_def)
            elif isinstance(item, ast.AnnAssign):
                # Handle annotated assignments (Django 4.0+ style)
                field_def = self._parse_annotated_field(item)
                if field_def:
                    fields.append(field_def)

        # Get docstring
        docstring = ast.get_docstring(node)

        return ModelDefinition(
            name=node.name,
            fields=fields,
            docstring=docstring,
        )
|
||||||
|
|
||||||
|
    def _parse_field_assignment(self, node: ast.Assign) -> Optional[FieldDefinition]:
        """Parse a field assignment like: name = models.CharField(...).

        Returns None for non-Name targets, private names, the Meta inner
        class marker, and values that are not call expressions.
        """
        if not node.targets or not isinstance(node.targets[0], ast.Name):
            return None

        field_name = node.targets[0].id

        # Skip private fields and Meta class
        if field_name.startswith("_") or field_name == "Meta":
            return None

        # Parse the field call
        if isinstance(node.value, ast.Call):
            return self._parse_field_call(field_name, node.value)

        return None
|
||||||
|
|
||||||
|
    def _parse_annotated_field(self, node: ast.AnnAssign) -> Optional[FieldDefinition]:
        """Parse an annotated field assignment (name: T = models.XField(...)).

        Mirrors _parse_field_assignment but for ast.AnnAssign nodes; returns
        None for private names or when there is no call-expression value.
        """
        if not isinstance(node.target, ast.Name):
            return None

        field_name = node.target.id

        if field_name.startswith("_"):
            return None

        if node.value and isinstance(node.value, ast.Call):
            return self._parse_field_call(field_name, node.value)

        return None
|
||||||
|
|
||||||
|
def _parse_field_call(
    self, field_name: str, call: ast.Call
) -> Optional[FieldDefinition]:
    """Parse a Django field call like models.CharField(max_length=100).

    Returns None when the callee cannot be resolved to a simple name
    (e.g. a subscripted or computed callable).
    """
    # Resolve the field type name from either `models.CharField(...)`
    # (attribute access) or a bare `CharField(...)` call.
    if isinstance(call.func, ast.Attribute):
        field_type_name = call.func.attr
    elif isinstance(call.func, ast.Name):
        field_type_name = call.func.id
    else:
        field_type_name = None

    if not field_type_name:
        return None

    # Map the Django field type to a Python type; unknown field types
    # fall back to str.
    python_type = DJANGO_FIELD_TYPES.get(field_type_name, str)

    # Extract null=/default= keyword arguments. Only literal constants are
    # recognized; expressions (e.g. default=list) are ignored.
    optional = False
    default = None
    for keyword in call.keywords:
        if keyword.arg == "null":
            if isinstance(keyword.value, ast.Constant):
                optional = keyword.value.value is True
        elif keyword.arg == "default":
            if isinstance(keyword.value, ast.Constant):
                default = keyword.value.value

    return FieldDefinition(
        name=field_name,
        type_hint=python_type,
        # Fix over original: `default if default is not None else None` was a
        # redundant no-op — it is exactly `default`. NOTE(review): a literal
        # `default=None` in the source is still indistinguishable from "no
        # default" here; confirm whether that matters downstream.
        default=default,
        optional=optional,
    )
|
||||||
|
|
||||||
|
def _parse_choices_class(self, node: ast.ClassDef) -> Optional[EnumDefinition]:
    """Parse a Django TextChoices/IntegerChoices class."""
    members = []
    for stmt in node.body:
        if not isinstance(stmt, ast.Assign):
            continue
        if not stmt.targets or not isinstance(stmt.targets[0], ast.Name):
            continue

        member_name = stmt.targets[0].id
        # Enum members are conventionally uppercase; skip everything else.
        if not member_name.isupper():
            continue

        # Default the value to the lowercased member name, then override it
        # from the assignment when a literal is available.
        member_value = member_name.lower()
        if isinstance(stmt.value, ast.Constant):
            member_value = str(stmt.value.value)
        elif isinstance(stmt.value, ast.Tuple) and stmt.value.elts:
            # TextChoices style: NAME = "value", "Label"
            first = stmt.value.elts[0]
            if isinstance(first, ast.Constant):
                member_value = str(first.value)

        members.append((member_name, member_value))

    if not members:
        return None

    return EnumDefinition(name=node.name, values=members)
|
||||||
169
tools/modelgen/loader/schema.py
Normal file
169
tools/modelgen/loader/schema.py
Normal file
@@ -0,0 +1,169 @@
|
|||||||
|
"""
|
||||||
|
Schema Loader
|
||||||
|
|
||||||
|
Loads Python dataclasses from a schema/ folder.
|
||||||
|
Expects the folder to have an __init__.py that exports:
|
||||||
|
- DATACLASSES: List of dataclass types to generate
|
||||||
|
- ENUMS: List of Enum types to include
|
||||||
|
- GRPC_MESSAGES: (optional) List of gRPC message types
|
||||||
|
- GRPC_SERVICE: (optional) gRPC service definition dict
|
||||||
|
"""
|
||||||
|
|
||||||
|
import dataclasses as dc
|
||||||
|
import importlib.util
|
||||||
|
import sys
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from enum import Enum
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any, Dict, List, Optional, Type, get_type_hints
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class FieldDefinition:
    """Represents a model field."""

    # Field name as it appears on the model.
    name: str
    # Python type object or a string marker understood by the generators.
    type_hint: Any
    # Default value; dataclasses.MISSING is the "no default" sentinel so an
    # explicit None default can be distinguished from no default at all.
    default: Any = dc.MISSING
    # True when the field may be None (the source hint is a union with None).
    optional: bool = False
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class ModelDefinition:
    """Represents a model/dataclass."""

    # Class name used for the generated model.
    name: str
    # Field definitions in declaration order.
    fields: List[FieldDefinition]
    # Original class docstring, if any.
    docstring: Optional[str] = None
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class EnumDefinition:
    """Represents an enum."""

    name: str
    # (member_name, member_value) pairs in declaration order.
    values: List[tuple[str, str]]
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class GrpcServiceDefinition:
    """Represents a gRPC service."""

    # Protobuf package name.
    package: str
    # Service name.
    name: str
    # Method descriptors as raw dicts (shape is whatever the schema's
    # GRPC_SERVICE entry provides — confirm keys against the proto generator).
    methods: List[Dict[str, Any]]
|
||||||
|
|
||||||
|
|
||||||
|
class SchemaLoader:
    """Loads model definitions from Python dataclasses in schema/ folder.

    The schema package's __init__.py is imported dynamically and its exported
    DATACLASSES / ENUMS / GRPC_MESSAGES / GRPC_SERVICE names are parsed into
    framework-neutral definition objects.
    """

    def __init__(self, schema_path: Path):
        self.schema_path = Path(schema_path)
        # Populated by load():
        self.models: List[ModelDefinition] = []
        self.enums: List[EnumDefinition] = []
        self.grpc_messages: List[ModelDefinition] = []
        self.grpc_service: Optional[GrpcServiceDefinition] = None

    def load(self) -> "SchemaLoader":
        """Load schema definitions from the schema folder.

        Returns:
            self, enabling ``SchemaLoader(p).load()`` chaining.

        Raises:
            FileNotFoundError: if the schema folder has no __init__.py.
            ImportError: if the module spec cannot be created.
        """
        init_path = self.schema_path / "__init__.py"

        if not init_path.exists():
            raise FileNotFoundError(f"Schema folder must have __init__.py: {init_path}")

        # Import the schema module dynamically.
        module = self._import_module(init_path)

        # DATACLASSES: the models to generate.
        for cls in getattr(module, "DATACLASSES", []):
            self.models.append(self._parse_dataclass(cls))

        # ENUMS: enum types referenced by the models.
        for enum_cls in getattr(module, "ENUMS", []):
            self.enums.append(self._parse_enum(enum_cls))

        # GRPC_MESSAGES (optional): message payload dataclasses.
        for cls in getattr(module, "GRPC_MESSAGES", []):
            self.grpc_messages.append(self._parse_dataclass(cls))

        # GRPC_SERVICE (optional): a plain dict describing the service.
        grpc_service = getattr(module, "GRPC_SERVICE", None)
        if grpc_service:
            self.grpc_service = GrpcServiceDefinition(
                package=grpc_service.get("package", "service"),
                name=grpc_service.get("name", "Service"),
                methods=grpc_service.get("methods", []),
            )

        return self

    def _import_module(self, path: Path):
        """Import a Python module from a file path.

        NOTE(review): the module is registered under the fixed name "schema"
        in sys.modules, clobbering any previously loaded module of that name.
        Presumably this lets get_type_hints resolve references within the
        schema package — confirm the fixed name is intentional.
        """
        spec = importlib.util.spec_from_file_location("schema", path)
        if spec is None or spec.loader is None:
            raise ImportError(f"Could not load module from {path}")

        module = importlib.util.module_from_spec(spec)
        sys.modules["schema"] = module
        spec.loader.exec_module(module)
        return module

    def _parse_dataclass(self, cls: Type) -> ModelDefinition:
        """Parse a dataclass into a ModelDefinition."""
        hints = get_type_hints(cls)
        fields_info = {f.name: f for f in dc.fields(cls)}

        fields = []
        for name, type_hint in hints.items():
            if name.startswith("_"):
                continue

            field_info = fields_info.get(name)
            default = dc.MISSING
            if field_info:
                if field_info.default is not dc.MISSING:
                    default = field_info.default
                elif field_info.default_factory is not dc.MISSING:
                    # NOTE(review): the factory callable itself is stored,
                    # not its result — generators must handle callables here.
                    # Confirm downstream behavior.
                    default = field_info.default_factory

            # Optional means the hint is a union that includes None.
            optional = self._is_optional(type_hint)

            fields.append(
                FieldDefinition(
                    name=name,
                    type_hint=type_hint,
                    default=default,
                    optional=optional,
                )
            )

        return ModelDefinition(
            name=cls.__name__,
            fields=fields,
            docstring=cls.__doc__,
        )

    def _parse_enum(self, enum_cls: Type[Enum]) -> EnumDefinition:
        """Parse an Enum into an EnumDefinition."""
        values = [(m.name, m.value) for m in enum_cls]
        return EnumDefinition(name=enum_cls.__name__, values=values)

    def _is_optional(self, type_hint: Any) -> bool:
        """Check if a type hint is Optional (a union that includes None).

        Fix over the original: PEP 604 unions (``X | None``) have origin
        ``types.UnionType`` rather than ``typing.Union``, so they were
        previously reported as non-optional. Both spellings are handled now.
        """
        import types as _types
        from typing import Union, get_args, get_origin

        origin = get_origin(type_hint)
        union_type = getattr(_types, "UnionType", None)  # absent before 3.10
        if origin is Union or (union_type is not None and origin is union_type):
            return type(None) in get_args(type_hint)
        return False
|
||||||
|
|
||||||
|
|
||||||
|
def load_schema(schema_path: str | Path) -> SchemaLoader:
    """Load schema definitions from folder."""
    return SchemaLoader(schema_path).load()
|
||||||
77
tools/modelgen/model_generator.py
Normal file
77
tools/modelgen/model_generator.py
Normal file
@@ -0,0 +1,77 @@
|
|||||||
|
"""
|
||||||
|
Model Generator
|
||||||
|
|
||||||
|
Orchestrates model generation from various sources to various formats.
|
||||||
|
Delegates to loaders for input and generators for output.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Dict, Type
|
||||||
|
|
||||||
|
from .generator import GENERATORS, BaseGenerator
|
||||||
|
from .loader import ConfigLoader
|
||||||
|
|
||||||
|
|
||||||
|
class ModelGenerator:
    """
    Generates typed models from configuration.

    Main entry point for model generation; all format-specific work is
    delegated to the generator registered under ``output_format`` in the
    ``GENERATORS`` registry.
    """

    def __init__(
        self,
        config: ConfigLoader,
        output_path: Path,
        output_format: str = "pydantic",
    ):
        """
        Initialize the generator.

        Args:
            config: Loaded configuration
            output_path: Exact path where to write (file or directory depending on format)
            output_format: Output format (pydantic, django, prisma, typescript, protobuf)
        """
        self.config = config
        self.output_path = Path(output_path)
        self.output_format = output_format

        # Fail fast on an unregistered format.
        if output_format not in GENERATORS:
            raise ValueError(
                f"Unknown output format: {output_format}. "
                f"Available: {list(GENERATORS.keys())}"
            )

        self.generator = GENERATORS[output_format]()

    def generate(self) -> Path:
        """
        Generate models to the specified output path.

        Returns:
            Path to the generated file/directory
        """
        # A path with a suffix is treated as an explicit file; otherwise it is
        # a directory and the generator's default filename is appended.
        if self.output_path.suffix:
            target = self.output_path
        else:
            target = self.output_path / f"__init__{self.generator.file_extension()}"

        self.generator.generate(self.config, target)
        print(f"Generated {self.output_format} models: {target}")
        return target

    @classmethod
    def available_formats(cls) -> list:
        """Return list of available output formats."""
        return [*GENERATORS]
|
||||||
|
|
||||||
|
|
||||||
|
# Re-export for backwards compatibility: WRITERS is an alias of
# generator.GENERATORS; new code should import GENERATORS directly.
WRITERS = GENERATORS
|
||||||
0
tools/modelgen/schema/.gitkeep
Normal file
0
tools/modelgen/schema/.gitkeep
Normal file
139
tools/modelgen/types.py
Normal file
139
tools/modelgen/types.py
Normal file
@@ -0,0 +1,139 @@
|
|||||||
|
"""
|
||||||
|
Type Dispatch Tables
|
||||||
|
|
||||||
|
Type mappings for each output format.
|
||||||
|
Used by generators to convert Python types to target framework types.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Any, Callable, get_args
|
||||||
|
|
||||||
|
# =============================================================================
|
||||||
|
# Django Type Mappings
|
||||||
|
# =============================================================================
|
||||||
|
|
||||||
|
# Django field templates, keyed by either a Python type object (str, int, ...)
# or a string marker ("UUID", "text", ...). Values are format strings whose
# {max_length}/{opts}/{default} placeholders are filled in by the Django
# generator before emitting the field.
DJANGO_TYPES: dict[Any, str] = {
    str: "models.CharField(max_length={max_length}{opts})",
    int: "models.IntegerField({opts})",
    float: "models.FloatField({opts})",
    bool: "models.BooleanField(default={default})",
    "UUID": "models.UUIDField({opts})",
    "datetime": "models.DateTimeField({opts})",
    "dict": "models.JSONField(default=dict, blank=True)",
    "list": "models.JSONField(default=list, blank=True)",
    "text": "models.TextField(blank=True, default='')",
    "bigint": "models.BigIntegerField({opts})",
    # NOTE(review): hard-codes `Status.choices` — assumes every enum field
    # uses an enum named Status; confirm against the Django generator.
    "enum": "models.CharField(max_length=20, choices=Status.choices{opts})",
}

# Field names with fixed, conventional Django definitions that override the
# type-based mapping above.
DJANGO_SPECIAL: dict[str, str] = {
    "id": "models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)",
    "created_at": "models.DateTimeField(auto_now_add=True)",
    "updated_at": "models.DateTimeField(auto_now=True)",
}
|
||||||
|
|
||||||
|
# =============================================================================
|
||||||
|
# Pydantic Type Resolvers
|
||||||
|
# =============================================================================
|
||||||
|
|
||||||
|
|
||||||
|
def _get_list_inner(type_hint: Any) -> str:
|
||||||
|
"""Get inner type of List[T] for Pydantic."""
|
||||||
|
args = get_args(type_hint)
|
||||||
|
if args and args[0] in (str, int, float, bool):
|
||||||
|
return {str: "str", int: "int", float: "float", bool: "bool"}[args[0]]
|
||||||
|
return "str"
|
||||||
|
|
||||||
|
|
||||||
|
# Maps a type key (type object or string marker) to a resolver callable; the
# resolver receives the original type hint and returns the Python/Pydantic
# type name to emit.
PYDANTIC_RESOLVERS: dict[Any, Callable[[Any], str]] = {
    str: lambda _: "str",
    int: lambda _: "int",
    float: lambda _: "float",
    bool: lambda _: "bool",
    "UUID": lambda _: "UUID",
    "datetime": lambda _: "datetime",
    "dict": lambda _: "Dict[str, Any]",
    # The full hint is needed here to recover List[T]'s inner type.
    "list": lambda base: f"List[{_get_list_inner(base)}]",
    # Enums are referenced by their class name.
    "enum": lambda base: base.__name__,
}
|
||||||
|
|
||||||
|
# =============================================================================
|
||||||
|
# TypeScript Type Resolvers
|
||||||
|
# =============================================================================
|
||||||
|
|
||||||
|
|
||||||
|
def _resolve_ts_list(base: Any) -> str:
|
||||||
|
"""Resolve TypeScript list type."""
|
||||||
|
args = get_args(base)
|
||||||
|
if args:
|
||||||
|
inner = args[0]
|
||||||
|
if inner is str:
|
||||||
|
return "string[]"
|
||||||
|
elif inner is int or inner is float:
|
||||||
|
return "number[]"
|
||||||
|
elif inner is bool:
|
||||||
|
return "boolean[]"
|
||||||
|
return "string[]"
|
||||||
|
|
||||||
|
|
||||||
|
# TypeScript type resolvers, keyed like PYDANTIC_RESOLVERS. UUIDs and
# datetimes are emitted as plain strings on the TypeScript side.
TS_RESOLVERS: dict[Any, Callable[[Any], str]] = {
    str: lambda _: "string",
    int: lambda _: "number",
    float: lambda _: "number",
    bool: lambda _: "boolean",
    "UUID": lambda _: "string",
    "datetime": lambda _: "string",
    "dict": lambda _: "Record<string, unknown>",
    "list": _resolve_ts_list,
    "enum": lambda base: base.__name__,
}
|
||||||
|
|
||||||
|
# =============================================================================
|
||||||
|
# Protobuf Type Resolvers
|
||||||
|
# =============================================================================
|
||||||
|
|
||||||
|
|
||||||
|
def _resolve_proto_list(base: Any) -> str:
|
||||||
|
"""Resolve Protobuf repeated type."""
|
||||||
|
args = get_args(base)
|
||||||
|
if args:
|
||||||
|
inner = args[0]
|
||||||
|
if inner is str:
|
||||||
|
return "repeated string"
|
||||||
|
elif inner is int:
|
||||||
|
return "repeated int32"
|
||||||
|
elif inner is float:
|
||||||
|
return "repeated float"
|
||||||
|
elif inner is bool:
|
||||||
|
return "repeated bool"
|
||||||
|
return "repeated string"
|
||||||
|
|
||||||
|
|
||||||
|
# Protobuf type resolvers. Only scalar and repeated-scalar types are mapped;
# other keys present in the sibling tables (UUID, datetime, dict, enum) are
# intentionally absent here.
PROTO_RESOLVERS: dict[Any, Callable[[Any], str]] = {
    str: lambda _: "string",
    int: lambda _: "int32",
    float: lambda _: "float",
    bool: lambda _: "bool",
    "list": _resolve_proto_list,
}
|
||||||
|
|
||||||
|
# =============================================================================
|
||||||
|
# Prisma Type Mappings
|
||||||
|
# =============================================================================
|
||||||
|
|
||||||
|
# Prisma scalar type mappings, keyed by Python type object or string marker.
PRISMA_TYPES: dict[Any, str] = {
    str: "String",
    int: "Int",
    float: "Float",
    bool: "Boolean",
    "UUID": "String @default(uuid())",
    "datetime": "DateTime",
    "dict": "Json",
    "list": "Json",
    "bigint": "BigInt",
}

# Field names with fixed Prisma attribute definitions that override the
# type-based mapping above.
PRISMA_SPECIAL: dict[str, str] = {
    "id": "String @id @default(uuid())",
    "created_at": "DateTime @default(now())",
    "updated_at": "DateTime @updatedAt",
}
|
||||||
7
tools/modelgen/writer/__init__.py
Normal file
7
tools/modelgen/writer/__init__.py
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
"""
|
||||||
|
Writer - File writing utilities for modelgen.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from .file import write_file, write_multiple
|
||||||
|
|
||||||
|
__all__ = ["write_file", "write_multiple"]
|
||||||
30
tools/modelgen/writer/file.py
Normal file
30
tools/modelgen/writer/file.py
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
"""
|
||||||
|
File Writer
|
||||||
|
|
||||||
|
Utilities for writing generated files to disk.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Dict
|
||||||
|
|
||||||
|
|
||||||
|
def write_file(path: Path, content: str) -> None:
    """Write *content* to *path* as UTF-8, creating parent directories.

    Args:
        path: Destination file path (str or Path); created or truncated.
        content: Text to write.
    """
    target = Path(path)
    target.parent.mkdir(parents=True, exist_ok=True)
    # Fix: pin the encoding. Path.write_text otherwise uses the
    # locale-dependent platform default, which makes generated files with
    # non-ASCII content non-deterministic across systems (notably Windows).
    target.write_text(content, encoding="utf-8")
|
||||||
|
|
||||||
|
|
||||||
|
def write_multiple(directory: Path, files: Dict[str, str]) -> None:
    """Write multiple generated files into *directory* as UTF-8.

    Args:
        directory: Target directory (created if missing)
        files: Dict mapping filename to content
    """
    directory = Path(directory)
    directory.mkdir(parents=True, exist_ok=True)

    for filename, content in files.items():
        file_path = directory / filename
        # Fix: create intermediate directories so nested names like
        # "sub/module.py" work (the original raised FileNotFoundError), and
        # pin UTF-8 to match write_file's deterministic output.
        file_path.parent.mkdir(parents=True, exist_ok=True)
        file_path.write_text(content, encoding="utf-8")
|
||||||
0
tools/schema/.gitkeep
Normal file
0
tools/schema/.gitkeep
Normal file
139
tools/types.py
Normal file
139
tools/types.py
Normal file
@@ -0,0 +1,139 @@
|
|||||||
|
"""
|
||||||
|
Type Dispatch Tables
|
||||||
|
|
||||||
|
Type mappings for each output format.
|
||||||
|
Used by generators to convert Python types to target framework types.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Any, Callable, get_args
|
||||||
|
|
||||||
|
# =============================================================================
|
||||||
|
# Django Type Mappings
|
||||||
|
# =============================================================================
|
||||||
|
|
||||||
|
DJANGO_TYPES: dict[Any, str] = {
|
||||||
|
str: "models.CharField(max_length={max_length}{opts})",
|
||||||
|
int: "models.IntegerField({opts})",
|
||||||
|
float: "models.FloatField({opts})",
|
||||||
|
bool: "models.BooleanField(default={default})",
|
||||||
|
"UUID": "models.UUIDField({opts})",
|
||||||
|
"datetime": "models.DateTimeField({opts})",
|
||||||
|
"dict": "models.JSONField(default=dict, blank=True)",
|
||||||
|
"list": "models.JSONField(default=list, blank=True)",
|
||||||
|
"text": "models.TextField(blank=True, default='')",
|
||||||
|
"bigint": "models.BigIntegerField({opts})",
|
||||||
|
"enum": "models.CharField(max_length=20, choices=Status.choices{opts})",
|
||||||
|
}
|
||||||
|
|
||||||
|
DJANGO_SPECIAL: dict[str, str] = {
|
||||||
|
"id": "models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)",
|
||||||
|
"created_at": "models.DateTimeField(auto_now_add=True)",
|
||||||
|
"updated_at": "models.DateTimeField(auto_now=True)",
|
||||||
|
}
|
||||||
|
|
||||||
|
# =============================================================================
|
||||||
|
# Pydantic Type Resolvers
|
||||||
|
# =============================================================================
|
||||||
|
|
||||||
|
|
||||||
|
def _get_list_inner(type_hint: Any) -> str:
|
||||||
|
"""Get inner type of List[T] for Pydantic."""
|
||||||
|
args = get_args(type_hint)
|
||||||
|
if args and args[0] in (str, int, float, bool):
|
||||||
|
return {str: "str", int: "int", float: "float", bool: "bool"}[args[0]]
|
||||||
|
return "str"
|
||||||
|
|
||||||
|
|
||||||
|
PYDANTIC_RESOLVERS: dict[Any, Callable[[Any], str]] = {
|
||||||
|
str: lambda _: "str",
|
||||||
|
int: lambda _: "int",
|
||||||
|
float: lambda _: "float",
|
||||||
|
bool: lambda _: "bool",
|
||||||
|
"UUID": lambda _: "UUID",
|
||||||
|
"datetime": lambda _: "datetime",
|
||||||
|
"dict": lambda _: "Dict[str, Any]",
|
||||||
|
"list": lambda base: f"List[{_get_list_inner(base)}]",
|
||||||
|
"enum": lambda base: base.__name__,
|
||||||
|
}
|
||||||
|
|
||||||
|
# =============================================================================
|
||||||
|
# TypeScript Type Resolvers
|
||||||
|
# =============================================================================
|
||||||
|
|
||||||
|
|
||||||
|
def _resolve_ts_list(base: Any) -> str:
|
||||||
|
"""Resolve TypeScript list type."""
|
||||||
|
args = get_args(base)
|
||||||
|
if args:
|
||||||
|
inner = args[0]
|
||||||
|
if inner is str:
|
||||||
|
return "string[]"
|
||||||
|
elif inner is int or inner is float:
|
||||||
|
return "number[]"
|
||||||
|
elif inner is bool:
|
||||||
|
return "boolean[]"
|
||||||
|
return "string[]"
|
||||||
|
|
||||||
|
|
||||||
|
TS_RESOLVERS: dict[Any, Callable[[Any], str]] = {
|
||||||
|
str: lambda _: "string",
|
||||||
|
int: lambda _: "number",
|
||||||
|
float: lambda _: "number",
|
||||||
|
bool: lambda _: "boolean",
|
||||||
|
"UUID": lambda _: "string",
|
||||||
|
"datetime": lambda _: "string",
|
||||||
|
"dict": lambda _: "Record<string, unknown>",
|
||||||
|
"list": _resolve_ts_list,
|
||||||
|
"enum": lambda base: base.__name__,
|
||||||
|
}
|
||||||
|
|
||||||
|
# =============================================================================
|
||||||
|
# Protobuf Type Resolvers
|
||||||
|
# =============================================================================
|
||||||
|
|
||||||
|
|
||||||
|
def _resolve_proto_list(base: Any) -> str:
|
||||||
|
"""Resolve Protobuf repeated type."""
|
||||||
|
args = get_args(base)
|
||||||
|
if args:
|
||||||
|
inner = args[0]
|
||||||
|
if inner is str:
|
||||||
|
return "repeated string"
|
||||||
|
elif inner is int:
|
||||||
|
return "repeated int32"
|
||||||
|
elif inner is float:
|
||||||
|
return "repeated float"
|
||||||
|
elif inner is bool:
|
||||||
|
return "repeated bool"
|
||||||
|
return "repeated string"
|
||||||
|
|
||||||
|
|
||||||
|
PROTO_RESOLVERS: dict[Any, Callable[[Any], str]] = {
|
||||||
|
str: lambda _: "string",
|
||||||
|
int: lambda _: "int32",
|
||||||
|
float: lambda _: "float",
|
||||||
|
bool: lambda _: "bool",
|
||||||
|
"list": _resolve_proto_list,
|
||||||
|
}
|
||||||
|
|
||||||
|
# =============================================================================
|
||||||
|
# Prisma Type Mappings
|
||||||
|
# =============================================================================
|
||||||
|
|
||||||
|
PRISMA_TYPES: dict[Any, str] = {
|
||||||
|
str: "String",
|
||||||
|
int: "Int",
|
||||||
|
float: "Float",
|
||||||
|
bool: "Boolean",
|
||||||
|
"UUID": "String @default(uuid())",
|
||||||
|
"datetime": "DateTime",
|
||||||
|
"dict": "Json",
|
||||||
|
"list": "Json",
|
||||||
|
"bigint": "BigInt",
|
||||||
|
}
|
||||||
|
|
||||||
|
PRISMA_SPECIAL: dict[str, str] = {
|
||||||
|
"id": "String @id @default(uuid())",
|
||||||
|
"created_at": "DateTime @default(now())",
|
||||||
|
"updated_at": "DateTime @updatedAt",
|
||||||
|
}
|
||||||
7
tools/writer/__init__.py
Normal file
7
tools/writer/__init__.py
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
"""
|
||||||
|
Writer - File writing utilities for modelgen.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from .file import write_file, write_multiple
|
||||||
|
|
||||||
|
__all__ = ["write_file", "write_multiple"]
|
||||||
30
tools/writer/file.py
Normal file
30
tools/writer/file.py
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
"""
|
||||||
|
File Writer
|
||||||
|
|
||||||
|
Utilities for writing generated files to disk.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Dict
|
||||||
|
|
||||||
|
|
||||||
|
def write_file(path: Path, content: str) -> None:
|
||||||
|
"""Write content to file, creating directories as needed."""
|
||||||
|
path = Path(path)
|
||||||
|
path.parent.mkdir(parents=True, exist_ok=True)
|
||||||
|
path.write_text(content)
|
||||||
|
|
||||||
|
|
||||||
|
def write_multiple(directory: Path, files: Dict[str, str]) -> None:
|
||||||
|
"""Write multiple files to a directory.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
directory: Target directory
|
||||||
|
files: Dict mapping filename to content
|
||||||
|
"""
|
||||||
|
directory = Path(directory)
|
||||||
|
directory.mkdir(parents=True, exist_ok=True)
|
||||||
|
|
||||||
|
for filename, content in files.items():
|
||||||
|
file_path = directory / filename
|
||||||
|
file_path.write_text(content)
|
||||||
@@ -1,8 +1,7 @@
|
|||||||
/**
|
/**
|
||||||
* MPR TypeScript Types - GENERATED FILE
|
* TypeScript Types - GENERATED FILE
|
||||||
*
|
*
|
||||||
* Do not edit directly. Modify schema/models/*.py and run:
|
* Do not edit directly. Regenerate using modelgen.
|
||||||
* python schema/generate.py --typescript
|
|
||||||
*/
|
*/
|
||||||
|
|
||||||
export type AssetStatus = "pending" | "ready" | "error";
|
export type AssetStatus = "pending" | "ready" | "error";
|
||||||
@@ -72,25 +71,3 @@ export interface TranscodeJob {
|
|||||||
started_at: string | null;
|
started_at: string | null;
|
||||||
completed_at: string | null;
|
completed_at: string | null;
|
||||||
}
|
}
|
||||||
|
|
||||||
// API Request/Response Types
|
|
||||||
|
|
||||||
export interface CreateJobRequest {
|
|
||||||
source_asset_id: string;
|
|
||||||
preset_id: string | null;
|
|
||||||
trim_start: number | null;
|
|
||||||
trim_end: number | null;
|
|
||||||
output_filename: string | null;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface SystemStatus {
|
|
||||||
status: string;
|
|
||||||
version: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface WorkerStatus {
|
|
||||||
available: boolean;
|
|
||||||
active_jobs: number;
|
|
||||||
supported_codecs: string[];
|
|
||||||
gpu_available: boolean;
|
|
||||||
}
|
|
||||||
|
|||||||
Reference in New Issue
Block a user