Compare commits
1 Commits
8f5d407e0e
...
aws-int
| Author | SHA1 | Date | |
|---|---|---|---|
| 72e4113529 |
@@ -3,14 +3,14 @@ GraphQL API using graphene, mounted on FastAPI/Starlette.
|
||||
|
||||
Provides the same data as the REST API but via GraphQL queries and mutations.
|
||||
Uses Django ORM directly for data access.
|
||||
Types are generated from schema/ via modelgen — see api/schemas/graphql_types.py.
|
||||
Types are generated from schema/ via modelgen — see api/schema/graphql.py.
|
||||
"""
|
||||
|
||||
import os
|
||||
|
||||
import graphene
|
||||
|
||||
from api.schemas.graphql import (
|
||||
from api.schema.graphql import (
|
||||
CreateJobInput,
|
||||
MediaAssetType,
|
||||
ScanResultType,
|
||||
|
||||
@@ -8,7 +8,7 @@ from uuid import UUID
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query
|
||||
|
||||
from api.deps import get_asset
|
||||
from api.schemas import AssetCreate, AssetResponse, AssetUpdate
|
||||
from api.schema import AssetCreate, AssetResponse, AssetUpdate
|
||||
from core.storage import BUCKET_IN, list_objects
|
||||
|
||||
router = APIRouter(prefix="/assets", tags=["assets"])
|
||||
|
||||
@@ -10,7 +10,7 @@ from uuid import UUID
|
||||
from fastapi import APIRouter, Depends, Header, HTTPException, Query
|
||||
|
||||
from api.deps import get_asset, get_job, get_preset
|
||||
from api.schemas import JobCreate, JobResponse
|
||||
from api.schema import JobCreate, JobResponse
|
||||
|
||||
router = APIRouter(prefix="/jobs", tags=["jobs"])
|
||||
|
||||
|
||||
@@ -7,7 +7,7 @@ from uuid import UUID
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
|
||||
from api.deps import get_preset
|
||||
from api.schemas import PresetCreate, PresetResponse, PresetUpdate
|
||||
from api.schema import PresetCreate, PresetResponse, PresetUpdate
|
||||
|
||||
router = APIRouter(prefix="/presets", tags=["presets"])
|
||||
|
||||
|
||||
@@ -1,16 +1,21 @@
|
||||
"""MediaAsset Schemas - GENERATED FILE"""
|
||||
|
||||
from datetime import datetime
|
||||
from enum import Enum
|
||||
from typing import Any, Dict, List, Optional
|
||||
from uuid import UUID
|
||||
|
||||
from .base import BaseSchema
|
||||
from .models import AssetStatus
|
||||
|
||||
|
||||
class AssetStatus(str, Enum):
|
||||
PENDING = "pending"
|
||||
READY = "ready"
|
||||
ERROR = "error"
|
||||
|
||||
|
||||
class AssetCreate(BaseSchema):
|
||||
"""AssetCreate schema."""
|
||||
|
||||
filename: str
|
||||
file_path: str
|
||||
file_size: Optional[int] = None
|
||||
@@ -23,12 +28,10 @@ class AssetCreate(BaseSchema):
|
||||
bitrate: Optional[int] = None
|
||||
properties: Dict[str, Any]
|
||||
comments: str = ""
|
||||
tags: List[str]
|
||||
|
||||
tags: List[str] = Field(default_factory=list)
|
||||
|
||||
class AssetUpdate(BaseSchema):
|
||||
"""AssetUpdate schema."""
|
||||
|
||||
filename: Optional[str] = None
|
||||
file_path: Optional[str] = None
|
||||
status: Optional[AssetStatus] = None
|
||||
@@ -45,10 +48,8 @@ class AssetUpdate(BaseSchema):
|
||||
comments: Optional[str] = None
|
||||
tags: Optional[List[str]] = None
|
||||
|
||||
|
||||
class AssetResponse(BaseSchema):
|
||||
"""AssetResponse schema."""
|
||||
|
||||
id: UUID
|
||||
filename: str
|
||||
file_path: str
|
||||
@@ -64,6 +65,6 @@ class AssetResponse(BaseSchema):
|
||||
bitrate: Optional[int] = None
|
||||
properties: Dict[str, Any]
|
||||
comments: str = ""
|
||||
tags: List[str]
|
||||
tags: List[str] = Field(default_factory=list)
|
||||
created_at: Optional[datetime] = None
|
||||
updated_at: Optional[datetime] = None
|
||||
@@ -1,27 +1,43 @@
|
||||
"""TranscodeJob Schemas - GENERATED FILE"""
|
||||
|
||||
from datetime import datetime
|
||||
from enum import Enum
|
||||
from typing import Any, Dict, List, Optional
|
||||
from uuid import UUID
|
||||
|
||||
from .base import BaseSchema
|
||||
from .models import JobStatus
|
||||
|
||||
|
||||
class JobStatus(str, Enum):
|
||||
PENDING = "pending"
|
||||
PROCESSING = "processing"
|
||||
COMPLETED = "completed"
|
||||
FAILED = "failed"
|
||||
CANCELLED = "cancelled"
|
||||
|
||||
|
||||
class JobCreate(BaseSchema):
|
||||
"""Client-facing job creation request."""
|
||||
|
||||
"""JobCreate schema."""
|
||||
source_asset_id: UUID
|
||||
preset_id: Optional[UUID] = None
|
||||
preset_snapshot: Dict[str, Any]
|
||||
trim_start: Optional[float] = None
|
||||
trim_end: Optional[float] = None
|
||||
output_filename: Optional[str] = None
|
||||
output_filename: str = ""
|
||||
output_path: Optional[str] = None
|
||||
output_asset_id: Optional[UUID] = None
|
||||
progress: float = 0.0
|
||||
current_frame: Optional[int] = None
|
||||
current_time: Optional[float] = None
|
||||
speed: Optional[str] = None
|
||||
celery_task_id: Optional[str] = None
|
||||
execution_arn: Optional[str] = None
|
||||
priority: int = 0
|
||||
|
||||
started_at: Optional[datetime] = None
|
||||
completed_at: Optional[datetime] = None
|
||||
|
||||
class JobUpdate(BaseSchema):
|
||||
"""JobUpdate schema."""
|
||||
|
||||
source_asset_id: Optional[UUID] = None
|
||||
preset_id: Optional[UUID] = None
|
||||
preset_snapshot: Optional[Dict[str, Any]] = None
|
||||
@@ -37,14 +53,13 @@ class JobUpdate(BaseSchema):
|
||||
speed: Optional[str] = None
|
||||
error_message: Optional[str] = None
|
||||
celery_task_id: Optional[str] = None
|
||||
execution_arn: Optional[str] = None
|
||||
priority: Optional[int] = None
|
||||
started_at: Optional[datetime] = None
|
||||
completed_at: Optional[datetime] = None
|
||||
|
||||
|
||||
class JobResponse(BaseSchema):
|
||||
"""JobResponse schema."""
|
||||
|
||||
id: UUID
|
||||
source_asset_id: UUID
|
||||
preset_id: Optional[UUID] = None
|
||||
@@ -61,6 +76,7 @@ class JobResponse(BaseSchema):
|
||||
speed: Optional[str] = None
|
||||
error_message: Optional[str] = None
|
||||
celery_task_id: Optional[str] = None
|
||||
execution_arn: Optional[str] = None
|
||||
priority: int = 0
|
||||
created_at: Optional[datetime] = None
|
||||
started_at: Optional[datetime] = None
|
||||
@@ -24,7 +24,7 @@ class PresetCreate(BaseSchema):
|
||||
audio_bitrate: Optional[str] = None
|
||||
audio_channels: Optional[int] = None
|
||||
audio_samplerate: Optional[int] = None
|
||||
extra_args: List[str]
|
||||
extra_args: List[str] = Field(default_factory=list)
|
||||
|
||||
class PresetUpdate(BaseSchema):
|
||||
"""PresetUpdate schema."""
|
||||
@@ -61,6 +61,6 @@ class PresetResponse(BaseSchema):
|
||||
audio_bitrate: Optional[str] = None
|
||||
audio_channels: Optional[int] = None
|
||||
audio_samplerate: Optional[int] = None
|
||||
extra_args: List[str]
|
||||
extra_args: List[str] = Field(default_factory=list)
|
||||
created_at: Optional[datetime] = None
|
||||
updated_at: Optional[datetime] = None
|
||||
@@ -1,90 +0,0 @@
|
||||
"""
|
||||
Pydantic Models - GENERATED FILE
|
||||
|
||||
Do not edit directly. Regenerate using modelgen.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from enum import Enum
|
||||
from typing import Any, Dict, List, Optional
|
||||
from uuid import UUID
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
class AssetStatus(str, Enum):
|
||||
PENDING = "pending"
|
||||
READY = "ready"
|
||||
ERROR = "error"
|
||||
|
||||
class JobStatus(str, Enum):
|
||||
PENDING = "pending"
|
||||
PROCESSING = "processing"
|
||||
COMPLETED = "completed"
|
||||
FAILED = "failed"
|
||||
CANCELLED = "cancelled"
|
||||
|
||||
class MediaAsset(BaseModel):
|
||||
"""A video/audio file registered in the system."""
|
||||
id: UUID
|
||||
filename: str
|
||||
file_path: str
|
||||
status: AssetStatus = "AssetStatus.PENDING"
|
||||
error_message: Optional[str] = None
|
||||
file_size: Optional[int] = None
|
||||
duration: Optional[float] = None
|
||||
video_codec: Optional[str] = None
|
||||
audio_codec: Optional[str] = None
|
||||
width: Optional[int] = None
|
||||
height: Optional[int] = None
|
||||
framerate: Optional[float] = None
|
||||
bitrate: Optional[int] = None
|
||||
properties: Dict[str, Any]
|
||||
comments: str = ""
|
||||
tags: List[str] = Field(default_factory=list)
|
||||
created_at: Optional[datetime] = None
|
||||
updated_at: Optional[datetime] = None
|
||||
|
||||
class TranscodePreset(BaseModel):
|
||||
"""A reusable transcoding configuration (like Handbrake presets)."""
|
||||
id: UUID
|
||||
name: str
|
||||
description: str = ""
|
||||
is_builtin: bool = False
|
||||
container: str = "mp4"
|
||||
video_codec: str = "libx264"
|
||||
video_bitrate: Optional[str] = None
|
||||
video_crf: Optional[int] = None
|
||||
video_preset: Optional[str] = None
|
||||
resolution: Optional[str] = None
|
||||
framerate: Optional[float] = None
|
||||
audio_codec: str = "aac"
|
||||
audio_bitrate: Optional[str] = None
|
||||
audio_channels: Optional[int] = None
|
||||
audio_samplerate: Optional[int] = None
|
||||
extra_args: List[str] = Field(default_factory=list)
|
||||
created_at: Optional[datetime] = None
|
||||
updated_at: Optional[datetime] = None
|
||||
|
||||
class TranscodeJob(BaseModel):
|
||||
"""A transcoding or trimming job in the queue."""
|
||||
id: UUID
|
||||
source_asset_id: UUID
|
||||
preset_id: Optional[UUID] = None
|
||||
preset_snapshot: Dict[str, Any]
|
||||
trim_start: Optional[float] = None
|
||||
trim_end: Optional[float] = None
|
||||
output_filename: str = ""
|
||||
output_path: Optional[str] = None
|
||||
output_asset_id: Optional[UUID] = None
|
||||
status: JobStatus = "JobStatus.PENDING"
|
||||
progress: float = 0.0
|
||||
current_frame: Optional[int] = None
|
||||
current_time: Optional[float] = None
|
||||
speed: Optional[str] = None
|
||||
error_message: Optional[str] = None
|
||||
celery_task_id: Optional[str] = None
|
||||
execution_arn: Optional[str] = None
|
||||
priority: int = 0
|
||||
created_at: Optional[datetime] = None
|
||||
started_at: Optional[datetime] = None
|
||||
completed_at: Optional[datetime] = None
|
||||
@@ -1,47 +1,12 @@
|
||||
#!/bin/bash
|
||||
# Model generation script for MPR
|
||||
# Generates Django, Pydantic, TypeScript, and Protobuf from schema/models
|
||||
# Generates all targets from schema/modelgen.json config
|
||||
|
||||
set -e
|
||||
|
||||
cd "$(dirname "$0")/.."
|
||||
|
||||
echo "Generating models from schema/models..."
|
||||
|
||||
# Django ORM models: domain models + enums
|
||||
python -m modelgen from-schema \
|
||||
--schema schema/models \
|
||||
--output mpr/media_assets/models.py \
|
||||
--targets django \
|
||||
--include dataclasses,enums
|
||||
|
||||
# Pydantic schemas for FastAPI: domain models + enums
|
||||
python -m modelgen from-schema \
|
||||
--schema schema/models \
|
||||
--output api/schemas/models.py \
|
||||
--targets pydantic \
|
||||
--include dataclasses,enums
|
||||
|
||||
# TypeScript types for Timeline UI: domain models + enums + API types
|
||||
python -m modelgen from-schema \
|
||||
--schema schema/models \
|
||||
--output ui/timeline/src/types.ts \
|
||||
--targets typescript \
|
||||
--include dataclasses,enums,api
|
||||
|
||||
# Graphene types for GraphQL: domain models + enums + API types
|
||||
python -m modelgen from-schema \
|
||||
--schema schema/models \
|
||||
--output api/schemas/graphql.py \
|
||||
--targets graphene \
|
||||
--include dataclasses,enums,api
|
||||
|
||||
# Protobuf for gRPC: gRPC messages + service
|
||||
python -m modelgen from-schema \
|
||||
--schema schema/models \
|
||||
--output rpc/protos/worker.proto \
|
||||
--targets proto \
|
||||
--include grpc
|
||||
python -m modelgen generate --config schema/modelgen.json
|
||||
|
||||
# Generate gRPC stubs from proto
|
||||
echo "Generating gRPC stubs..."
|
||||
|
||||
@@ -16,10 +16,10 @@ Output formats:
|
||||
- prisma: Prisma schema
|
||||
|
||||
Usage:
|
||||
python -m modelgen from-config -c config.json -o models.py
|
||||
python -m modelgen from-schema -o models/ --targets pydantic,typescript
|
||||
python -m modelgen extract --source /path/to/django --targets pydantic
|
||||
python -m modelgen list-formats
|
||||
python -m soleprint.station.tools.modelgen from-config -c config.json -o models.py
|
||||
python -m soleprint.station.tools.modelgen from-schema -o models/ --targets pydantic,typescript
|
||||
python -m soleprint.station.tools.modelgen extract --source /path/to/django --targets pydantic
|
||||
python -m soleprint.station.tools.modelgen list-formats
|
||||
"""
|
||||
|
||||
__version__ = "0.2.0"
|
||||
|
||||
@@ -16,10 +16,11 @@ Output formats:
|
||||
- prisma: Prisma schema
|
||||
|
||||
Usage:
|
||||
python -m modelgen --help
|
||||
python -m modelgen from-config -c config.json -o models.py
|
||||
python -m modelgen from-schema -o models/ --targets pydantic,typescript
|
||||
python -m modelgen extract --source /path/to/django --targets pydantic
|
||||
python -m soleprint.station.tools.modelgen --help
|
||||
python -m soleprint.station.tools.modelgen from-config -c config.json -o models.py
|
||||
python -m soleprint.station.tools.modelgen from-schema -o models/ --targets pydantic,typescript
|
||||
python -m soleprint.station.tools.modelgen extract --source /path/to/django --targets pydantic
|
||||
python -m soleprint.station.tools.modelgen generate --config schema/modelgen.json
|
||||
"""
|
||||
|
||||
import argparse
|
||||
@@ -177,6 +178,47 @@ def cmd_extract(args):
|
||||
print("Done!")
|
||||
|
||||
|
||||
def cmd_generate(args):
|
||||
"""Generate all targets from a JSON config file."""
|
||||
import json
|
||||
from .loader import load_schema
|
||||
|
||||
config_path = Path(args.config)
|
||||
if not config_path.exists():
|
||||
print(f"Error: Config file not found: {config_path}", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
with open(config_path) as f:
|
||||
config = json.load(f)
|
||||
|
||||
# Resolve paths relative to current working directory
|
||||
schema_path = Path(config["schema"])
|
||||
if not schema_path.exists():
|
||||
print(f"Error: Schema folder not found: {schema_path}", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
print(f"Loading schema: {schema_path}")
|
||||
|
||||
for target_conf in config["targets"]:
|
||||
target = target_conf["target"]
|
||||
output = Path(target_conf["output"])
|
||||
include = set(target_conf.get("include", []))
|
||||
name_map = target_conf.get("name_map", {})
|
||||
|
||||
if target not in GENERATORS:
|
||||
print(f"Warning: Unknown target '{target}', skipping", file=sys.stderr)
|
||||
continue
|
||||
|
||||
# Load schema with this target's include filter
|
||||
schema = load_schema(schema_path, include=include or None)
|
||||
|
||||
generator = GENERATORS[target](name_map=name_map)
|
||||
print(f"Generating {target} to: {output}")
|
||||
generator.generate(schema, output)
|
||||
|
||||
print("Done!")
|
||||
|
||||
|
||||
def cmd_list_formats(args):
|
||||
"""List available output formats."""
|
||||
print("Available output formats:")
|
||||
@@ -295,6 +337,21 @@ def main():
|
||||
)
|
||||
extract_parser.set_defaults(func=cmd_extract)
|
||||
|
||||
|
||||
# generate command (config-driven multi-target)
|
||||
gen_parser = subparsers.add_parser(
|
||||
"generate",
|
||||
help="Generate all targets from a JSON config file",
|
||||
)
|
||||
gen_parser.add_argument(
|
||||
"--config",
|
||||
"-c",
|
||||
type=str,
|
||||
required=True,
|
||||
help="Path to generation config file (e.g., schema/modelgen.json)",
|
||||
)
|
||||
gen_parser.set_defaults(func=cmd_generate)
|
||||
|
||||
# list-formats command
|
||||
formats_parser = subparsers.add_parser(
|
||||
"list-formats",
|
||||
|
||||
@@ -7,6 +7,7 @@ Supported generators:
|
||||
- TypeScriptGenerator: TypeScript interfaces
|
||||
- ProtobufGenerator: Protocol Buffer definitions
|
||||
- PrismaGenerator: Prisma schema
|
||||
- GrapheneGenerator: Graphene ObjectType/InputObjectType classes
|
||||
"""
|
||||
|
||||
from typing import Dict, Type
|
||||
|
||||
@@ -6,12 +6,19 @@ Abstract base class for all code generators.
|
||||
|
||||
from abc import ABC, abstractmethod
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
from typing import Any, Dict
|
||||
|
||||
|
||||
class BaseGenerator(ABC):
|
||||
"""Abstract base for code generators."""
|
||||
|
||||
def __init__(self, name_map: Dict[str, str] = None):
|
||||
self.name_map = name_map or {}
|
||||
|
||||
def map_name(self, name: str) -> str:
|
||||
"""Apply name_map to a model name."""
|
||||
return self.name_map.get(name, name)
|
||||
|
||||
@abstractmethod
|
||||
def generate(self, models: Any, output_path: Path) -> None:
|
||||
"""Generate code for the given models to the specified path."""
|
||||
|
||||
@@ -224,7 +224,8 @@ class DjangoGenerator(BaseGenerator):
|
||||
if default is not dc.MISSING and isinstance(default, Enum):
|
||||
extra.append(f"default={enum_name}.{default.name}")
|
||||
return DJANGO_TYPES["enum"].format(
|
||||
enum_name=enum_name, opts=", " + ", ".join(extra) if extra else ""
|
||||
enum_name=enum_name,
|
||||
opts=", " + ", ".join(extra) if extra else ""
|
||||
)
|
||||
|
||||
# Text fields (based on name heuristics)
|
||||
|
||||
@@ -2,8 +2,12 @@
|
||||
Pydantic Generator
|
||||
|
||||
Generates Pydantic BaseModel classes from model definitions.
|
||||
Supports two output modes:
|
||||
- File output: flat models (backwards compatible)
|
||||
- Directory output: CRUD variants (Create/Update/Response) per model
|
||||
"""
|
||||
|
||||
import dataclasses as dc
|
||||
from enum import Enum
|
||||
from pathlib import Path
|
||||
from typing import Any, List, get_type_hints
|
||||
@@ -13,6 +17,13 @@ from ..loader.schema import EnumDefinition, FieldDefinition, ModelDefinition
|
||||
from ..types import PYDANTIC_RESOLVERS
|
||||
from .base import BaseGenerator
|
||||
|
||||
# Fields to skip per CRUD variant
|
||||
SKIP_FIELDS = {
|
||||
"Create": {"id", "created_at", "updated_at", "status", "error_message"},
|
||||
"Update": {"id", "created_at", "updated_at"},
|
||||
"Response": set(),
|
||||
}
|
||||
|
||||
|
||||
class PydanticGenerator(BaseGenerator):
|
||||
"""Generates Pydantic model files."""
|
||||
@@ -21,52 +32,187 @@ class PydanticGenerator(BaseGenerator):
|
||||
return ".py"
|
||||
|
||||
def generate(self, models, output_path: Path) -> None:
|
||||
"""Generate Pydantic models to output_path."""
|
||||
"""Generate Pydantic models to output_path.
|
||||
|
||||
If output_path is a directory (or doesn't end in .py), generate
|
||||
multi-file CRUD variants. Otherwise, generate flat models to a
|
||||
single file.
|
||||
"""
|
||||
output_path = Path(output_path)
|
||||
|
||||
if output_path.suffix != ".py":
|
||||
# Directory mode: CRUD variants
|
||||
self._generate_crud_directory(models, output_path)
|
||||
else:
|
||||
# File mode: flat models (backwards compatible)
|
||||
self._generate_flat_file(models, output_path)
|
||||
|
||||
def _generate_flat_file(self, models, output_path: Path) -> None:
|
||||
"""Generate flat models to a single file (original behavior)."""
|
||||
output_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
# Detect input type and generate accordingly
|
||||
if hasattr(models, "get_shared_component"):
|
||||
# ConfigLoader (soleprint config)
|
||||
content = self._generate_from_config(models)
|
||||
elif hasattr(models, "models"):
|
||||
# SchemaLoader
|
||||
content = self._generate_from_definitions(
|
||||
models.models, getattr(models, "enums", [])
|
||||
)
|
||||
elif isinstance(models, tuple):
|
||||
# (models, enums) tuple from extractor
|
||||
content = self._generate_from_definitions(models[0], models[1])
|
||||
elif isinstance(models, list):
|
||||
# List of dataclasses (MPR style)
|
||||
content = self._generate_from_dataclasses(models)
|
||||
else:
|
||||
raise ValueError(f"Unsupported input type: {type(models)}")
|
||||
|
||||
output_path.write_text(content)
|
||||
|
||||
def _generate_from_definitions(
|
||||
self, models: List[ModelDefinition], enums: List[EnumDefinition]
|
||||
) -> str:
|
||||
"""Generate from ModelDefinition objects (schema/extract mode)."""
|
||||
lines = self._generate_header()
|
||||
def _generate_crud_directory(self, models, output_dir: Path) -> None:
|
||||
"""Generate CRUD variant files in a directory."""
|
||||
output_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
# Generate enums
|
||||
for enum_def in enums:
|
||||
if hasattr(models, "models"):
|
||||
model_defs = models.models
|
||||
enum_defs = getattr(models, "enums", [])
|
||||
elif isinstance(models, tuple):
|
||||
model_defs = models[0]
|
||||
enum_defs = models[1]
|
||||
else:
|
||||
raise ValueError(f"Unsupported input type for CRUD mode: {type(models)}")
|
||||
|
||||
# base.py
|
||||
base_content = "\n".join([
|
||||
'"""Pydantic Base Schema - GENERATED FILE"""',
|
||||
"",
|
||||
"from pydantic import BaseModel, ConfigDict",
|
||||
"",
|
||||
"",
|
||||
"class BaseSchema(BaseModel):",
|
||||
' """Base schema with ORM mode."""',
|
||||
" model_config = ConfigDict(from_attributes=True)",
|
||||
"",
|
||||
])
|
||||
(output_dir / "base.py").write_text(base_content)
|
||||
|
||||
# Per-model files
|
||||
imports = ["from .base import BaseSchema"]
|
||||
all_exports = ['"BaseSchema"']
|
||||
|
||||
for model_def in model_defs:
|
||||
mapped = self.map_name(model_def.name)
|
||||
module_name = mapped.lower()
|
||||
|
||||
lines = [
|
||||
f'"""{model_def.name} Schemas - GENERATED FILE"""',
|
||||
"",
|
||||
"from datetime import datetime",
|
||||
"from enum import Enum",
|
||||
"from typing import Any, Dict, List, Optional",
|
||||
"from uuid import UUID",
|
||||
"",
|
||||
"from .base import BaseSchema",
|
||||
"",
|
||||
]
|
||||
|
||||
# Inline enums used by this model
|
||||
model_enums = self._collect_model_enums(model_def, enum_defs)
|
||||
for enum_def in model_enums:
|
||||
lines.append("")
|
||||
lines.extend(self._generate_enum(enum_def))
|
||||
lines.append("")
|
||||
|
||||
# Generate models
|
||||
# CRUD variants
|
||||
for suffix in ["Create", "Update", "Response"]:
|
||||
lines.append("")
|
||||
lines.extend(self._generate_crud_model(model_def, mapped, suffix))
|
||||
|
||||
lines.append("")
|
||||
content = "\n".join(lines)
|
||||
(output_dir / f"{module_name}.py").write_text(content)
|
||||
|
||||
# Track imports
|
||||
imports.append(
|
||||
f"from .{module_name} import {mapped}Create, {mapped}Update, {mapped}Response"
|
||||
)
|
||||
all_exports.extend([
|
||||
f'"{mapped}Create"', f'"{mapped}Update"', f'"{mapped}Response"'
|
||||
])
|
||||
|
||||
for enum_def in model_enums:
|
||||
imports.append(f"from .{module_name} import {enum_def.name}")
|
||||
all_exports.append(f'"{enum_def.name}"')
|
||||
|
||||
# __init__.py
|
||||
init_content = "\n".join([
|
||||
'"""API Schemas - GENERATED FILE"""',
|
||||
"",
|
||||
*imports,
|
||||
"",
|
||||
f"__all__ = [{', '.join(all_exports)}]",
|
||||
"",
|
||||
])
|
||||
(output_dir / "__init__.py").write_text(init_content)
|
||||
|
||||
def _collect_model_enums(
|
||||
self, model_def: ModelDefinition, enum_defs: List[EnumDefinition]
|
||||
) -> List[EnumDefinition]:
|
||||
"""Find enums referenced by a model's fields."""
|
||||
enum_names = set()
|
||||
for field in model_def.fields:
|
||||
base, _ = unwrap_optional(field.type_hint)
|
||||
if isinstance(base, type) and issubclass(base, Enum):
|
||||
enum_names.add(base.__name__)
|
||||
return [e for e in enum_defs if e.name in enum_names]
|
||||
|
||||
def _generate_crud_model(
|
||||
self, model_def: ModelDefinition, mapped_name: str, suffix: str
|
||||
) -> List[str]:
|
||||
"""Generate a single CRUD variant (Create/Update/Response)."""
|
||||
class_name = f"{mapped_name}{suffix}"
|
||||
skip = SKIP_FIELDS.get(suffix, set())
|
||||
|
||||
lines = [
|
||||
f"class {class_name}(BaseSchema):",
|
||||
f' """{class_name} schema."""',
|
||||
]
|
||||
|
||||
has_fields = False
|
||||
for field in model_def.fields:
|
||||
if field.name.startswith("_") or field.name in skip:
|
||||
continue
|
||||
|
||||
has_fields = True
|
||||
py_type = self._resolve_type(field.type_hint, field.optional)
|
||||
|
||||
# Update variant: all fields optional
|
||||
if suffix == "Update" and "Optional" not in py_type:
|
||||
py_type = f"Optional[{py_type}]"
|
||||
|
||||
default = self._format_default(field.default, "Optional" in py_type)
|
||||
lines.append(f" {field.name}: {py_type}{default}")
|
||||
|
||||
if not has_fields:
|
||||
lines.append(" pass")
|
||||
|
||||
return lines
|
||||
|
||||
# =========================================================================
|
||||
# Flat file generation (original behavior)
|
||||
# =========================================================================
|
||||
|
||||
def _generate_from_definitions(
|
||||
self, models: List[ModelDefinition], enums: List[EnumDefinition]
|
||||
) -> str:
|
||||
lines = self._generate_header()
|
||||
for enum_def in enums:
|
||||
lines.extend(self._generate_enum(enum_def))
|
||||
lines.append("")
|
||||
for model_def in models:
|
||||
lines.extend(self._generate_model_from_definition(model_def))
|
||||
lines.append("")
|
||||
|
||||
return "\n".join(lines)
|
||||
|
||||
def _generate_from_dataclasses(self, dataclasses: List[type]) -> str:
|
||||
"""Generate from Python dataclasses (MPR style)."""
|
||||
lines = self._generate_header()
|
||||
|
||||
# Collect and generate enums first
|
||||
enums_generated = set()
|
||||
for cls in dataclasses:
|
||||
hints = get_type_hints(cls)
|
||||
@@ -77,16 +223,12 @@ class PydanticGenerator(BaseGenerator):
|
||||
lines.extend(self._generate_enum_from_python(base))
|
||||
lines.append("")
|
||||
enums_generated.add(base.__name__)
|
||||
|
||||
# Generate models
|
||||
for cls in dataclasses:
|
||||
lines.extend(self._generate_model_from_dataclass(cls))
|
||||
lines.append("")
|
||||
|
||||
return "\n".join(lines)
|
||||
|
||||
def _generate_header(self) -> List[str]:
|
||||
"""Generate file header."""
|
||||
return [
|
||||
'"""',
|
||||
"Pydantic Models - GENERATED FILE",
|
||||
@@ -104,27 +246,23 @@ class PydanticGenerator(BaseGenerator):
|
||||
]
|
||||
|
||||
def _generate_enum(self, enum_def: EnumDefinition) -> List[str]:
|
||||
"""Generate Pydantic enum from EnumDefinition."""
|
||||
lines = [f"class {enum_def.name}(str, Enum):"]
|
||||
for name, value in enum_def.values:
|
||||
lines.append(f' {name} = "{value}"')
|
||||
return lines
|
||||
|
||||
def _generate_enum_from_python(self, enum_cls: type) -> List[str]:
|
||||
"""Generate Pydantic enum from Python Enum."""
|
||||
lines = [f"class {enum_cls.__name__}(str, Enum):"]
|
||||
for member in enum_cls:
|
||||
lines.append(f' {member.name} = "{member.value}"')
|
||||
return lines
|
||||
|
||||
def _generate_model_from_definition(self, model_def: ModelDefinition) -> List[str]:
|
||||
"""Generate Pydantic model from ModelDefinition."""
|
||||
docstring = model_def.docstring or model_def.name
|
||||
lines = [
|
||||
f"class {model_def.name}(BaseModel):",
|
||||
f' """{docstring.strip().split(chr(10))[0]}"""',
|
||||
]
|
||||
|
||||
if not model_def.fields:
|
||||
lines.append(" pass")
|
||||
else:
|
||||
@@ -132,46 +270,34 @@ class PydanticGenerator(BaseGenerator):
|
||||
py_type = self._resolve_type(field.type_hint, field.optional)
|
||||
default = self._format_default(field.default, field.optional)
|
||||
lines.append(f" {field.name}: {py_type}{default}")
|
||||
|
||||
return lines
|
||||
|
||||
def _generate_model_from_dataclass(self, cls: type) -> List[str]:
|
||||
"""Generate Pydantic model from a dataclass."""
|
||||
import dataclasses as dc
|
||||
|
||||
docstring = cls.__doc__ or cls.__name__
|
||||
lines = [
|
||||
f"class {cls.__name__}(BaseModel):",
|
||||
f' """{docstring.strip().split(chr(10))[0]}"""',
|
||||
]
|
||||
|
||||
hints = get_type_hints(cls)
|
||||
fields = {f.name: f for f in dc.fields(cls)}
|
||||
|
||||
for name, type_hint in hints.items():
|
||||
if name.startswith("_"):
|
||||
continue
|
||||
|
||||
field = fields.get(name)
|
||||
default_val = dc.MISSING
|
||||
if field:
|
||||
if field.default is not dc.MISSING:
|
||||
default_val = field.default
|
||||
|
||||
py_type = self._resolve_type(type_hint, False)
|
||||
default = self._format_default(default_val, "Optional" in py_type)
|
||||
lines.append(f" {name}: {py_type}{default}")
|
||||
|
||||
return lines
|
||||
|
||||
def _resolve_type(self, type_hint: Any, optional: bool) -> str:
|
||||
"""Resolve Python type to Pydantic type string."""
|
||||
base, is_optional = unwrap_optional(type_hint)
|
||||
optional = optional or is_optional
|
||||
origin = get_origin_name(base)
|
||||
type_name = get_type_name(base)
|
||||
|
||||
# Look up resolver
|
||||
resolver = (
|
||||
PYDANTIC_RESOLVERS.get(origin)
|
||||
or PYDANTIC_RESOLVERS.get(type_name)
|
||||
@@ -182,14 +308,10 @@ class PydanticGenerator(BaseGenerator):
|
||||
else None
|
||||
)
|
||||
)
|
||||
|
||||
result = resolver(base) if resolver else "str"
|
||||
return f"Optional[{result}]" if optional else result
|
||||
|
||||
def _format_default(self, default: Any, optional: bool) -> str:
|
||||
"""Format default value for field."""
|
||||
import dataclasses as dc
|
||||
|
||||
if optional:
|
||||
return " = None"
|
||||
if default is dc.MISSING or default is None:
|
||||
@@ -204,7 +326,6 @@ class PydanticGenerator(BaseGenerator):
|
||||
|
||||
def _generate_from_config(self, config) -> str:
|
||||
"""Generate from ConfigLoader (soleprint config.json mode)."""
|
||||
# Get component names from config
|
||||
config_comp = config.get_shared_component("config")
|
||||
data_comp = config.get_shared_component("data")
|
||||
|
||||
|
||||
@@ -26,11 +26,10 @@ class TypeScriptGenerator(BaseGenerator):
|
||||
|
||||
# Handle different input types
|
||||
if hasattr(models, "models"):
|
||||
# SchemaLoader
|
||||
# SchemaLoader — include api_models if present
|
||||
all_models = models.models + getattr(models, "api_models", [])
|
||||
content = self._generate_from_definitions(
|
||||
models.models,
|
||||
getattr(models, "enums", []),
|
||||
api_models=getattr(models, "api_models", []),
|
||||
all_models, getattr(models, "enums", [])
|
||||
)
|
||||
elif isinstance(models, tuple):
|
||||
# (models, enums) tuple
|
||||
@@ -44,10 +43,7 @@ class TypeScriptGenerator(BaseGenerator):
|
||||
output_path.write_text(content)
|
||||
|
||||
def _generate_from_definitions(
|
||||
self,
|
||||
models: List[ModelDefinition],
|
||||
enums: List[EnumDefinition],
|
||||
api_models: List[ModelDefinition] = None,
|
||||
self, models: List[ModelDefinition], enums: List[EnumDefinition]
|
||||
) -> str:
|
||||
"""Generate from ModelDefinition objects."""
|
||||
lines = self._generate_header()
|
||||
@@ -63,14 +59,6 @@ class TypeScriptGenerator(BaseGenerator):
|
||||
lines.extend(self._generate_interface_from_definition(model_def))
|
||||
lines.append("")
|
||||
|
||||
# Generate API request/response interfaces
|
||||
if api_models:
|
||||
lines.append("// API request/response types")
|
||||
lines.append("")
|
||||
for model_def in api_models:
|
||||
lines.extend(self._generate_interface_from_definition(model_def))
|
||||
lines.append("")
|
||||
|
||||
return "\n".join(lines)
|
||||
|
||||
def _generate_from_dataclasses(self, dataclasses: List[type]) -> str:
|
||||
|
||||
@@ -5,6 +5,7 @@ Loads Python dataclasses from a schema/ folder.
|
||||
Expects the folder to have an __init__.py that exports:
|
||||
- DATACLASSES: List of dataclass types to generate
|
||||
- ENUMS: List of Enum types to include
|
||||
- API_MODELS: (optional) List of API request/response types
|
||||
- GRPC_MESSAGES: (optional) List of gRPC message types
|
||||
- GRPC_SERVICE: (optional) gRPC service definition dict
|
||||
"""
|
||||
@@ -88,7 +89,7 @@ class SchemaLoader:
|
||||
for cls in dataclasses:
|
||||
self.models.append(self._parse_dataclass(cls))
|
||||
|
||||
# Extract API_MODELS (TypeScript-only request/response types)
|
||||
# Extract API_MODELS (request/response types)
|
||||
if load_all or "api" in include:
|
||||
api_models = getattr(module, "API_MODELS", [])
|
||||
for cls in api_models:
|
||||
|
||||
@@ -4,7 +4,7 @@ MPR Schema Definitions - Source of Truth
|
||||
This package defines the core data models as Python dataclasses.
|
||||
These definitions are used to generate:
|
||||
- Django ORM models (mpr/media_assets/models.py)
|
||||
- Pydantic schemas (api/schemas/*.py)
|
||||
- Pydantic schemas (api/schema/*.py)
|
||||
- TypeScript types (ui/timeline/src/types.ts)
|
||||
- Protobuf definitions (grpc/protos/worker.proto)
|
||||
|
||||
|
||||
@@ -1,718 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
MPR Model Generator
|
||||
|
||||
Generates framework-specific models from schema/models/:
|
||||
- Django ORM models -> mpr/media_assets/models.py
|
||||
- Pydantic schemas -> api/schemas/*.py
|
||||
- TypeScript types -> ui/timeline/src/types.ts
|
||||
- Protobuf -> grpc/protos/worker.proto
|
||||
|
||||
Usage:
|
||||
python schema/generate.py [--django] [--pydantic] [--typescript] [--proto] [--all]
|
||||
"""
|
||||
|
||||
import argparse
import dataclasses as dc
import subprocess
import sys
import types
from enum import Enum
from pathlib import Path
from typing import Any, Callable, Union, get_args, get_origin, get_type_hints
|
||||
|
||||
PROJECT_ROOT = Path(__file__).parent.parent
|
||||
sys.path.insert(0, str(PROJECT_ROOT))
|
||||
|
||||
from schema.models import API_MODELS, DATACLASSES, ENUMS, GRPC_MESSAGES, GRPC_SERVICE
|
||||
|
||||
# =============================================================================
|
||||
# Type Dispatch Tables
|
||||
# =============================================================================
|
||||
|
||||
# Django field templates keyed either by a Python type or by a string
# dispatch tag ("UUID"/"datetime"/"dict"/"list"/"text"/"bigint"/"enum")
# used by resolve_django_type. Placeholders ({opts}, {max_length},
# {default}, {enum_name}) are filled in by the resolver.
DJANGO_TYPES: dict[Any, str] = {
    str: "models.CharField(max_length={max_length}{opts})",
    int: "models.IntegerField({opts})",
    float: "models.FloatField({opts})",
    bool: "models.BooleanField(default={default})",
    "UUID": "models.UUIDField({opts})",
    "datetime": "models.DateTimeField({opts})",
    # Containers are stored as JSON columns with matching empty defaults.
    "dict": "models.JSONField(default=dict, blank=True)",
    "list": "models.JSONField(default=list, blank=True)",
    "text": "models.TextField(blank=True, default='')",
    "bigint": "models.BigIntegerField({opts})",
    # Enums become CharField with TextChoices-generated choices.
    "enum": "models.CharField(max_length=20, choices={enum_name}.choices{opts})",
}
|
||||
|
||||
# Fixed declarations for well-known field names, checked before any
# type-based resolution (see resolve_django_type).
DJANGO_SPECIAL: dict[str, str] = {
    "id": "models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)",
    "created_at": "models.DateTimeField(auto_now_add=True)",
    "updated_at": "models.DateTimeField(auto_now=True)",
}
|
||||
|
||||
# Pydantic annotation text per Python type; string keys are dispatch tags
# ("UUID"/"datetime"/"dict"/"list"/"enum") used by resolve_pydantic_type.
PYDANTIC_RESOLVERS: dict[Any, Callable[[Any], str]] = {
    str: lambda _: "str",
    int: lambda _: "int",
    float: lambda _: "float",
    bool: lambda _: "bool",
    "UUID": lambda _: "UUID",
    "datetime": lambda _: "datetime",
    "dict": lambda _: "Dict[str, Any]",
    # The list element type is recovered from the hint's type args.
    "list": lambda base: f"List[{get_list_inner(base)}]",
    # Enums are referenced by class name (the class is emitted separately).
    "enum": lambda base: base.__name__,
}
|
||||
|
||||
# TypeScript type text per Python type; string keys are dispatch tags.
# UUID and datetime are both emitted as plain `string`.
TS_RESOLVERS: dict[Any, Callable[[Any], str]] = {
    str: lambda _: "string",
    int: lambda _: "number",
    float: lambda _: "number",
    bool: lambda _: "boolean",
    "UUID": lambda _: "string",
    "datetime": lambda _: "string",
    "dict": lambda _: "Record<string, unknown>",
    # Element type comes from the hint's args; unparameterized lists and
    # unknown element types fall back to string[].
    "list": lambda base: (
        f"{TS_RESOLVERS.get(get_args(base)[0], lambda _: 'string')(None)}[]"
        if get_args(base)
        else "string[]"
    ),
    # Enums are referenced by their generated union-type name.
    "enum": lambda base: base.__name__,
}
|
||||
|
||||
# Protobuf type text per Python type; "list" produces a repeated field.
# Python int is emitted as proto int32.
PROTO_RESOLVERS: dict[Any, Callable[[Any], str]] = {
    str: lambda _: "string",
    int: lambda _: "int32",
    float: lambda _: "float",
    bool: lambda _: "bool",
    # Repeated scalar; unknown or unparameterized element types fall back
    # to "repeated string".
    "list": lambda base: (
        f"repeated {PROTO_RESOLVERS.get(get_args(base)[0], lambda _: 'string')(None)}"
        if get_args(base)
        else "repeated string"
    ),
}
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Type Helpers
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def unwrap_optional(type_hint: Any) -> tuple[Any, bool]:
|
||||
"""Unwrap Optional[T] -> (T, True) or (T, False) if not optional."""
|
||||
origin = get_origin(type_hint)
|
||||
if origin is Union:
|
||||
args = [a for a in get_args(type_hint) if a is not type(None)]
|
||||
return (args[0] if args else str, True)
|
||||
return (type_hint, False)
|
||||
|
||||
|
||||
def get_origin_name(type_hint: Any) -> str | None:
|
||||
"""Get origin type name: 'dict', 'list', or None."""
|
||||
origin = get_origin(type_hint)
|
||||
if origin is dict:
|
||||
return "dict"
|
||||
if origin is list:
|
||||
return "list"
|
||||
return None
|
||||
|
||||
|
||||
def get_type_name(type_hint: Any) -> str | None:
|
||||
"""Get type name for special types like UUID, datetime."""
|
||||
if hasattr(type_hint, "__name__"):
|
||||
return type_hint.__name__
|
||||
return None
|
||||
|
||||
|
||||
def get_list_inner(type_hint: Any) -> str:
    """Name of List[T]'s element type for scalar T; anything else -> 'str'."""
    scalar_names = {str: "str", int: "int", float: "float", bool: "bool"}
    args = get_args(type_hint)
    return scalar_names.get(args[0], "str") if args else "str"
|
||||
|
||||
|
||||
def get_field_default(field: dc.Field) -> Any:
    """Plain default of a dataclass field, or dc.MISSING.

    Fields using ``default_factory`` report MISSING — only literal
    defaults are surfaced.
    """
    return dc.MISSING if field.default is dc.MISSING else field.default
|
||||
|
||||
|
||||
def format_opts(optional: bool, extra: list[str] | None = None) -> str:
|
||||
"""Format field options string."""
|
||||
parts = []
|
||||
if optional:
|
||||
parts.append("null=True, blank=True")
|
||||
if extra:
|
||||
parts.extend(extra)
|
||||
return ", ".join(parts)
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Django Generator
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def resolve_django_type(name: str, type_hint: Any, default: Any) -> str:
    """Translate one dataclass field into its Django field declaration.

    Resolution order: well-known names (id/timestamps), JSON containers,
    UUID/datetime, enums, heuristic text/bigint names, plain scalars,
    then a CharField fallback.
    """
    # Well-known field names get fixed declarations (pk, audit timestamps).
    special = DJANGO_SPECIAL.get(name)
    if special is not None:
        return special

    base, optional = unwrap_optional(type_hint)
    origin = get_origin_name(base)
    type_name = get_type_name(base)
    opts = format_opts(optional)

    # dict/list map to JSONField regardless of optionality.
    if origin in ("dict", "list"):
        return DJANGO_TYPES[origin]

    if type_name in ("UUID", "datetime"):
        return DJANGO_TYPES[type_name].format(opts=opts)

    # Enums: CharField with TextChoices; default rendered by member name.
    if isinstance(base, type) and issubclass(base, Enum):
        enum_opts = []
        if optional:
            enum_opts.append("null=True, blank=True")
        if default is not dc.MISSING and isinstance(default, Enum):
            enum_opts.append(f"default={base.__name__}.{default.name}")
        return DJANGO_TYPES["enum"].format(
            enum_name=base.__name__,
            opts=", " + ", ".join(enum_opts) if enum_opts else "",
        )

    # Free-text-looking names get an unbounded TextField.
    if base is str and any(k in name for k in ("message", "comments", "description")):
        return DJANGO_TYPES["text"]

    # Byte counts / bitrates need 64-bit storage.
    if base is int and name in ("file_size", "bitrate"):
        return DJANGO_TYPES["bigint"].format(opts=opts)

    if base is str:
        # Heuristic column widths by field-name content.
        width = 1000 if "path" in name else 500 if "filename" in name else 255
        return DJANGO_TYPES[str].format(
            max_length=width, opts=", " + opts if opts else ""
        )

    if base is int or base is float:
        pieces = [opts] if opts else []
        if default is not dc.MISSING and not callable(default):
            pieces.append(f"default={default}")
        return DJANGO_TYPES[base].format(opts=", ".join(pieces))

    if base is bool:
        return DJANGO_TYPES[bool].format(
            default=default if default is not dc.MISSING else False
        )

    # Anything unrecognised: conservative CharField.
    return DJANGO_TYPES[str].format(max_length=255, opts=", " + opts if opts else "")
|
||||
|
||||
|
||||
def generate_django_enum(enum_cls: type) -> list[str]:
    """Render an Enum as a Django TextChoices class, one line per member.

    Labels are derived from member names: underscores to spaces, Title Case.
    """
    header = f"class {enum_cls.__name__}(models.TextChoices):"
    members = [
        f'    {m.name} = "{m.value}", "{m.name.replace("_", " ").title()}"'
        for m in enum_cls
    ]
    return [header, *members]
|
||||
|
||||
|
||||
def generate_django_model(cls: type) -> list[str]:
    """Render one dataclass as a Django model class body."""
    # First line of the docstring (or the class name) becomes the model doc.
    doc = (cls.__doc__ or cls.__name__).strip().split(chr(10))[0]
    out = [f"class {cls.__name__}(models.Model):", f'    """{doc}"""', ""]

    hints = get_type_hints(cls)
    by_name = {f.name: f for f in dc.fields(cls)}

    # One Django field declaration per public annotation.
    for field_name, hint in hints.items():
        if field_name.startswith("_"):
            continue
        spec = by_name.get(field_name)
        default = get_field_default(spec) if spec else dc.MISSING
        declaration = resolve_django_type(field_name, hint, default)
        out.append(f"    {field_name} = {declaration}")

    out.extend(
        [
            "",
            "    class Meta:",
            '        ordering = ["-created_at"]',
            "",
            "    def __str__(self):",
        ]
    )

    # __str__ prefers the most human-readable attribute available.
    if "filename" in hints:
        out.append("        return self.filename")
    elif "name" in hints:
        out.append("        return self.name")
    else:
        out.append("        return str(self.id)")

    return out
|
||||
|
||||
|
||||
def generate_django() -> str:
    """Assemble the full Django models module: header, enums, then models."""
    header = [
        '"""',
        "Django ORM Models - GENERATED FILE",
        "",
        "Do not edit directly. Modify schema/models/*.py and run:",
        " python schema/generate.py --django",
        '"""',
        "",
        "import uuid",
        "from django.db import models",
        "",
    ]

    body: list[str] = []
    # Enums first so model choices= can reference them.
    for enum_cls in ENUMS:
        body.extend(generate_django_enum(enum_cls))
        body.extend(["", ""])
    for cls in DATACLASSES:
        body.extend(generate_django_model(cls))
        body.extend(["", ""])

    return "\n".join(header + body)
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Pydantic Generator
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def resolve_pydantic_type(type_hint: Any) -> str:
    """Render a Python type hint as Pydantic annotation source text."""
    base, optional = unwrap_optional(type_hint)

    # Most-specific lookup first: container origin, then the special-name
    # table (UUID/datetime), then the raw type, finally the enum handler.
    resolver = PYDANTIC_RESOLVERS.get(get_origin_name(base))
    if resolver is None:
        resolver = PYDANTIC_RESOLVERS.get(get_type_name(base))
    if resolver is None:
        resolver = PYDANTIC_RESOLVERS.get(base)
    if resolver is None and isinstance(base, type) and issubclass(base, Enum):
        resolver = PYDANTIC_RESOLVERS["enum"]

    rendered = resolver(base) if resolver else "str"
    return f"Optional[{rendered}]" if optional else rendered
|
||||
|
||||
|
||||
def generate_pydantic_schema(cls: type, suffix: str) -> list[str]:
    """Render one Create/Update/Response schema class for a dataclass."""
    base_name = cls.__name__.replace("Transcode", "").replace("Media", "")
    class_name = f"{base_name}{suffix}"

    # Server-managed fields are omitted from the write schemas.
    skipped = {
        "Create": {"id", "created_at", "updated_at", "status", "error_message"},
        "Update": {"id", "created_at", "updated_at"},
        "Response": set(),
    }.get(suffix, set())

    out = [
        f"class {class_name}(BaseSchema):",
        f'    """{class_name} schema."""',
    ]

    field_specs = {f.name: f for f in dc.fields(cls)}

    for field_name, hint in get_type_hints(cls).items():
        if field_name.startswith("_") or field_name in skipped:
            continue

        annotation = resolve_pydantic_type(hint)
        # Every Update field is optional so partial updates work.
        if suffix == "Update" and "Optional" not in annotation:
            annotation = f"Optional[{annotation}]"

        spec = field_specs.get(field_name)
        default = get_field_default(spec) if spec else dc.MISSING

        if "Optional" in annotation:
            out.append(f"    {field_name}: {annotation} = None")
        elif default is not dc.MISSING and not callable(default):
            # Literal defaults are rendered by kind: quoted strings,
            # enum member references, or repr() for everything else.
            if isinstance(default, str):
                out.append(f'    {field_name}: {annotation} = "{default}"')
            elif isinstance(default, Enum):
                out.append(
                    f"    {field_name}: {annotation} = {default.__class__.__name__}.{default.name}"
                )
            else:
                out.append(f"    {field_name}: {annotation} = {default!r}")
        else:
            out.append(f"    {field_name}: {annotation}")

    return out
|
||||
|
||||
|
||||
def generate_pydantic() -> dict[str, str]:
    """Build every Pydantic schema file as {filename: source text}."""
    out: dict[str, str] = {}

    def short_name(cls: type) -> str:
        # "TranscodeJob" -> "Job", "MediaAsset" -> "Asset", etc.
        return cls.__name__.replace("Transcode", "").replace("Media", "")

    def first_enum(cls: type):
        # First enum-typed field of the dataclass, if any.
        for hint in get_type_hints(cls).values():
            base, _ = unwrap_optional(hint)
            if isinstance(base, type) and issubclass(base, Enum):
                return base
        return None

    # Shared base class with ORM-mode config.
    out["base.py"] = "\n".join(
        [
            '"""Pydantic Base Schema - GENERATED FILE"""',
            "",
            "from pydantic import BaseModel, ConfigDict",
            "",
            "",
            "class BaseSchema(BaseModel):",
            '    """Base schema with ORM mode."""',
            "    model_config = ConfigDict(from_attributes=True)",
            "",
        ]
    )

    # One module per dataclass, holding Create/Update/Response schemas.
    for cls in DATACLASSES:
        module_name = short_name(cls).lower()

        lines = [
            f'"""{cls.__name__} Schemas - GENERATED FILE"""',
            "",
            "from datetime import datetime",
            "from enum import Enum",
            "from typing import Any, Dict, List, Optional",
            "from uuid import UUID",
            "",
            "from .base import BaseSchema",
            "",
        ]

        enum_cls = first_enum(cls)
        if enum_cls is not None:
            lines.extend(["", f"class {enum_cls.__name__}(str, Enum):"])
            lines.extend(f'    {m.name} = "{m.value}"' for m in enum_cls)
            lines.append("")

        for suffix in ("Create", "Update", "Response"):
            lines.append("")
            lines.extend(generate_pydantic_schema(cls, suffix))

        lines.append("")
        out[f"{module_name}.py"] = "\n".join(lines)

    # Package __init__ re-exporting every schema (and any enums).
    imports = ["from .base import BaseSchema"]
    exports = ['"BaseSchema"']

    for cls in DATACLASSES:
        name = short_name(cls)
        module = name.lower()
        imports.append(
            f"from .{module} import {name}Create, {name}Update, {name}Response"
        )
        exports.extend([f'"{name}Create"', f'"{name}Update"', f'"{name}Response"'])

        enum_cls = first_enum(cls)
        if enum_cls is not None:
            imports.append(f"from .{module} import {enum_cls.__name__}")
            exports.append(f'"{enum_cls.__name__}"')

    out["__init__.py"] = "\n".join(
        [
            '"""API Schemas - GENERATED FILE"""',
            "",
            *imports,
            "",
            f"__all__ = [{', '.join(exports)}]",
            "",
        ]
    )

    return out
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# TypeScript Generator
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def resolve_ts_type(type_hint: Any) -> str:
    """Render a Python type hint as TypeScript type source text."""
    base, optional = unwrap_optional(type_hint)

    # Most-specific lookup first: container origin, then the special-name
    # table (UUID/datetime), then the raw type, finally the enum handler.
    resolver = TS_RESOLVERS.get(get_origin_name(base))
    if resolver is None:
        resolver = TS_RESOLVERS.get(get_type_name(base))
    if resolver is None:
        resolver = TS_RESOLVERS.get(base)
    if resolver is None and isinstance(base, type) and issubclass(base, Enum):
        resolver = TS_RESOLVERS["enum"]

    rendered = resolver(base) if resolver else "string"
    return f"{rendered} | null" if optional else rendered
|
||||
|
||||
|
||||
def generate_ts_interface(cls: type) -> list[str]:
    """Render one dataclass as an exported TypeScript interface."""
    fields = [
        f"  {field_name}: {resolve_ts_type(hint)};"
        for field_name, hint in get_type_hints(cls).items()
        if not field_name.startswith("_")
    ]
    return [f"export interface {cls.__name__} {{", *fields, "}"]
|
||||
|
||||
|
||||
def generate_typescript() -> str:
    """Assemble the full TypeScript types file (enums + interfaces)."""
    out = [
        "/**",
        " * MPR TypeScript Types - GENERATED FILE",
        " *",
        " * Do not edit directly. Modify schema/models/*.py and run:",
        " * python schema/generate.py --typescript",
        " */",
        "",
    ]

    # Enums become string-literal union types.
    for enum_cls in ENUMS:
        members = " | ".join(f'"{m.value}"' for m in enum_cls)
        out.append(f"export type {enum_cls.__name__} = {members};")
        out.append("")

    # Domain model interfaces.
    for cls in DATACLASSES:
        out.extend(generate_ts_interface(cls))
        out.append("")

    # API request/response interfaces.
    out.extend(["// API Request/Response Types", ""])
    for cls in API_MODELS:
        out.extend(generate_ts_interface(cls))
        out.append("")

    return "\n".join(out)
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Proto Generator
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def resolve_proto_type(type_hint: Any) -> tuple[str, bool]:
    """Render a hint as a proto type; returns (proto_type, is_optional).

    Repeated fields are never reported optional.
    """
    base, optional = unwrap_optional(type_hint)

    resolver = PROTO_RESOLVERS.get(get_origin_name(base)) or PROTO_RESOLVERS.get(base)
    if resolver is None:
        # Unknown types degrade to string, keeping the optionality flag.
        return "string", optional

    rendered = resolver(base)
    return rendered, optional and not rendered.startswith("repeated")
|
||||
|
||||
|
||||
def generate_proto_message(cls: type) -> list[str]:
    """Render one dataclass as a proto3 message (fields numbered from 1)."""
    out = [f"message {cls.__name__} {{"]

    hints = get_type_hints(cls)
    if not hints:
        out.append("  // Empty")
    else:
        for tag, (field_name, hint) in enumerate(hints.items(), start=1):
            proto_type, optional = resolve_proto_type(hint)
            # proto3 forbids the 'optional' keyword on repeated fields.
            keyword = (
                "optional "
                if optional and not proto_type.startswith("repeated")
                else ""
            )
            out.append(f"  {keyword}{proto_type} {field_name} = {tag};")

    out.append("}")
    return out
|
||||
|
||||
|
||||
def generate_proto() -> str:
    """Assemble the full worker.proto file (service block + messages)."""
    out = [
        "// MPR Worker Service - GENERATED FILE",
        "//",
        "// Do not edit directly. Modify schema/models/grpc.py and run:",
        "// python schema/generate.py --proto",
        "",
        'syntax = "proto3";',
        "",
        f"package {GRPC_SERVICE['package']};",
        "",
        f"service {GRPC_SERVICE['name']} {{",
    ]

    # One rpc line per declared method; responses may be server-streamed.
    for method in GRPC_SERVICE["methods"]:
        request = method["request"].__name__
        response = method["response"].__name__
        if method["stream_response"]:
            response = f"stream {response}"
        out.append(f"  rpc {method['name']}({request}) returns ({response});")

    out.extend(["}", ""])

    for cls in GRPC_MESSAGES:
        out.extend(generate_proto_message(cls))
        out.append("")

    return "\n".join(out)
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Writers
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def write_file(path: Path, content: str) -> None:
    """Write *content* to *path*, creating parent directories, and log it."""
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(content)
    # Echo each emitted file so the CLI shows what was generated.
    print(f" {path}")
|
||||
|
||||
|
||||
def write_django(output_dir: Path) -> None:
    """Emit the generated Django models module under *output_dir*."""
    target = output_dir / "mpr" / "media_assets" / "models.py"
    write_file(target, generate_django())
|
||||
|
||||
|
||||
def write_pydantic(output_dir: Path) -> None:
    """Emit every generated Pydantic schema module under api/schemas/."""
    dest = output_dir / "api" / "schemas"
    for filename, source in generate_pydantic().items():
        write_file(dest / filename, source)
|
||||
|
||||
|
||||
def write_typescript(output_dir: Path) -> None:
    """Emit the generated TypeScript types file for the timeline UI."""
    target = output_dir / "ui" / "timeline" / "src" / "types.ts"
    write_file(target, generate_typescript())
|
||||
|
||||
|
||||
def write_proto(output_dir: Path) -> None:
    """Write worker.proto and compile Python gRPC stubs from it.

    Stub generation shells out to ``python -m grpc_tools.protoc``. On
    failure (e.g. grpcio-tools not installed, or a bad proto) a warning is
    printed including the compiler's stderr — the previous version
    discarded that output, making failures undiagnosable.
    """
    proto_dir = output_dir / "grpc" / "protos"
    proto_path = proto_dir / "worker.proto"
    write_file(proto_path, generate_proto())

    # Generate Python stubs alongside the proto package.
    grpc_dir = output_dir / "grpc"
    result = subprocess.run(
        [
            sys.executable,
            "-m",
            "grpc_tools.protoc",
            f"-I{proto_dir}",
            f"--python_out={grpc_dir}",
            f"--grpc_python_out={grpc_dir}",
            str(proto_path),
        ],
        capture_output=True,
        text=True,
    )

    if result.returncode == 0:
        print(f" {grpc_dir}/worker_pb2.py")
        print(f" {grpc_dir}/worker_pb2_grpc.py")
    else:
        print(" Warning: grpc_tools failed - pip install grpcio-tools")
        # Surface the compiler's own message so the failure is diagnosable.
        if result.stderr:
            print(result.stderr.strip())
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Main
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def main() -> None:
    """CLI entry point: parse target flags and run the chosen generators."""
    parser = argparse.ArgumentParser(description="Generate from schema")
    for flag in ("--django", "--pydantic", "--typescript", "--proto", "--all"):
        parser.add_argument(flag, action="store_true")
    parser.add_argument("--output", type=Path, default=PROJECT_ROOT)
    args = parser.parse_args()

    # No explicit selection means generate everything.
    if not (args.django or args.pydantic or args.typescript or args.proto or args.all):
        args.all = True

    print(f"Generating to {args.output}\n")

    targets: list[tuple[bool, str, Callable]] = [
        (args.django or args.all, "Django", write_django),
        (args.pydantic or args.all, "Pydantic", write_pydantic),
        (args.typescript or args.all, "TypeScript", write_typescript),
        (args.proto or args.all, "Proto", write_proto),
    ]

    for selected, label, writer in targets:
        if not selected:
            continue
        print(f"{label}:")
        writer(args.output)
        print()

    print("Done!")


if __name__ == "__main__":
    main()
|
||||
35
schema/modelgen.json
Normal file
35
schema/modelgen.json
Normal file
@@ -0,0 +1,35 @@
|
||||
{
|
||||
"schema": "schema/models",
|
||||
"targets": [
|
||||
{
|
||||
"target": "django",
|
||||
"output": "mpr/media_assets/models.py",
|
||||
"include": ["dataclasses", "enums"]
|
||||
},
|
||||
{
|
||||
"target": "pydantic",
|
||||
"output": "api/schema/",
|
||||
"include": ["dataclasses", "enums"],
|
||||
"name_map": {
|
||||
"TranscodeJob": "Job",
|
||||
"MediaAsset": "Asset",
|
||||
"TranscodePreset": "Preset"
|
||||
}
|
||||
},
|
||||
{
|
||||
"target": "graphene",
|
||||
"output": "api/schema/graphql.py",
|
||||
"include": ["dataclasses", "enums", "api"]
|
||||
},
|
||||
{
|
||||
"target": "typescript",
|
||||
"output": "ui/timeline/src/types.ts",
|
||||
"include": ["dataclasses", "enums", "api"]
|
||||
},
|
||||
{
|
||||
"target": "protobuf",
|
||||
"output": "rpc/protos/worker.proto",
|
||||
"include": ["grpc"]
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -73,8 +73,6 @@ export interface TranscodeJob {
|
||||
completed_at: string | null;
|
||||
}
|
||||
|
||||
// API request/response types
|
||||
|
||||
export interface CreateJobRequest {
|
||||
source_asset_id: string;
|
||||
preset_id: string | null;
|
||||
|
||||
Reference in New Issue
Block a user