Move to PostgreSQL

This commit is contained in:
2026-03-26 10:27:07 -03:00
parent c9ba9e4f5f
commit a85722f96a
20 changed files with 800 additions and 234 deletions

View File

@@ -17,3 +17,4 @@ from .presets import (
get_preset,
list_presets,
)
from .connection import get_session, create_tables

View File

@@ -1,48 +1,58 @@
"""Database operations for MediaAsset."""
"""Database operations for MediaAsset — SQLModel."""
from __future__ import annotations
from typing import Optional
from uuid import UUID
from sqlmodel import select
def list_assets(status: Optional[str] = None, search: Optional[str] = None):
from admin.mpr.media_assets.models import MediaAsset
qs = MediaAsset.objects.all()
if status:
qs = qs.filter(status=status)
if search:
qs = qs.filter(filename__icontains=search)
return list(qs)
from .connection import get_session
from .models import MediaAsset
def get_asset(id: UUID):
from admin.mpr.media_assets.models import MediaAsset
def list_assets(status: Optional[str] = None, search: Optional[str] = None) -> list[MediaAsset]:
with get_session() as session:
stmt = select(MediaAsset)
if status:
stmt = stmt.where(MediaAsset.status == status)
if search:
stmt = stmt.where(MediaAsset.filename.ilike(f"%{search}%"))
return list(session.exec(stmt).all())
return MediaAsset.objects.get(id=id)
def get_asset(id: UUID) -> MediaAsset | None:
with get_session() as session:
return session.get(MediaAsset, id)
def get_asset_filenames() -> set[str]:
from admin.mpr.media_assets.models import MediaAsset
return set(MediaAsset.objects.values_list("filename", flat=True))
with get_session() as session:
return set(session.exec(select(MediaAsset.filename)).all())
def create_asset(*, filename: str, file_path: str, file_size: int):
from admin.mpr.media_assets.models import MediaAsset
return MediaAsset.objects.create(
filename=filename,
file_path=file_path,
file_size=file_size,
)
def create_asset(*, filename: str, file_path: str, file_size: int) -> MediaAsset:
asset = MediaAsset(filename=filename, file_path=file_path, file_size=file_size)
with get_session() as session:
session.add(asset)
session.commit()
session.refresh(asset)
return asset
def update_asset(asset, **fields):
for key, value in fields.items():
setattr(asset, key, value)
asset.save(update_fields=list(fields.keys()))
return asset
def update_asset(id: UUID, **fields) -> None:
with get_session() as session:
asset = session.get(MediaAsset, id)
if not asset:
return
for k, v in fields.items():
setattr(asset, k, v)
session.commit()
def delete_asset(asset):
asset.delete()
def delete_asset(id: UUID) -> None:
with get_session() as session:
asset = session.get(MediaAsset, id)
if asset:
session.delete(asset)
session.commit()

33
core/db/connection.py Normal file
View File

@@ -0,0 +1,33 @@
"""
Database engine and session — SQLModel/SQLAlchemy, no Django.
Reads DATABASE_URL from the environment.
"""
from __future__ import annotations
import os
from sqlalchemy import create_engine
from sqlmodel import Session
DATABASE_URL = os.environ.get("DATABASE_URL", "postgresql://mpr:mpr@localhost:5432/mpr")
_engine = None
def get_engine():
global _engine
if _engine is None:
_engine = create_engine(DATABASE_URL, pool_size=5, max_overflow=10)
return _engine
def get_session() -> Session:
return Session(get_engine())
def create_tables():
"""Create all SQLModel tables."""
from .models import SQLModel # noqa — registers all models
SQLModel.metadata.create_all(get_engine())

View File

@@ -1,175 +1,213 @@
"""Database operations for DetectJob and StageCheckpoint."""
"""Database operations for detection pipeline — SQLModel."""
from __future__ import annotations
from typing import Optional
from uuid import UUID
from sqlmodel import select
from .connection import get_session
from .models import (
DetectJob, StageCheckpoint, KnownBrand, SourceBrandSighting,
)
# ---------------------------------------------------------------------------
# DetectJob
# ---------------------------------------------------------------------------
def create_detect_job(**fields):
from admin.mpr.media_assets.models import DetectJob
return DetectJob.objects.create(**fields)
def create_detect_job(**fields) -> DetectJob:
job = DetectJob(**fields)
with get_session() as session:
session.add(job)
session.commit()
session.refresh(job)
return job
def get_detect_job(id: UUID):
from admin.mpr.media_assets.models import DetectJob
return DetectJob.objects.get(id=id)
def get_detect_job(id: UUID) -> DetectJob | None:
with get_session() as session:
return session.get(DetectJob, id)
def update_detect_job(job_id: UUID, **fields):
from admin.mpr.media_assets.models import DetectJob
DetectJob.objects.filter(id=job_id).update(**fields)
def update_detect_job(job_id: UUID, **fields) -> None:
with get_session() as session:
job = session.get(DetectJob, job_id)
if not job:
return
for k, v in fields.items():
setattr(job, k, v)
session.commit()
def list_detect_jobs(
parent_job_id: Optional[UUID] = None,
status: Optional[str] = None,
):
from admin.mpr.media_assets.models import DetectJob
qs = DetectJob.objects.all()
if parent_job_id:
qs = qs.filter(parent_job_id=parent_job_id)
if status:
qs = qs.filter(status=status)
return list(qs)
) -> list[DetectJob]:
with get_session() as session:
stmt = select(DetectJob)
if parent_job_id:
stmt = stmt.where(DetectJob.parent_job_id == parent_job_id)
if status:
stmt = stmt.where(DetectJob.status == status)
return list(session.exec(stmt).all())
# ---------------------------------------------------------------------------
# StageCheckpoint
# ---------------------------------------------------------------------------
def save_stage_checkpoint(**fields):
from admin.mpr.media_assets.models import StageCheckpoint
return StageCheckpoint.objects.create(**fields)
def save_stage_checkpoint(**fields) -> StageCheckpoint:
with get_session() as session:
# Upsert: replace if same job_id + stage
job_id = fields.get("job_id")
stage = fields.get("stage")
if job_id and stage:
stmt = select(StageCheckpoint).where(
StageCheckpoint.job_id == job_id,
StageCheckpoint.stage == stage,
)
existing = session.exec(stmt).first()
if existing:
for k, v in fields.items():
setattr(existing, k, v)
session.commit()
session.refresh(existing)
return existing
checkpoint = StageCheckpoint(**fields)
session.add(checkpoint)
session.commit()
session.refresh(checkpoint)
return checkpoint
def get_stage_checkpoint(job_id: UUID, stage: str):
from admin.mpr.media_assets.models import StageCheckpoint
return StageCheckpoint.objects.get(job_id=job_id, stage=stage)
def get_stage_checkpoint(job_id: UUID, stage: str) -> StageCheckpoint | None:
with get_session() as session:
stmt = select(StageCheckpoint).where(
StageCheckpoint.job_id == job_id,
StageCheckpoint.stage == stage,
)
return session.exec(stmt).first()
def list_stage_checkpoints(job_id: UUID) -> list[str]:
from admin.mpr.media_assets.models import StageCheckpoint
stages = (
StageCheckpoint.objects
.filter(job_id=job_id)
.order_by("stage_index")
.values_list("stage", flat=True)
)
return list(stages)
with get_session() as session:
stmt = (
select(StageCheckpoint.stage)
.where(StageCheckpoint.job_id == job_id)
.order_by(StageCheckpoint.stage_index)
)
return list(session.exec(stmt).all())
def delete_stage_checkpoints(job_id: UUID):
from admin.mpr.media_assets.models import StageCheckpoint
StageCheckpoint.objects.filter(job_id=job_id).delete()
def delete_stage_checkpoints(job_id: UUID) -> None:
with get_session() as session:
stmt = select(StageCheckpoint).where(StageCheckpoint.job_id == job_id)
for cp in session.exec(stmt).all():
session.delete(cp)
session.commit()
# ---------------------------------------------------------------------------
# KnownBrand
# ---------------------------------------------------------------------------
def get_or_create_brand(canonical_name: str, aliases: list[str] | None = None,
source: str = "ocr") -> tuple:
"""Get existing brand or create new one. Returns (brand, created)."""
from admin.mpr.media_assets.models import KnownBrand
import uuid
def get_or_create_brand(canonical_name: str, aliases: Optional[list[str]] = None,
source: str = "ocr") -> tuple[KnownBrand, bool]:
normalized = canonical_name.strip()
brand = KnownBrand.objects.filter(canonical_name__iexact=normalized).first()
if brand:
return brand, False
with get_session() as session:
stmt = select(KnownBrand).where(KnownBrand.canonical_name.ilike(normalized))
brand = session.exec(stmt).first()
if brand:
return brand, False
# Check aliases of existing brands
for existing in KnownBrand.objects.all():
existing_aliases = [a.lower() for a in (existing.aliases or [])]
if normalized.lower() in existing_aliases:
return existing, False
brand = KnownBrand.objects.create(
id=uuid.uuid4(),
canonical_name=normalized,
aliases=aliases or [],
first_source=source,
)
return brand, True
brand = KnownBrand(
canonical_name=normalized,
aliases=aliases or [],
first_source=source,
)
session.add(brand)
session.commit()
session.refresh(brand)
return brand, True
def find_brand_by_text(text: str) -> Optional[object]:
"""Find a known brand by canonical name or alias (case-insensitive)."""
from admin.mpr.media_assets.models import KnownBrand
def find_brand_by_text(text: str) -> KnownBrand | None:
normalized = text.strip().lower()
# Exact canonical match
brand = KnownBrand.objects.filter(canonical_name__iexact=normalized).first()
if brand:
return brand
# Search aliases (jsonb contains)
for brand in KnownBrand.objects.all():
brand_aliases = [a.lower() for a in (brand.aliases or [])]
if normalized in brand_aliases:
with get_session() as session:
stmt = select(KnownBrand).where(KnownBrand.canonical_name.ilike(normalized))
brand = session.exec(stmt).first()
if brand:
return brand
return None
# Alias search — check if normalized is in any brand's aliases
all_brands = session.exec(select(KnownBrand)).all()
for b in all_brands:
if normalized in [a.lower() for a in (b.aliases or [])]:
return b
return None
def list_all_brands() -> list:
from admin.mpr.media_assets.models import KnownBrand
return list(KnownBrand.objects.all().order_by("canonical_name"))
def list_all_brands() -> list[KnownBrand]:
with get_session() as session:
return list(session.exec(select(KnownBrand).order_by(KnownBrand.canonical_name)).all())
def update_brand(brand_id: UUID, **fields):
from admin.mpr.media_assets.models import KnownBrand
KnownBrand.objects.filter(id=brand_id).update(**fields)
def update_brand(brand_id: UUID, **fields) -> None:
with get_session() as session:
brand = session.get(KnownBrand, brand_id)
if not brand:
return
for k, v in fields.items():
setattr(brand, k, v)
session.commit()
# ---------------------------------------------------------------------------
# SourceBrandSighting
# ---------------------------------------------------------------------------
def get_source_sightings(source_asset_id: UUID) -> list:
"""Get all brand sightings for a specific source video."""
from admin.mpr.media_assets.models import SourceBrandSighting
return list(
SourceBrandSighting.objects
.filter(source_asset_id=source_asset_id)
.order_by("-occurrences")
)
def get_source_sightings(source_asset_id: UUID) -> list[SourceBrandSighting]:
with get_session() as session:
stmt = (
select(SourceBrandSighting)
.where(SourceBrandSighting.source_asset_id == source_asset_id)
.order_by(SourceBrandSighting.occurrences.desc())
)
return list(session.exec(stmt).all())
def record_sighting(source_asset_id: UUID, brand_id: UUID, brand_name: str,
timestamp: float, confidence: float, source: str = "ocr"):
"""Record or update a brand sighting for a source."""
from admin.mpr.media_assets.models import SourceBrandSighting
import uuid
timestamp: float, confidence: float, source: str = "ocr") -> SourceBrandSighting:
with get_session() as session:
stmt = select(SourceBrandSighting).where(
SourceBrandSighting.source_asset_id == source_asset_id,
SourceBrandSighting.brand_id == brand_id,
)
sighting = session.exec(stmt).first()
sighting = SourceBrandSighting.objects.filter(
source_asset_id=source_asset_id,
brand_id=brand_id,
).first()
if sighting:
total_conf = sighting.avg_confidence * sighting.occurrences + confidence
sighting.occurrences += 1
sighting.last_seen_timestamp = timestamp
sighting.avg_confidence = total_conf / sighting.occurrences
session.commit()
session.refresh(sighting)
return sighting
if sighting:
sighting.occurrences += 1
sighting.last_seen_timestamp = timestamp
total_conf = sighting.avg_confidence * (sighting.occurrences - 1) + confidence
sighting.avg_confidence = total_conf / sighting.occurrences
sighting.save()
sighting = SourceBrandSighting(
source_asset_id=source_asset_id,
brand_id=brand_id,
brand_name=brand_name,
first_seen_timestamp=timestamp,
last_seen_timestamp=timestamp,
occurrences=1,
detection_source=source,
avg_confidence=confidence,
)
session.add(sighting)
session.commit()
session.refresh(sighting)
return sighting
sighting = SourceBrandSighting.objects.create(
id=uuid.uuid4(),
source_asset_id=source_asset_id,
brand_id=brand_id,
brand_name=brand_name,
first_seen_timestamp=timestamp,
last_seen_timestamp=timestamp,
occurrences=1,
detection_source=source,
avg_confidence=confidence,
)
return sighting

View File

@@ -1,40 +1,49 @@
"""Database operations for TranscodeJob."""
"""Database operations for TranscodeJob — SQLModel."""
from __future__ import annotations
from typing import Optional
from uuid import UUID
from sqlmodel import select
def list_jobs(status: Optional[str] = None, source_asset_id: Optional[UUID] = None):
from admin.mpr.media_assets.models import TranscodeJob
qs = TranscodeJob.objects.all()
if status:
qs = qs.filter(status=status)
if source_asset_id:
qs = qs.filter(source_asset_id=source_asset_id)
return list(qs)
from .connection import get_session
from .models import TranscodeJob
def get_job(id: UUID):
from admin.mpr.media_assets.models import TranscodeJob
return TranscodeJob.objects.get(id=id)
def list_jobs(status: Optional[str] = None, source_asset_id: Optional[UUID] = None) -> list[TranscodeJob]:
with get_session() as session:
stmt = select(TranscodeJob)
if status:
stmt = stmt.where(TranscodeJob.status == status)
if source_asset_id:
stmt = stmt.where(TranscodeJob.source_asset_id == source_asset_id)
return list(session.exec(stmt).all())
def create_job(**fields):
from admin.mpr.media_assets.models import TranscodeJob
return TranscodeJob.objects.create(**fields)
def get_job(id: UUID) -> TranscodeJob | None:
with get_session() as session:
return session.get(TranscodeJob, id)
def update_job(job, **fields):
for key, value in fields.items():
setattr(job, key, value)
job.save(update_fields=list(fields.keys()))
return job
def create_job(**fields) -> TranscodeJob:
job = TranscodeJob(**fields)
with get_session() as session:
session.add(job)
session.commit()
session.refresh(job)
return job
def update_job_fields(job_id, **fields):
from admin.mpr.media_assets.models import TranscodeJob
def update_job(id: UUID, **fields) -> None:
with get_session() as session:
job = session.get(TranscodeJob, id)
if not job:
return
for k, v in fields.items():
setattr(job, k, v)
session.commit()
TranscodeJob.objects.filter(id=job_id).update(**fields)
def update_job_fields(job_id: UUID, **fields) -> None:
update_job(job_id, **fields)

233
core/db/models.py Normal file
View File

@@ -0,0 +1,233 @@
"""
SQLModel Table Models - GENERATED FILE
Do not edit directly. Regenerate using modelgen.
"""
from datetime import datetime
from enum import Enum
from typing import Any, Dict, List, Optional
from uuid import UUID, uuid4
from sqlmodel import SQLModel, Field, Column
from sqlalchemy import JSON
class AssetStatus(str, Enum):
    # Lifecycle of a MediaAsset. Values are stored as plain strings in the DB.
    PENDING = "pending"
    READY = "ready"
    ERROR = "error"


class JobStatus(str, Enum):
    # Lifecycle of a TranscodeJob.
    PENDING = "pending"
    PROCESSING = "processing"
    COMPLETED = "completed"
    FAILED = "failed"
    CANCELLED = "cancelled"


class ChunkJobStatus(str, Enum):
    # Lifecycle of a ChunkJob: split (chunking) -> process -> collect results.
    PENDING = "pending"
    CHUNKING = "chunking"
    PROCESSING = "processing"
    COLLECTING = "collecting"
    COMPLETED = "completed"
    FAILED = "failed"
    CANCELLED = "cancelled"


class DetectJobStatus(str, Enum):
    # Lifecycle of a DetectJob; PAUSED exists alongside stage checkpoints.
    PENDING = "pending"
    RUNNING = "running"
    PAUSED = "paused"
    COMPLETED = "completed"
    FAILED = "failed"
    CANCELLED = "cancelled"


class RunType(str, Enum):
    # How a DetectJob run was started (initial run vs replay/retry of a parent job).
    INITIAL = "initial"
    REPLAY = "replay"
    RETRY = "retry"


class BrandSource(str, Enum):
    # Detection source recorded on brands and sightings ("ocr" is the default
    # used throughout the db-ops layer).
    OCR = "ocr"
    VLM = "local_vlm"
    CLOUD = "cloud_llm"
    MANUAL = "manual"


class SourceType(str, Enum):
    # Origin of a media source.
    CHUNK_JOB = "chunk_job"
    UPLOAD = "upload"
    DEVICE = "device"
    STREAM = "stream"
class MediaAsset(SQLModel, table=True):
    """A video/audio file registered in the system."""
    __tablename__ = "media_assets"

    id: UUID = Field(default_factory=uuid4, primary_key=True)
    filename: str
    file_path: str
    # Enum-typed columns default to the raw string; equal to the enum member
    # since these are str-Enums.
    status: AssetStatus = "pending"
    error_message: Optional[str] = None
    # Technical metadata; all optional (presumably filled in after probing —
    # TODO confirm against the ingest code).
    file_size: Optional[int] = None
    duration: Optional[float] = None
    video_codec: Optional[str] = None
    audio_codec: Optional[str] = None
    width: Optional[int] = None
    height: Optional[int] = None
    framerate: Optional[float] = None
    bitrate: Optional[int] = None
    # Free-form JSON columns; server_default keeps the column NOT NULL for
    # rows inserted outside the ORM.
    properties: Dict[str, Any] = Field(default_factory=dict, sa_column=Column(JSON, nullable=False, server_default='{}'))
    comments: str = ""
    tags: List[str] = Field(default_factory=list, sa_column=Column(JSON, nullable=False, server_default='[]'))
    # NOTE(review): datetime.utcnow produces naive timestamps (and is deprecated
    # in Python 3.12) — confirm whether tz-aware columns are wanted before
    # changing this generated file.
    created_at: Optional[datetime] = Field(default_factory=datetime.utcnow)
    updated_at: Optional[datetime] = Field(default_factory=datetime.utcnow)


class TranscodePreset(SQLModel, table=True):
    """A reusable transcoding configuration (like Handbrake presets)."""
    __tablename__ = "transcode_presets"

    id: UUID = Field(default_factory=uuid4, primary_key=True)
    name: str
    description: str = ""
    is_builtin: bool = False
    # Video settings.
    container: str = "mp4"
    video_codec: str = "libx264"
    video_bitrate: Optional[str] = None
    video_crf: Optional[int] = None
    video_preset: Optional[str] = None
    resolution: Optional[str] = None
    framerate: Optional[float] = None
    # Audio settings.
    audio_codec: str = "aac"
    audio_bitrate: Optional[str] = None
    audio_channels: Optional[int] = None
    audio_samplerate: Optional[int] = None
    # Extra arguments (presumably passed to the transcode command — confirm).
    extra_args: List[str] = Field(default_factory=list, sa_column=Column(JSON, nullable=False, server_default='[]'))
    created_at: Optional[datetime] = Field(default_factory=datetime.utcnow)
    updated_at: Optional[datetime] = Field(default_factory=datetime.utcnow)


class TranscodeJob(SQLModel, table=True):
    """A transcoding or trimming job in the queue."""
    __tablename__ = "transcode_jobs"

    id: UUID = Field(default_factory=uuid4, primary_key=True)
    # Plain indexed UUID — no FK constraint is declared here.
    source_asset_id: UUID = Field(index=True)
    preset_id: Optional[UUID] = None
    # Copy of the preset taken for this job (so later preset edits presumably
    # don't affect queued jobs — confirm against the enqueue code).
    preset_snapshot: Dict[str, Any] = Field(default_factory=dict, sa_column=Column(JSON, nullable=False, server_default='{}'))
    trim_start: Optional[float] = None
    trim_end: Optional[float] = None
    output_filename: str = ""
    output_path: Optional[str] = None
    output_asset_id: Optional[UUID] = None
    status: JobStatus = "pending"
    # NOTE(review): units of progress (fraction vs percent) not visible here.
    progress: float = 0.0
    current_frame: Optional[int] = None
    current_time: Optional[float] = None
    speed: Optional[str] = None
    error_message: Optional[str] = None
    celery_task_id: Optional[str] = None
    execution_arn: Optional[str] = None
    priority: int = 0
    created_at: Optional[datetime] = Field(default_factory=datetime.utcnow)
    started_at: Optional[datetime] = None
    completed_at: Optional[datetime] = None


class ChunkJob(SQLModel, table=True):
    """A chunk pipeline job — splits a media file into chunks and processes them"""
    __tablename__ = "chunk_jobs"

    id: UUID = Field(default_factory=uuid4, primary_key=True)
    source_asset_id: UUID = Field(index=True)
    # Chunk size (presumably seconds — confirm units in the chunking code).
    chunk_duration: float = 10.0
    num_workers: int = 4
    max_retries: int = 3
    processor_type: str = "ffmpeg"
    status: ChunkJobStatus = "pending"
    progress: float = 0.0
    # Aggregate chunk counters.
    total_chunks: int = 0
    processed_chunks: int = 0
    failed_chunks: int = 0
    retry_count: int = 0
    error_message: Optional[str] = None
    throughput_mbps: Optional[float] = None
    elapsed_seconds: Optional[float] = None
    celery_task_id: Optional[str] = None
    priority: int = 0
    created_at: Optional[datetime] = Field(default_factory=datetime.utcnow)
    started_at: Optional[datetime] = None
    completed_at: Optional[datetime] = None


class DetectJob(SQLModel, table=True):
    """A detection pipeline job."""
    __tablename__ = "detect_jobs"

    id: UUID = Field(default_factory=uuid4, primary_key=True)
    source_asset_id: UUID = Field(index=True)
    video_path: str
    profile_name: str = "soccer_broadcast"
    # Replays/retries reference the job they were spawned from.
    parent_job_id: Optional[UUID] = Field(default=None, index=True)
    run_type: RunType = "initial"
    replay_from_stage: Optional[str] = None
    config_overrides: Dict[str, Any] = Field(default_factory=dict, sa_column=Column(JSON, nullable=False, server_default='{}'))
    status: DetectJobStatus = "pending"
    current_stage: Optional[str] = None
    progress: float = 0.0
    error_message: Optional[str] = None
    # Aggregate result counters.
    total_detections: int = 0
    brands_found: int = 0
    cloud_llm_calls: int = 0
    estimated_cost_usd: float = 0.0
    celery_task_id: Optional[str] = None
    priority: int = 0
    created_at: Optional[datetime] = Field(default_factory=datetime.utcnow)
    started_at: Optional[datetime] = None
    completed_at: Optional[datetime] = None


class StageCheckpoint(SQLModel, table=True):
    """A checkpoint saved after a pipeline stage completes."""
    __tablename__ = "stage_checkpoints"

    id: UUID = Field(default_factory=uuid4, primary_key=True)
    job_id: UUID = Field(index=True)
    # One row per (job_id, stage) — upserted by db_detect.save_stage_checkpoint.
    stage: str
    # Pipeline position; used to order checkpoints when listing.
    stage_index: int
    # Intermediate pipeline artifacts, all stored as JSON columns.
    frames_prefix: str = ""
    frames_manifest: Dict[str, Any] = Field(default_factory=dict, sa_column=Column(JSON, nullable=False, server_default='{}'))
    frames_meta: List[str] = Field(default_factory=list, sa_column=Column(JSON, nullable=False, server_default='[]'))
    filtered_frame_sequences: List[int] = Field(default_factory=list, sa_column=Column(JSON, nullable=False, server_default='[]'))
    boxes_by_frame: Dict[str, Any] = Field(default_factory=dict, sa_column=Column(JSON, nullable=False, server_default='{}'))
    text_candidates: List[str] = Field(default_factory=list, sa_column=Column(JSON, nullable=False, server_default='[]'))
    unresolved_candidates: List[str] = Field(default_factory=list, sa_column=Column(JSON, nullable=False, server_default='[]'))
    detections: List[str] = Field(default_factory=list, sa_column=Column(JSON, nullable=False, server_default='[]'))
    stats: Dict[str, Any] = Field(default_factory=dict, sa_column=Column(JSON, nullable=False, server_default='{}'))
    config_snapshot: Dict[str, Any] = Field(default_factory=dict, sa_column=Column(JSON, nullable=False, server_default='{}'))
    config_overrides: Dict[str, Any] = Field(default_factory=dict, sa_column=Column(JSON, nullable=False, server_default='{}'))
    video_path: str = ""
    profile_name: str = ""
    created_at: Optional[datetime] = Field(default_factory=datetime.utcnow)


class KnownBrand(SQLModel, table=True):
    """A brand discovered or registered in the system."""
    __tablename__ = "known_brands"

    id: UUID = Field(default_factory=uuid4, primary_key=True)
    canonical_name: str = Field(index=True)
    # Alternate spellings; JSON column, matched case-insensitively in Python
    # by the db-ops layer.
    aliases: List[str] = Field(default_factory=list, sa_column=Column(JSON, nullable=False, server_default='[]'))
    first_source: BrandSource = "ocr"
    total_occurrences: int = 0
    confirmed: bool = False
    created_at: Optional[datetime] = Field(default_factory=datetime.utcnow)
    updated_at: Optional[datetime] = Field(default_factory=datetime.utcnow)


class SourceBrandSighting(SQLModel, table=True):
    """A brand seen in a specific source (video/asset)."""
    __tablename__ = "source_brand_sightings"

    id: UUID = Field(default_factory=uuid4, primary_key=True)
    source_asset_id: UUID = Field(index=True)
    brand_id: UUID
    # Copy of the brand name at sighting time (presumably denormalized from
    # KnownBrand.canonical_name — confirm).
    brand_name: str
    # Timestamps within the source media (units presumably seconds — confirm).
    first_seen_timestamp: float = 0.0
    last_seen_timestamp: float = 0.0
    occurrences: int = 0
    detection_source: BrandSource = "ocr"
    # Running mean confidence; maintained by record_sighting.
    avg_confidence: float = 0.0
    created_at: Optional[datetime] = Field(default_factory=datetime.utcnow)

View File

@@ -1,15 +1,20 @@
"""Database operations for TranscodePreset."""
"""Database operations for TranscodePreset — SQLModel."""
from __future__ import annotations
from uuid import UUID
from sqlmodel import select
def list_presets():
from admin.mpr.media_assets.models import TranscodePreset
return list(TranscodePreset.objects.all())
from .connection import get_session
from .models import TranscodePreset
def get_preset(id: UUID):
from admin.mpr.media_assets.models import TranscodePreset
def list_presets() -> list[TranscodePreset]:
with get_session() as session:
return list(session.exec(select(TranscodePreset)).all())
return TranscodePreset.objects.get(id=id)
def get_preset(id: UUID) -> TranscodePreset | None:
with get_session() as session:
return session.get(TranscodePreset, id)