Add chunk pipeline job model (ChunkJob, ChunkJobStatus) and UI support

This commit is contained in:
2026-03-13 14:29:38 -03:00
parent 3eeedebb15
commit ccc478fbaa
69 changed files with 6481 additions and 282 deletions

View File

@@ -23,12 +23,12 @@ from .grpc import (
ProgressUpdate,
WorkerStatus,
)
from .jobs import JobStatus, TranscodeJob
from .jobs import ChunkJob, ChunkJobStatus, JobStatus, TranscodeJob
from .media import AssetStatus, MediaAsset
from .presets import BUILTIN_PRESETS, TranscodePreset
# Core domain models - generates Django, Pydantic, TypeScript
DATACLASSES = [MediaAsset, TranscodePreset, TranscodeJob]
DATACLASSES = [MediaAsset, TranscodePreset, TranscodeJob, ChunkJob]
# API request/response models - generates TypeScript only (no Django)
# WorkerStatus from grpc.py is reused here
@@ -42,7 +42,7 @@ API_MODELS = [
]
# Status enums - included in generated code
ENUMS = [AssetStatus, JobStatus]
ENUMS = [AssetStatus, JobStatus, ChunkJobStatus]
# gRPC messages - generates Proto
GRPC_MESSAGES = [
@@ -61,6 +61,7 @@ __all__ = [
"MediaAsset",
"TranscodePreset",
"TranscodeJob",
"ChunkJob",
# API Models
"CreateJobRequest",
"UpdateAssetRequest",
@@ -70,6 +71,7 @@ __all__ = [
# Enums
"AssetStatus",
"JobStatus",
"ChunkJobStatus",
# gRPC
"GRPC_SERVICE",
"JobRequest",

View File

@@ -1,13 +1,14 @@
"""
TranscodeJob Schema Definition
Job Schema Definitions
Source of truth for job data model.
Source of truth for job data models.
TranscodeJob and ChunkJob share common lifecycle fields by convention.
"""
from dataclasses import dataclass, field
from datetime import datetime
from enum import Enum
from typing import Any, Dict, Optional
from typing import Any, Dict, List, Optional
from uuid import UUID
@@ -77,3 +78,56 @@ class TranscodeJob:
return self.preset_id is None and (
self.trim_start is not None or self.trim_end is not None
)
class ChunkJobStatus(str, Enum):
    """Lifecycle states of a chunk pipeline job.

    Inherits ``str`` so values compare and serialize as plain strings.
    """

    # Queued; not yet picked up by the pipeline.
    PENDING = "pending"
    # Splitting the source media into chunks.
    CHUNKING = "chunking"
    # Worker pool is processing the chunks.
    PROCESSING = "processing"
    # Gathering processed chunks back together.
    COLLECTING = "collecting"
    # Terminal states: success, error, or user cancellation.
    COMPLETED = "completed"
    FAILED = "failed"
    CANCELLED = "cancelled"
@dataclass
class ChunkJob:
    """
    A chunk pipeline job — splits a media file into chunks and processes them
    through a concurrent worker pool.
    """

    # --- Identity & input -------------------------------------------------
    id: UUID
    source_asset_id: UUID  # media asset to chunk and process

    # --- Configuration ----------------------------------------------------
    chunk_duration: float = 10.0  # target chunk length, in seconds
    num_workers: int = 4          # size of the concurrent worker pool
    max_retries: int = 3          # retry budget before the job fails
    # One of: "ffmpeg", "checksum", "simulated_decode", "composite".
    processor_type: str = "ffmpeg"

    # --- Status & progress ------------------------------------------------
    status: ChunkJobStatus = ChunkJobStatus.PENDING
    progress: float = 0.0         # percentage complete, 0.0 to 100.0
    total_chunks: int = 0
    processed_chunks: int = 0
    failed_chunks: int = 0
    retry_count: int = 0
    error_message: Optional[str] = None

    # --- Result stats -----------------------------------------------------
    throughput_mbps: Optional[float] = None
    elapsed_seconds: Optional[float] = None

    # --- Worker tracking --------------------------------------------------
    celery_task_id: Optional[str] = None
    priority: int = 0             # lower value = higher priority

    # --- Timestamps -------------------------------------------------------
    created_at: Optional[datetime] = None
    started_at: Optional[datetime] = None
    completed_at: Optional[datetime] = None