chunker ui redo
@@ -2,22 +2,24 @@
 ChunkHandler — job handler that wraps the chunker Pipeline.
 
 Downloads source from S3/MinIO, runs FFmpeg chunking pipeline,
-uploads mp4 segments + manifest back to S3/MinIO.
+writes mp4 segments + manifest to media/out/chunks/{job_id}/.
 Pushes real-time events to Redis for SSE consumption.
 """
 
 import logging
 import os
-import shutil
-import tempfile
 from typing import Any, Callable, Dict, Optional
 
 from core.events import push_event as push_chunk_event
 from core.chunker import Pipeline
-from core.storage import BUCKET_IN, BUCKET_OUT, download_to_temp, upload_file
+from core.storage import BUCKET_IN, download_to_temp
 
 from .base import Handler
 
 logger = logging.getLogger(__name__)
 
+MEDIA_OUT_DIR = os.environ.get("MEDIA_OUT_DIR", "/app/media/out")
+
 
 class ChunkHandler(Handler):
     """
@@ -44,14 +46,19 @@ class ChunkHandler(Handler):
         logger.info(f"ChunkHandler starting job {job_id}: {source_key}")
 
         # Download source from S3/MinIO
         push_chunk_event(job_id, "pipeline_start", {"status": "downloading", "source_key": source_key})
         tmp_source = download_to_temp(BUCKET_IN, source_key)
 
-        # Create temp output directory for chunks
-        tmp_output_dir = tempfile.mkdtemp(prefix=f"chunks-{job_id}-")
+        # Output directory: media/out/chunks/{job_id}/
+        output_dir = os.path.join(MEDIA_OUT_DIR, "chunks", job_id)
+        if processor_type == "ffmpeg":
+            os.makedirs(output_dir, exist_ok=True)
+
         try:
             def event_bridge(event_type: str, data: Dict[str, Any]) -> None:
-                """Bridge pipeline events to the job progress callback."""
+                """Bridge pipeline events to Redis + optional progress callback."""
+                push_chunk_event(job_id, event_type, data)
+
                 if progress_callback and event_type == "pipeline_complete":
                     progress_callback(100, data)
                 elif progress_callback and event_type == "chunk_done":
@@ -68,29 +75,28 @@ class ChunkHandler(Handler):
                 processor_type=processor_type,
                 queue_size=payload.get("queue_size", 10),
                 event_callback=event_bridge,
-                output_dir=tmp_output_dir if processor_type == "ffmpeg" else None,
+                output_dir=output_dir if processor_type == "ffmpeg" else None,
                 start_time=payload.get("start_time"),
                 end_time=payload.get("end_time"),
             )
 
             result = pipeline.run()
 
-            # Upload chunks + manifest to S3/MinIO
+            # Files are already in media/out/chunks/{job_id}/
             output_prefix = f"chunks/{job_id}"
-            uploaded_files = []
+            output_files = [
+                f"{output_prefix}/{os.path.basename(f)}"
+                for f in result.chunk_files
+            ]
+
-            for chunk_file in result.chunk_files:
-                filename = os.path.basename(chunk_file)
-                output_key = f"{output_prefix}/{filename}"
-                upload_file(chunk_file, BUCKET_OUT, output_key)
-                uploaded_files.append(output_key)
-                logger.info(f"Uploaded {output_key}")
-
-            # Upload manifest
-            manifest_path = os.path.join(tmp_output_dir, "manifest.json")
-            if os.path.exists(manifest_path):
-                manifest_key = f"{output_prefix}/manifest.json"
-                upload_file(manifest_path, BUCKET_OUT, manifest_key)
-                uploaded_files.append(manifest_key)
-                logger.info(f"Uploaded {manifest_key}")
+            push_chunk_event(job_id, "pipeline_complete", {
+                "status": "completed",
+                "total_chunks": result.total_chunks,
+                "processed": result.processed,
+                "failed": result.failed,
+                "elapsed": result.elapsed_time,
+                "throughput_mbps": result.throughput_mbps,
+            })
+
             return {
                 "status": "completed" if result.failed == 0 else "completed_with_errors",
@@ -104,16 +110,16 @@ class ChunkHandler(Handler):
                 "errors": result.errors,
                 "chunks_in_order": result.chunks_in_order,
                 "output_prefix": output_prefix,
-                "uploaded_files": uploaded_files,
+                "output_files": output_files,
             }
 
         except Exception as e:
            push_chunk_event(job_id, "pipeline_error", {"status": "failed", "error": str(e)})
            raise
 
        finally:
-            # Cleanup temp files
+            # Cleanup temp source file only (output dir is persistent)
+            try:
+                os.unlink(tmp_source)
+            except OSError:
+                pass
-            try:
-                shutil.rmtree(tmp_output_dir, ignore_errors=True)
-            except OSError:
-                pass
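The handler's docstring describes the event flow this commit wires up: every pipeline event is pushed to Redis so a UI can follow the job over SSE. A minimal sketch of what that consuming side could look like, assuming push_event publishes a JSON envelope of the form {"event": ..., "data": ...} on a per-job pub/sub channel named chunk_events:{job_id}; the channel name, envelope shape, and Flask wiring are illustrative assumptions, not part of this commit:

import json

import redis
from flask import Flask, Response

app = Flask(__name__)
r = redis.Redis()  # defaults to localhost:6379

@app.route("/jobs/<job_id>/events")
def job_events(job_id: str) -> Response:
    """Relay the chunker's Redis events to the browser as SSE frames."""
    def stream():
        pubsub = r.pubsub()
        # Assumed channel layout; core.events.push_event's actual Redis
        # key/channel scheme is not shown in this diff.
        pubsub.subscribe(f"chunk_events:{job_id}")
        try:
            for message in pubsub.listen():
                if message["type"] != "message":
                    continue  # skip the subscribe confirmation
                payload = json.loads(message["data"])
                # One SSE frame per pipeline event: pipeline_start,
                # chunk_done, pipeline_complete, pipeline_error, ...
                yield f"data: {json.dumps(payload)}\n\n"
                if payload.get("event") in ("pipeline_complete", "pipeline_error"):
                    break  # job finished either way; close the stream
        finally:
            pubsub.close()

    return Response(stream(), mimetype="text/event-stream")

Since segments and manifest.json now land in media/out/chunks/{job_id}/ instead of being uploaded to BUCKET_OUT, a UI can fetch the listed output_files directly by URL while the stream above reports progress.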