chunker ui redo

This commit is contained in:
2026-03-15 16:03:53 -03:00
parent d5a3372d6b
commit b40bd68411
62 changed files with 5460 additions and 1493 deletions

View File

@@ -11,6 +11,7 @@ service WorkerService {
rpc StreamProgress(ProgressRequest) returns (stream ProgressUpdate);
rpc CancelJob(CancelRequest) returns (CancelResponse);
rpc GetWorkerStatus(Empty) returns (WorkerStatus);
rpc StreamChunkPipeline(ChunkStreamRequest) returns (stream ChunkPipelineEvent);
}
message JobRequest {
@@ -62,3 +63,24 @@ message WorkerStatus {
message Empty {
// Intentionally has no fields; used where an RPC needs no request payload.
}
// Subscription request for StreamChunkPipeline: identifies the job whose
// chunk-pipeline events the caller wants to receive.
message ChunkStreamRequest {
// ID of the job to stream events for.
string job_id = 1;
}
// One event in a job's chunk-processing pipeline, streamed back to the
// client by StreamChunkPipeline. Fields are sparsely populated depending
// on event_type; unset numeric fields read as 0 and strings as "".
message ChunkPipelineEvent {
// Job this event belongs to.
string job_id = 1;
// Event discriminator (e.g. "pipeline_complete", "pipeline_error" end the stream).
string event_type = 2;
// Monotonic ordering number of the event within the job's stream.
int32 sequence = 3;
// Identifier of the worker that emitted the event.
string worker_id = 4;
// Current pipeline/worker state label.
string state = 5;
// Number of chunks waiting in the queue when the event was emitted.
int32 queue_size = 6;
// Seconds elapsed since the pipeline started.
float elapsed = 7;
// Observed throughput in megabits/megabytes per second — unit set by the emitter.
float throughput_mbps = 8;
// Total chunks the job was split into.
int32 total_chunks = 9;
// Chunks successfully processed so far.
int32 processed_chunks = 10;
// Chunks that failed processing.
int32 failed_chunks = 11;
// Error description; empty unless the event reports a failure.
string error = 12;
// Seconds spent processing the chunk this event refers to.
float processing_time = 13;
// Retry count for the chunk this event refers to.
int32 retries = 14;
}

View File

@@ -173,6 +173,43 @@ class WorkerServicer(worker_pb2_grpc.WorkerServiceServicer):
message="Job not found",
)
def StreamChunkPipeline(self, request, context) -> Iterator[worker_pb2.ChunkPipelineEvent]:
    """Stream chunk pipeline events for a job.

    Polls the in-process event store (``core.events.poll_events``) and
    forwards each event to the client as a ``ChunkPipelineEvent``. The
    stream ends when a terminal event (``pipeline_complete`` or
    ``pipeline_error``) is seen, the client disconnects, or a 10-minute
    safety deadline elapses.

    Args:
        request: ``ChunkStreamRequest`` carrying the job_id to follow.
        context: gRPC servicer context; used to detect client disconnect.

    Yields:
        worker_pb2.ChunkPipelineEvent: one message per pipeline event.
    """
    from core.events import poll_events

    job_id = request.job_id
    # Lazy %-formatting: this handler is long-lived and may log per-stream.
    logger.info("StreamChunkPipeline: %s", job_id)
    cursor = 0
    deadline = time.monotonic() + 600  # safety cap: never stream longer than 10 min
    while context.is_active() and time.monotonic() < deadline:
        events, cursor = poll_events(job_id, cursor)
        for data in events:
            # Use .get() rather than .pop(): the event dicts may be shared
            # with other pollers of the event store, so do not mutate them.
            event_type = data.get("event", "")
            yield worker_pb2.ChunkPipelineEvent(
                job_id=job_id,
                event_type=event_type,
                sequence=data.get("sequence", 0),
                worker_id=data.get("worker_id", ""),
                state=data.get("state", ""),
                queue_size=data.get("queue_size", 0),
                elapsed=data.get("elapsed", 0.0),
                throughput_mbps=data.get("throughput_mbps", 0.0),
                total_chunks=data.get("total_chunks", 0),
                processed_chunks=data.get("processed_chunks", 0),
                failed_chunks=data.get("failed_chunks", 0),
                error=data.get("error", ""),
                processing_time=data.get("processing_time", 0.0),
                retries=data.get("retries", 0),
            )
            # Check per event (not once per batch) so a terminal event is
            # honored immediately and event_type is always bound here.
            if event_type in ("pipeline_complete", "pipeline_error"):
                return
        time.sleep(0.05)  # short pause between polls to avoid busy-waiting
def GetWorkerStatus(self, request, context):
"""Get worker health and capabilities."""
try:

View File

@@ -24,7 +24,7 @@ _sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0cworker.proto\x12\nmpr.worker\"\xa7\x01\n\nJobRequest\x12\x0e\n\x06job_id\x18\x01 \x01(\t\x12\x13\n\x0bsource_path\x18\x02 \x01(\t\x12\x13\n\x0boutput_path\x18\x03 \x01(\t\x12\x13\n\x0bpreset_json\x18\x04 \x01(\t\x12\x17\n\ntrim_start\x18\x05 \x01(\x02H\x00\x88\x01\x01\x12\x15\n\x08trim_end\x18\x06 \x01(\x02H\x01\x88\x01\x01\x42\r\n\x0b_trim_startB\x0b\n\t_trim_end\"@\n\x0bJobResponse\x12\x0e\n\x06job_id\x18\x01 \x01(\t\x12\x10\n\x08\x61\x63\x63\x65pted\x18\x02 \x01(\x08\x12\x0f\n\x07message\x18\x03 \x01(\t\"!\n\x0fProgressRequest\x12\x0e\n\x06job_id\x18\x01 \x01(\t\"\x9c\x01\n\x0eProgressUpdate\x12\x0e\n\x06job_id\x18\x01 \x01(\t\x12\x10\n\x08progress\x18\x02 \x01(\x05\x12\x15\n\rcurrent_frame\x18\x03 \x01(\x05\x12\x14\n\x0c\x63urrent_time\x18\x04 \x01(\x02\x12\r\n\x05speed\x18\x05 \x01(\x02\x12\x0e\n\x06status\x18\x06 \x01(\t\x12\x12\n\x05\x65rror\x18\x07 \x01(\tH\x00\x88\x01\x01\x42\x08\n\x06_error\"\x1f\n\rCancelRequest\x12\x0e\n\x06job_id\x18\x01 \x01(\t\"D\n\x0e\x43\x61ncelResponse\x12\x0e\n\x06job_id\x18\x01 \x01(\t\x12\x11\n\tcancelled\x18\x02 \x01(\x08\x12\x0f\n\x07message\x18\x03 \x01(\t\"g\n\x0cWorkerStatus\x12\x11\n\tavailable\x18\x01 \x01(\x08\x12\x13\n\x0b\x61\x63tive_jobs\x18\x02 \x01(\x05\x12\x18\n\x10supported_codecs\x18\x03 \x03(\t\x12\x15\n\rgpu_available\x18\x04 \x01(\x08\"\x07\n\x05\x45mpty2\x9e\x02\n\rWorkerService\x12<\n\tSubmitJob\x12\x16.mpr.worker.JobRequest\x1a\x17.mpr.worker.JobResponse\x12K\n\x0eStreamProgress\x12\x1b.mpr.worker.ProgressRequest\x1a\x1a.mpr.worker.ProgressUpdate0\x01\x12\x42\n\tCancelJob\x12\x19.mpr.worker.CancelRequest\x1a\x1a.mpr.worker.CancelResponse\x12>\n\x0fGetWorkerStatus\x12\x11.mpr.worker.Empty\x1a\x18.mpr.worker.WorkerStatusb\x06proto3')
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0cworker.proto\x12\nmpr.worker\"\xa7\x01\n\nJobRequest\x12\x0e\n\x06job_id\x18\x01 \x01(\t\x12\x13\n\x0bsource_path\x18\x02 \x01(\t\x12\x13\n\x0boutput_path\x18\x03 \x01(\t\x12\x13\n\x0bpreset_json\x18\x04 \x01(\t\x12\x17\n\ntrim_start\x18\x05 \x01(\x02H\x00\x88\x01\x01\x12\x15\n\x08trim_end\x18\x06 \x01(\x02H\x01\x88\x01\x01\x42\r\n\x0b_trim_startB\x0b\n\t_trim_end\"@\n\x0bJobResponse\x12\x0e\n\x06job_id\x18\x01 \x01(\t\x12\x10\n\x08\x61\x63\x63\x65pted\x18\x02 \x01(\x08\x12\x0f\n\x07message\x18\x03 \x01(\t\"!\n\x0fProgressRequest\x12\x0e\n\x06job_id\x18\x01 \x01(\t\"\x9c\x01\n\x0eProgressUpdate\x12\x0e\n\x06job_id\x18\x01 \x01(\t\x12\x10\n\x08progress\x18\x02 \x01(\x05\x12\x15\n\rcurrent_frame\x18\x03 \x01(\x05\x12\x14\n\x0c\x63urrent_time\x18\x04 \x01(\x02\x12\r\n\x05speed\x18\x05 \x01(\x02\x12\x0e\n\x06status\x18\x06 \x01(\t\x12\x12\n\x05\x65rror\x18\x07 \x01(\tH\x00\x88\x01\x01\x42\x08\n\x06_error\"\x1f\n\rCancelRequest\x12\x0e\n\x06job_id\x18\x01 \x01(\t\"D\n\x0e\x43\x61ncelResponse\x12\x0e\n\x06job_id\x18\x01 \x01(\t\x12\x11\n\tcancelled\x18\x02 \x01(\x08\x12\x0f\n\x07message\x18\x03 \x01(\t\"g\n\x0cWorkerStatus\x12\x11\n\tavailable\x18\x01 \x01(\x08\x12\x13\n\x0b\x61\x63tive_jobs\x18\x02 \x01(\x05\x12\x18\n\x10supported_codecs\x18\x03 \x03(\t\x12\x15\n\rgpu_available\x18\x04 \x01(\x08\"\x07\n\x05\x45mpty\"$\n\x12\x43hunkStreamRequest\x12\x0e\n\x06job_id\x18\x01 \x01(\t\"\xaa\x02\n\x12\x43hunkPipelineEvent\x12\x0e\n\x06job_id\x18\x01 \x01(\t\x12\x12\n\nevent_type\x18\x02 \x01(\t\x12\x10\n\x08sequence\x18\x03 \x01(\x05\x12\x11\n\tworker_id\x18\x04 \x01(\t\x12\r\n\x05state\x18\x05 \x01(\t\x12\x12\n\nqueue_size\x18\x06 \x01(\x05\x12\x0f\n\x07\x65lapsed\x18\x07 \x01(\x02\x12\x17\n\x0fthroughput_mbps\x18\x08 \x01(\x02\x12\x14\n\x0ctotal_chunks\x18\t \x01(\x05\x12\x18\n\x10processed_chunks\x18\n \x01(\x05\x12\x15\n\rfailed_chunks\x18\x0b \x01(\x05\x12\r\n\x05\x65rror\x18\x0c 
\x01(\t\x12\x17\n\x0fprocessing_time\x18\r \x01(\x02\x12\x0f\n\x07retries\x18\x0e \x01(\x05\x32\xf7\x02\n\rWorkerService\x12<\n\tSubmitJob\x12\x16.mpr.worker.JobRequest\x1a\x17.mpr.worker.JobResponse\x12K\n\x0eStreamProgress\x12\x1b.mpr.worker.ProgressRequest\x1a\x1a.mpr.worker.ProgressUpdate0\x01\x12\x42\n\tCancelJob\x12\x19.mpr.worker.CancelRequest\x1a\x1a.mpr.worker.CancelResponse\x12>\n\x0fGetWorkerStatus\x12\x11.mpr.worker.Empty\x1a\x18.mpr.worker.WorkerStatus\x12W\n\x13StreamChunkPipeline\x12\x1e.mpr.worker.ChunkStreamRequest\x1a\x1e.mpr.worker.ChunkPipelineEvent0\x01\x62\x06proto3')
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
@@ -47,6 +47,10 @@ if not _descriptor._USE_C_DESCRIPTORS:
_globals['_WORKERSTATUS']._serialized_end=664
_globals['_EMPTY']._serialized_start=666
_globals['_EMPTY']._serialized_end=673
_globals['_WORKERSERVICE']._serialized_start=676
_globals['_WORKERSERVICE']._serialized_end=962
_globals['_CHUNKSTREAMREQUEST']._serialized_start=675
_globals['_CHUNKSTREAMREQUEST']._serialized_end=711
_globals['_CHUNKPIPELINEEVENT']._serialized_start=714
_globals['_CHUNKPIPELINEEVENT']._serialized_end=1012
_globals['_WORKERSERVICE']._serialized_start=1015
_globals['_WORKERSERVICE']._serialized_end=1390
# @@protoc_insertion_point(module_scope)

View File

@@ -5,7 +5,7 @@ import warnings
from . import worker_pb2 as worker__pb2
GRPC_GENERATED_VERSION = '1.76.0'
GRPC_GENERATED_VERSION = '1.78.0'
GRPC_VERSION = grpc.__version__
_version_not_supported = False
@@ -54,6 +54,11 @@ class WorkerServiceStub(object):
request_serializer=worker__pb2.Empty.SerializeToString,
response_deserializer=worker__pb2.WorkerStatus.FromString,
_registered_method=True)
self.StreamChunkPipeline = channel.unary_stream(
'/mpr.worker.WorkerService/StreamChunkPipeline',
request_serializer=worker__pb2.ChunkStreamRequest.SerializeToString,
response_deserializer=worker__pb2.ChunkPipelineEvent.FromString,
_registered_method=True)
class WorkerServiceServicer(object):
@@ -83,6 +88,12 @@ class WorkerServiceServicer(object):
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def StreamChunkPipeline(self, request, context):
    """Default generated stub: reports UNIMPLEMENTED until a concrete
    servicer overrides this method."""
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')
def add_WorkerServiceServicer_to_server(servicer, server):
rpc_method_handlers = {
@@ -106,6 +117,11 @@ def add_WorkerServiceServicer_to_server(servicer, server):
request_deserializer=worker__pb2.Empty.FromString,
response_serializer=worker__pb2.WorkerStatus.SerializeToString,
),
'StreamChunkPipeline': grpc.unary_stream_rpc_method_handler(
servicer.StreamChunkPipeline,
request_deserializer=worker__pb2.ChunkStreamRequest.FromString,
response_serializer=worker__pb2.ChunkPipelineEvent.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'mpr.worker.WorkerService', rpc_method_handlers)
@@ -224,3 +240,30 @@ class WorkerService(object):
timeout,
metadata,
_registered_method=True)
@staticmethod
def StreamChunkPipeline(request,
        target,
        options=(),
        channel_credentials=None,
        call_credentials=None,
        insecure=False,
        compression=None,
        wait_for_ready=None,
        timeout=None,
        metadata=None):
    # Generated convenience wrapper: performs a one-shot unary-stream call
    # to StreamChunkPipeline via the experimental API, creating/reusing a
    # channel to `target` internally.
    return grpc.experimental.unary_stream(
        request,
        target,
        '/mpr.worker.WorkerService/StreamChunkPipeline',
        worker__pb2.ChunkStreamRequest.SerializeToString,
        worker__pb2.ChunkPipelineEvent.FromString,
        options,
        channel_credentials,
        insecure,
        call_credentials,
        compression,
        wait_for_ready,
        timeout,
        metadata,
        _registered_method=True)