major refactor

This commit is contained in:
2026-03-13 01:07:02 -03:00
parent eaaf2ad60c
commit 3eeedebb15
61 changed files with 441 additions and 242 deletions

10
core/rpc/__init__.py Normal file
View File

@@ -0,0 +1,10 @@
"""
MPR gRPC Module
Provides gRPC server and client for worker communication.
Generated stubs (worker_pb2.py, worker_pb2_grpc.py) are created by:
python schema/generate.py --proto
Requires: grpcio, grpcio-tools
"""

204
core/rpc/client.py Normal file
View File

@@ -0,0 +1,204 @@
"""
gRPC Client - Used by FastAPI to communicate with workers.
"""
import json
import logging
import os
from typing import Callable, Iterator, Optional
import grpc
# Generated stubs - run `python schema/generate.py --proto` if missing
from . import worker_pb2, worker_pb2_grpc
logger = logging.getLogger(__name__)
# Configuration from environment
GRPC_HOST = os.environ.get("GRPC_HOST", "grpc")
GRPC_PORT = int(os.environ.get("GRPC_PORT", "50051"))
class WorkerClient:
    """gRPC client for worker communication.

    The underlying channel is created lazily on first RPC; use the
    instance as a context manager (``with WorkerClient() as c:``) to
    guarantee the channel is closed.
    """

    def __init__(self, host: Optional[str] = None, port: Optional[int] = None):
        """
        Initialize the client.

        Args:
            host: gRPC server host (defaults to GRPC_HOST env var)
            port: gRPC server port (defaults to GRPC_PORT env var)
        """
        self.host = host or GRPC_HOST
        self.port = port or GRPC_PORT
        self.address = f"{self.host}:{self.port}"
        self._channel: Optional[grpc.Channel] = None
        self._stub: Optional[worker_pb2_grpc.WorkerServiceStub] = None

    def _ensure_connected(self) -> worker_pb2_grpc.WorkerServiceStub:
        """Create the channel/stub on first use and return the stub."""
        if self._channel is None:
            # Insecure channel: assumes the worker is reachable only over
            # a trusted internal network.
            self._channel = grpc.insecure_channel(self.address)
            self._stub = worker_pb2_grpc.WorkerServiceStub(self._channel)
        return self._stub

    def close(self) -> None:
        """Close the channel. Safe to call multiple times."""
        if self._channel:
            self._channel.close()
            self._channel = None
            self._stub = None

    def __enter__(self):
        self._ensure_connected()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()

    def submit_job(
        self,
        job_id: str,
        source_path: str,
        output_path: str,
        preset: Optional[dict] = None,
        trim_start: Optional[float] = None,
        trim_end: Optional[float] = None,
    ) -> tuple[bool, str]:
        """
        Submit a job to the worker.

        Args:
            job_id: Unique job identifier
            source_path: Path to source file
            output_path: Path for output file
            preset: Transcode preset dict (optional)
            trim_start: Trim start time in seconds (optional)
            trim_end: Trim end time in seconds (optional)

        Returns:
            Tuple of (accepted: bool, message: str). RPC failures are
            reported as (False, <error text>) rather than raised.
        """
        stub = self._ensure_connected()
        # NOTE(review): an empty preset dict serializes to "" (treated as
        # "no preset") because of the truthiness test — confirm {} is
        # never meaningful.
        request = worker_pb2.JobRequest(
            job_id=job_id,
            source_path=source_path,
            output_path=output_path,
            preset_json=json.dumps(preset) if preset else "",
        )
        # proto3 `optional` fields: only set when a value was supplied so
        # HasField() reflects presence on the server side.
        if trim_start is not None:
            request.trim_start = trim_start
        if trim_end is not None:
            request.trim_end = trim_end
        try:
            response = stub.SubmitJob(request)
            return response.accepted, response.message
        except grpc.RpcError as e:
            logger.error(f"SubmitJob RPC failed: {e}")
            return False, str(e)

    def stream_progress(
        self,
        job_id: str,
        callback: Optional[Callable[[dict], None]] = None,
    ) -> Iterator[dict]:
        """
        Stream progress updates for a job.

        Args:
            job_id: Job identifier
            callback: Optional callback invoked with each update dict

        Yields:
            Progress dicts with keys: job_id, progress, current_frame,
            current_time, speed, status, error.
        """
        stub = self._ensure_connected()
        request = worker_pb2.ProgressRequest(job_id=job_id)
        try:
            for update in stub.StreamProgress(request):
                progress = {
                    "job_id": update.job_id,
                    "progress": update.progress,
                    "current_frame": update.current_frame,
                    "current_time": update.current_time,
                    "speed": update.speed,
                    "status": update.status,
                    "error": update.error if update.HasField("error") else None,
                }
                if callback:
                    callback(progress)
                yield progress
                # Stop consuming once the job reaches a terminal state.
                if update.status in ("completed", "failed", "cancelled"):
                    break
        except grpc.RpcError as e:
            logger.error(f"StreamProgress RPC failed: {e}")
            # Fix: emit the same dict shape as the success path so
            # consumers can rely on a stable key set.
            yield {
                "job_id": job_id,
                "progress": 0,
                "current_frame": 0,
                "current_time": 0.0,
                "speed": 0.0,
                "status": "error",
                "error": str(e),
            }

    def cancel_job(self, job_id: str) -> tuple[bool, str]:
        """
        Cancel a running job.

        Args:
            job_id: Job identifier

        Returns:
            Tuple of (cancelled: bool, message: str). RPC failures are
            reported as (False, <error text>) rather than raised.
        """
        stub = self._ensure_connected()
        request = worker_pb2.CancelRequest(job_id=job_id)
        try:
            response = stub.CancelJob(request)
            return response.cancelled, response.message
        except grpc.RpcError as e:
            logger.error(f"CancelJob RPC failed: {e}")
            return False, str(e)

    def get_worker_status(self) -> Optional[dict]:
        """
        Get worker status and capabilities.

        Returns:
            Status dict (available, active_jobs, supported_codecs,
            gpu_available) or None on RPC error.
        """
        stub = self._ensure_connected()
        try:
            response = stub.GetWorkerStatus(worker_pb2.Empty())
            return {
                "available": response.available,
                "active_jobs": response.active_jobs,
                # repeated field -> plain list for JSON-friendly output
                "supported_codecs": list(response.supported_codecs),
                "gpu_available": response.gpu_available,
            }
        except grpc.RpcError as e:
            logger.error(f"GetWorkerStatus RPC failed: {e}")
            return None
# Lazily-created module-level singleton client
_client: Optional[WorkerClient] = None


def get_client() -> WorkerClient:
    """Get or create the singleton client (uses env vars for config)."""
    global _client
    client = _client
    if client is None:
        client = _client = WorkerClient()
    return client

View File

@@ -0,0 +1,64 @@
// Protocol Buffer Definitions - GENERATED FILE
//
// Do not edit directly. Regenerate using modelgen.
syntax = "proto3";

package mpr.worker;

// Worker control surface: job submission, server-streamed progress,
// cancellation, and a health/capability probe.
service WorkerService {
  rpc SubmitJob(JobRequest) returns (JobResponse);
  // Server streams updates until the job reaches a terminal status.
  rpc StreamProgress(ProgressRequest) returns (stream ProgressUpdate);
  rpc CancelJob(CancelRequest) returns (CancelResponse);
  rpc GetWorkerStatus(Empty) returns (WorkerStatus);
}

message JobRequest {
  string job_id = 1;
  string source_path = 2;
  string output_path = 3;
  // JSON-encoded preset; empty string means "no preset".
  string preset_json = 4;
  // Optional trim window, in seconds; `optional` enables HasField checks.
  optional float trim_start = 5;
  optional float trim_end = 6;
}

message JobResponse {
  string job_id = 1;
  bool accepted = 2;
  string message = 3;
}

message ProgressRequest {
  string job_id = 1;
}

message ProgressUpdate {
  string job_id = 1;
  // Completion percentage.
  int32 progress = 2;
  int32 current_frame = 3;
  float current_time = 4;
  float speed = 5;
  string status = 6;
  // Set only when the job failed.
  optional string error = 7;
}

message CancelRequest {
  string job_id = 1;
}

message CancelResponse {
  string job_id = 1;
  bool cancelled = 2;
  string message = 3;
}

message WorkerStatus {
  bool available = 1;
  int32 active_jobs = 2;
  repeated string supported_codecs = 3;
  bool gpu_available = 4;
}

message Empty {
  // Empty
}

273
core/rpc/server.py Normal file
View File

@@ -0,0 +1,273 @@
"""
gRPC Server - Worker Service Implementation
Runs in the worker process to handle job submissions and progress streaming.
"""
import json
import logging
import os
import time
from concurrent import futures
from typing import Iterator, Optional

import grpc
# Configuration from environment
GRPC_PORT = int(os.environ.get("GRPC_PORT", "50051"))
GRPC_MAX_WORKERS = int(os.environ.get("GRPC_MAX_WORKERS", "10"))

# Generated stubs - run `python schema/generate.py --proto` if missing
from . import worker_pb2, worker_pb2_grpc

logger = logging.getLogger(__name__)

# Active jobs progress tracking (shared state for streaming).
# Keyed by job_id; written by SubmitJob/CancelJob/update_job_progress and
# read by StreamProgress from thread-pool worker threads.
# NOTE(review): no explicit locking — appears to rely on the GIL for
# per-operation dict atomicity; confirm that is acceptable here.
_active_jobs: dict[str, dict] = {}
class WorkerServicer(worker_pb2_grpc.WorkerServiceServicer):
    """gRPC service implementation for worker operations.

    Progress state lives in the module-level ``_active_jobs`` dict, which
    is updated out-of-band by ``update_job_progress`` and streamed to
    clients by ``StreamProgress``.
    """

    def __init__(self, celery_app=None):
        """
        Initialize the servicer.

        Args:
            celery_app: Optional Celery app for task dispatch
        """
        self.celery_app = celery_app

    def SubmitJob(self, request, context):
        """Submit a transcode/trim job to the worker.

        Registers the job in ``_active_jobs`` and, when a Celery app is
        configured, dispatches ``run_transcode_job``.
        """
        job_id = request.job_id
        logger.info(f"SubmitJob: {job_id}")
        try:
            # Parse preset (empty preset_json means "no preset")
            preset = json.loads(request.preset_json) if request.preset_json else None
            # Initialize progress tracking
            _active_jobs[job_id] = {
                "status": "pending",
                "progress": 0,
                "current_frame": 0,
                "current_time": 0.0,
                "speed": 0.0,
                "error": None,
            }
            # Dispatch to Celery if available
            if self.celery_app:
                from core.task.tasks import run_transcode_job

                # HasField distinguishes "unset" from 0.0 on the proto3
                # optional trim fields.
                task = run_transcode_job.delay(
                    job_id=job_id,
                    source_path=request.source_path,
                    output_path=request.output_path,
                    preset=preset,
                    trim_start=request.trim_start
                    if request.HasField("trim_start")
                    else None,
                    trim_end=request.trim_end if request.HasField("trim_end") else None,
                )
                _active_jobs[job_id]["celery_task_id"] = task.id
            # NOTE(review): when no celery_app is configured the job is
            # still reported as accepted but nothing runs it — confirm
            # this is intentional (e.g. for dev/test setups).
            return worker_pb2.JobResponse(
                job_id=job_id,
                accepted=True,
                message="Job submitted",
            )
        except Exception as e:
            logger.exception(f"SubmitJob failed: {e}")
            return worker_pb2.JobResponse(
                job_id=job_id,
                accepted=False,
                message=str(e),
            )

    def StreamProgress(self, request, context) -> Iterator[worker_pb2.ProgressUpdate]:
        """Stream progress updates for a job.

        Polls ``_active_jobs`` every 100 ms and yields an update whenever
        the progress value changes, until the job reaches a terminal
        status, the entry disappears, or the client cancels the stream.
        """
        job_id = request.job_id
        logger.info(f"StreamProgress: {job_id}")
        # Check if job exists
        if job_id not in _active_jobs:
            yield worker_pb2.ProgressUpdate(
                job_id=job_id,
                progress=0,
                status="not_found",
                error="Job not found",
            )
            return
        # Stream updates until job completes
        last_progress = -1
        while True:
            if context.cancelled():
                logger.info(f"StreamProgress cancelled: {job_id}")
                break
            job_state = _active_jobs.get(job_id)
            if not job_state:
                break
            # Only yield if progress changed
            if job_state["progress"] != last_progress:
                last_progress = job_state["progress"]
                # error=None is treated by protobuf as "leave field unset"
                yield worker_pb2.ProgressUpdate(
                    job_id=job_id,
                    progress=job_state["progress"],
                    current_frame=job_state.get("current_frame", 0),
                    current_time=job_state.get("current_time", 0.0),
                    speed=job_state.get("speed", 0.0),
                    status=job_state["status"],
                    error=job_state.get("error"),
                )
            # Exit if job is done
            if job_state["status"] in ("completed", "failed", "cancelled"):
                break
            # Small delay to avoid busy loop
            time.sleep(0.1)
        # Cleanup completed jobs
        # NOTE(review): popping here means only the first streaming client
        # observes the terminal update; a second stream gets "not_found".
        # Confirm single-consumer streams are the intended model.
        if job_id in _active_jobs:
            status = _active_jobs[job_id].get("status")
            if status in ("completed", "failed", "cancelled"):
                _active_jobs.pop(job_id, None)

    def CancelJob(self, request, context):
        """Cancel a running job.

        Marks the in-memory state cancelled and revokes the Celery task
        (terminate=True) when one was dispatched.
        """
        job_id = request.job_id
        logger.info(f"CancelJob: {job_id}")
        if job_id in _active_jobs:
            _active_jobs[job_id]["status"] = "cancelled"
            # Revoke Celery task if available
            if self.celery_app:
                task_id = _active_jobs[job_id].get("celery_task_id")
                if task_id:
                    self.celery_app.control.revoke(task_id, terminate=True)
            return worker_pb2.CancelResponse(
                job_id=job_id,
                cancelled=True,
                message="Job cancelled",
            )
        return worker_pb2.CancelResponse(
            job_id=job_id,
            cancelled=False,
            message="Job not found",
        )

    def GetWorkerStatus(self, request, context):
        """Get worker health and capabilities.

        Probes available ffmpeg encoders; failures degrade to an empty
        codec list rather than an RPC error.
        """
        try:
            from core.ffmpeg import get_encoders

            encoders = get_encoders()
            codec_names = [e["name"] for e in encoders.get("video", [])]
        except Exception:
            codec_names = []
        # Check for GPU encoders (NVIDIA/VAAPI/QuickSync name markers)
        gpu_available = any(
            "nvenc" in name or "vaapi" in name or "qsv" in name for name in codec_names
        )
        return worker_pb2.WorkerStatus(
            available=True,
            active_jobs=len(_active_jobs),
            supported_codecs=codec_names[:20],  # Limit to 20
            gpu_available=gpu_available,
        )
def update_job_progress(
    job_id: str,
    progress: int,
    current_frame: int = 0,
    current_time: float = 0.0,
    speed: float = 0.0,
    status: str = "processing",
    error: Optional[str] = None,
) -> None:
    """
    Update job progress (called from worker tasks).

    Updates both the in-memory gRPC state (consumed by StreamProgress)
    and the Django database. DB failures are logged, never raised, so a
    flaky database cannot kill a running job.

    Args:
        job_id: Job identifier.
        progress: Completion percentage.
        current_frame: Last processed frame number.
        current_time: Position in the media, in seconds.
        speed: Processing speed multiplier.
        status: Job status string ("processing", "completed", "failed", ...).
        error: Error message, if any.
    """
    # In-memory entry may already be gone (e.g. cleaned up after the
    # stream ended); that is not an error — the DB is still updated.
    if job_id in _active_jobs:
        _active_jobs[job_id].update(
            {
                "progress": progress,
                "current_frame": current_frame,
                "current_time": current_time,
                "speed": speed,
                "status": status,
                "error": error,
            }
        )
    # Update Django database (best effort)
    try:
        from django.utils import timezone

        from core.db import update_job_fields

        updates = {
            "progress": progress,
            "current_frame": current_frame,
            "current_time": current_time,
            "speed": str(speed),
            "status": status,
        }
        if error:
            updates["error_message"] = error
        # NOTE(review): started_at is rewritten on every "processing"
        # update — confirm the DB layer is first-write-wins or that
        # repeated writes are acceptable.
        if status == "processing":
            updates["started_at"] = timezone.now()
        elif status in ("completed", "failed"):
            updates["completed_at"] = timezone.now()
        update_job_fields(job_id, **updates)
    except Exception as e:
        logger.warning(f"Failed to update job {job_id} in DB: {e}")
def serve(port: int = None, celery_app=None) -> grpc.Server:
    """
    Start the gRPC server.

    Args:
        port: Port to listen on (defaults to GRPC_PORT env var)
        celery_app: Optional Celery app for task dispatch

    Returns:
        The running gRPC server
    """
    listen_port = GRPC_PORT if port is None else port
    executor = futures.ThreadPoolExecutor(max_workers=GRPC_MAX_WORKERS)
    grpc_server = grpc.server(executor)
    servicer = WorkerServicer(celery_app=celery_app)
    worker_pb2_grpc.add_WorkerServiceServicer_to_server(servicer, grpc_server)
    # Bind on all interfaces (IPv4 + IPv6), without TLS.
    grpc_server.add_insecure_port(f"[::]:{listen_port}")
    grpc_server.start()
    logger.info(f"gRPC server started on port {listen_port}")
    return grpc_server
if __name__ == "__main__":
logging.basicConfig(level=logging.INFO)
server = serve()
server.wait_for_termination()

52
core/rpc/worker_pb2.py Normal file
View File

@@ -0,0 +1,52 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# NO CHECKED-IN PROTOBUF GENCODE
# source: worker.proto
# Protobuf Python Version: 6.31.1
# Regenerate with: python schema/generate.py --proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import runtime_version as _runtime_version
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
_runtime_version.ValidateProtobufRuntimeVersion(
    _runtime_version.Domain.PUBLIC,
    6,
    31,
    1,
    '',
    'worker.proto'
)
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()


DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0cworker.proto\x12\nmpr.worker\"\xa7\x01\n\nJobRequest\x12\x0e\n\x06job_id\x18\x01 \x01(\t\x12\x13\n\x0bsource_path\x18\x02 \x01(\t\x12\x13\n\x0boutput_path\x18\x03 \x01(\t\x12\x13\n\x0bpreset_json\x18\x04 \x01(\t\x12\x17\n\ntrim_start\x18\x05 \x01(\x02H\x00\x88\x01\x01\x12\x15\n\x08trim_end\x18\x06 \x01(\x02H\x01\x88\x01\x01\x42\r\n\x0b_trim_startB\x0b\n\t_trim_end\"@\n\x0bJobResponse\x12\x0e\n\x06job_id\x18\x01 \x01(\t\x12\x10\n\x08\x61\x63\x63\x65pted\x18\x02 \x01(\x08\x12\x0f\n\x07message\x18\x03 \x01(\t\"!\n\x0fProgressRequest\x12\x0e\n\x06job_id\x18\x01 \x01(\t\"\x9c\x01\n\x0eProgressUpdate\x12\x0e\n\x06job_id\x18\x01 \x01(\t\x12\x10\n\x08progress\x18\x02 \x01(\x05\x12\x15\n\rcurrent_frame\x18\x03 \x01(\x05\x12\x14\n\x0c\x63urrent_time\x18\x04 \x01(\x02\x12\r\n\x05speed\x18\x05 \x01(\x02\x12\x0e\n\x06status\x18\x06 \x01(\t\x12\x12\n\x05\x65rror\x18\x07 \x01(\tH\x00\x88\x01\x01\x42\x08\n\x06_error\"\x1f\n\rCancelRequest\x12\x0e\n\x06job_id\x18\x01 \x01(\t\"D\n\x0e\x43\x61ncelResponse\x12\x0e\n\x06job_id\x18\x01 \x01(\t\x12\x11\n\tcancelled\x18\x02 \x01(\x08\x12\x0f\n\x07message\x18\x03 \x01(\t\"g\n\x0cWorkerStatus\x12\x11\n\tavailable\x18\x01 \x01(\x08\x12\x13\n\x0b\x61\x63tive_jobs\x18\x02 \x01(\x05\x12\x18\n\x10supported_codecs\x18\x03 \x03(\t\x12\x15\n\rgpu_available\x18\x04 \x01(\x08\"\x07\n\x05\x45mpty2\x9e\x02\n\rWorkerService\x12<\n\tSubmitJob\x12\x16.mpr.worker.JobRequest\x1a\x17.mpr.worker.JobResponse\x12K\n\x0eStreamProgress\x12\x1b.mpr.worker.ProgressRequest\x1a\x1a.mpr.worker.ProgressUpdate0\x01\x12\x42\n\tCancelJob\x12\x19.mpr.worker.CancelRequest\x1a\x1a.mpr.worker.CancelResponse\x12>\n\x0fGetWorkerStatus\x12\x11.mpr.worker.Empty\x1a\x18.mpr.worker.WorkerStatusb\x06proto3')

_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'worker_pb2', _globals)
if not _descriptor._USE_C_DESCRIPTORS:
  DESCRIPTOR._loaded_options = None
  _globals['_JOBREQUEST']._serialized_start=29
  _globals['_JOBREQUEST']._serialized_end=196
  _globals['_JOBRESPONSE']._serialized_start=198
  _globals['_JOBRESPONSE']._serialized_end=262
  _globals['_PROGRESSREQUEST']._serialized_start=264
  _globals['_PROGRESSREQUEST']._serialized_end=297
  _globals['_PROGRESSUPDATE']._serialized_start=300
  _globals['_PROGRESSUPDATE']._serialized_end=456
  _globals['_CANCELREQUEST']._serialized_start=458
  _globals['_CANCELREQUEST']._serialized_end=489
  _globals['_CANCELRESPONSE']._serialized_start=491
  _globals['_CANCELRESPONSE']._serialized_end=559
  _globals['_WORKERSTATUS']._serialized_start=561
  _globals['_WORKERSTATUS']._serialized_end=664
  _globals['_EMPTY']._serialized_start=666
  _globals['_EMPTY']._serialized_end=673
  _globals['_WORKERSERVICE']._serialized_start=676
  _globals['_WORKERSERVICE']._serialized_end=962
# @@protoc_insertion_point(module_scope)

226
core/rpc/worker_pb2_grpc.py Normal file
View File

@@ -0,0 +1,226 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
import warnings
from . import worker_pb2 as worker__pb2
# Generated code — do not hand-edit; regenerate with
# `python schema/generate.py --proto`.
GRPC_GENERATED_VERSION = '1.76.0'
GRPC_VERSION = grpc.__version__
_version_not_supported = False

try:
    from grpc._utilities import first_version_is_lower
    _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION)
except ImportError:
    _version_not_supported = True

if _version_not_supported:
    raise RuntimeError(
        f'The grpc package installed is at version {GRPC_VERSION},'
        + ' but the generated code in worker_pb2_grpc.py depends on'
        + f' grpcio>={GRPC_GENERATED_VERSION}.'
        + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}'
        + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.'
    )
# Generated code — do not hand-edit; regenerate with
# `python schema/generate.py --proto`.
class WorkerServiceStub(object):
    """Missing associated documentation comment in .proto file."""

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        self.SubmitJob = channel.unary_unary(
                '/mpr.worker.WorkerService/SubmitJob',
                request_serializer=worker__pb2.JobRequest.SerializeToString,
                response_deserializer=worker__pb2.JobResponse.FromString,
                _registered_method=True)
        self.StreamProgress = channel.unary_stream(
                '/mpr.worker.WorkerService/StreamProgress',
                request_serializer=worker__pb2.ProgressRequest.SerializeToString,
                response_deserializer=worker__pb2.ProgressUpdate.FromString,
                _registered_method=True)
        self.CancelJob = channel.unary_unary(
                '/mpr.worker.WorkerService/CancelJob',
                request_serializer=worker__pb2.CancelRequest.SerializeToString,
                response_deserializer=worker__pb2.CancelResponse.FromString,
                _registered_method=True)
        self.GetWorkerStatus = channel.unary_unary(
                '/mpr.worker.WorkerService/GetWorkerStatus',
                request_serializer=worker__pb2.Empty.SerializeToString,
                response_deserializer=worker__pb2.WorkerStatus.FromString,
                _registered_method=True)
# Generated code — do not hand-edit; regenerate with
# `python schema/generate.py --proto`.
class WorkerServiceServicer(object):
    """Missing associated documentation comment in .proto file."""

    def SubmitJob(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def StreamProgress(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def CancelJob(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def GetWorkerStatus(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
# Generated code — do not hand-edit; regenerate with
# `python schema/generate.py --proto`.
def add_WorkerServiceServicer_to_server(servicer, server):
    rpc_method_handlers = {
            'SubmitJob': grpc.unary_unary_rpc_method_handler(
                    servicer.SubmitJob,
                    request_deserializer=worker__pb2.JobRequest.FromString,
                    response_serializer=worker__pb2.JobResponse.SerializeToString,
            ),
            'StreamProgress': grpc.unary_stream_rpc_method_handler(
                    servicer.StreamProgress,
                    request_deserializer=worker__pb2.ProgressRequest.FromString,
                    response_serializer=worker__pb2.ProgressUpdate.SerializeToString,
            ),
            'CancelJob': grpc.unary_unary_rpc_method_handler(
                    servicer.CancelJob,
                    request_deserializer=worker__pb2.CancelRequest.FromString,
                    response_serializer=worker__pb2.CancelResponse.SerializeToString,
            ),
            'GetWorkerStatus': grpc.unary_unary_rpc_method_handler(
                    servicer.GetWorkerStatus,
                    request_deserializer=worker__pb2.Empty.FromString,
                    response_serializer=worker__pb2.WorkerStatus.SerializeToString,
            ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
            'mpr.worker.WorkerService', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
    server.add_registered_method_handlers('mpr.worker.WorkerService', rpc_method_handlers)
# Generated code — do not hand-edit; regenerate with
# `python schema/generate.py --proto`.
# This class is part of an EXPERIMENTAL API.
class WorkerService(object):
    """Missing associated documentation comment in .proto file."""

    @staticmethod
    def SubmitJob(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(
            request,
            target,
            '/mpr.worker.WorkerService/SubmitJob',
            worker__pb2.JobRequest.SerializeToString,
            worker__pb2.JobResponse.FromString,
            options,
            channel_credentials,
            insecure,
            call_credentials,
            compression,
            wait_for_ready,
            timeout,
            metadata,
            _registered_method=True)

    @staticmethod
    def StreamProgress(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_stream(
            request,
            target,
            '/mpr.worker.WorkerService/StreamProgress',
            worker__pb2.ProgressRequest.SerializeToString,
            worker__pb2.ProgressUpdate.FromString,
            options,
            channel_credentials,
            insecure,
            call_credentials,
            compression,
            wait_for_ready,
            timeout,
            metadata,
            _registered_method=True)

    @staticmethod
    def CancelJob(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(
            request,
            target,
            '/mpr.worker.WorkerService/CancelJob',
            worker__pb2.CancelRequest.SerializeToString,
            worker__pb2.CancelResponse.FromString,
            options,
            channel_credentials,
            insecure,
            call_credentials,
            compression,
            wait_for_ready,
            timeout,
            metadata,
            _registered_method=True)

    @staticmethod
    def GetWorkerStatus(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(
            request,
            target,
            '/mpr.worker.WorkerService/GetWorkerStatus',
            worker__pb2.Empty.SerializeToString,
            worker__pb2.WorkerStatus.FromString,
            options,
            channel_credentials,
            insecure,
            call_credentials,
            compression,
            wait_for_ready,
            timeout,
            metadata,
            _registered_method=True)