Fix race condition during startup

Move the AIAvailabilityEnum.ENABLED status update to the end of model conversion, so the status is only set after the converted model bytes are stored
This commit is contained in:
Oleksandr Bezdieniezhnykh
2025-09-04 15:39:43 +03:00
parent be77a81875
commit 7d68f7faee
4 changed files with 7 additions and 4 deletions
@@ -49,7 +49,7 @@ public class InferenceClient : IInferenceClient
Arguments = $"-p {_inferenceClientConfig.ZeroMqPort} -lp {_loaderClientConfig.ZeroMqPort} -a {_inferenceClientConfig.ApiUrl}", Arguments = $"-p {_inferenceClientConfig.ZeroMqPort} -lp {_loaderClientConfig.ZeroMqPort} -a {_inferenceClientConfig.ApiUrl}",
CreateNoWindow = true CreateNoWindow = true
}; };
//process.Start(); process.Start();
} }
catch (Exception e) catch (Exception e)
{ {
+2 -1
View File
@@ -14,12 +14,13 @@ cdef class Inference:
cdef dict[str, list(Detection)] _tile_detections cdef dict[str, list(Detection)] _tile_detections
cdef AIRecognitionConfig ai_config cdef AIRecognitionConfig ai_config
cdef bint stop_signal cdef bint stop_signal
cdef AIAvailabilityStatus ai_availability_status cdef public AIAvailabilityStatus ai_availability_status
cdef str model_input cdef str model_input
cdef int model_width cdef int model_width
cdef int model_height cdef int model_height
cdef bytes _converted_model_bytes
cdef bytes get_onnx_engine_bytes(self) cdef bytes get_onnx_engine_bytes(self)
cdef convert_and_upload_model(self, bytes onnx_engine_bytes, str engine_filename) cdef convert_and_upload_model(self, bytes onnx_engine_bytes, str engine_filename)
cdef init_ai(self) cdef init_ai(self)
+1
View File
@@ -87,6 +87,7 @@ cdef class Inference:
self.ai_availability_status.set_status(AIAvailabilityEnum.WARNING, <str>f"Failed to upload converted model: {res.err}") self.ai_availability_status.set_status(AIAvailabilityEnum.WARNING, <str>f"Failed to upload converted model: {res.err}")
self._converted_model_bytes = model_bytes self._converted_model_bytes = model_bytes
self.ai_availability_status.set_status(AIAvailabilityEnum.ENABLED)
except Exception as e: except Exception as e:
self.ai_availability_status.set_status(AIAvailabilityEnum.ERROR, <str> str(e)) self.ai_availability_status.set_status(AIAvailabilityEnum.ERROR, <str> str(e))
self._converted_model_bytes = None self._converted_model_bytes = None
+3 -2
View File
@@ -21,10 +21,11 @@ cdef class CommandProcessor:
def __init__(self, int zmq_port, str loader_zmq_host, int loader_zmq_port, str api_url): def __init__(self, int zmq_port, str loader_zmq_host, int loader_zmq_port, str api_url):
self.remote_handler = RemoteCommandHandler(zmq_port, self.on_command) self.remote_handler = RemoteCommandHandler(zmq_port, self.on_command)
self.inference_queue = Queue(maxsize=constants_inf.QUEUE_MAXSIZE) self.inference_queue = Queue(maxsize=constants_inf.QUEUE_MAXSIZE)
self.remote_handler.start()
self.running = True
self.loader_client = LoaderClient(loader_zmq_host, loader_zmq_port) self.loader_client = LoaderClient(loader_zmq_host, loader_zmq_port)
self.inference = Inference(self.loader_client, self.remote_handler) self.inference = Inference(self.loader_client, self.remote_handler)
self.running = True
self.remote_handler.start()
def start(self): def start(self):
while self.running: while self.running: