Files
annotations/Azaion.Inference/main.pyx
T
Alex Bezdieniezhnykh 7750025631 separate load functionality from inference client to loader client. Call loader client from inference to get the model.
remove dummy dlls, remove resource loader from c#.

TODO: Load dlls separately by Loader UI and loader client

WIP
2025-06-06 20:04:03 +03:00

69 lines
2.8 KiB
Cython

import queue
import traceback
from queue import Queue
cimport constants
from threading import Thread
from annotation cimport Annotation
from inference cimport Inference
from loader_client cimport LoaderClient
from remote_command cimport RemoteCommand, CommandType
from remote_command_handler cimport RemoteCommandHandler
cdef class CommandProcessor:
    """Bridge between remote ZMQ commands and the inference engine.

    Receives RemoteCommand messages through a RemoteCommandHandler,
    queues INFERENCE requests for the start() loop, and dispatches
    control commands (availability check, stop, exit) directly from
    the handler callback.
    """
    cdef RemoteCommandHandler remote_handler   # ZMQ receive/send endpoint; invokes on_command
    cdef object inference_queue                # bounded Queue of pending INFERENCE commands
    cdef bint running                          # main-loop flag; cleared by stop()
    cdef Inference inference                   # inference engine; reports results via on_annotation
    cdef LoaderClient loader_client            # model loader client (models come from the loader service)
    def __init__(self, int zmq_port, str loader_zmq_host, int loader_zmq_port, str api_url):
        """Wire up the command handler, queue, loader client and engine.

        api_url is accepted for interface compatibility but not used here
        (model loading goes through LoaderClient; see TODO below).
        """
        self.remote_handler = RemoteCommandHandler(zmq_port, self.on_command)
        self.inference_queue = Queue(maxsize=constants.QUEUE_MAXSIZE)
        self.remote_handler.start()
        self.running = True
        self.loader_client = LoaderClient(loader_zmq_host, loader_zmq_port)
        #TODO: replace api_client to azaion_loader.exe call
        self.inference = Inference(self.loader_client, self.on_annotation)
    def start(self):
        """Main loop: drain the inference queue until stop() is called.

        After each completed inference a DONE marker is sent so the client
        knows the stream of INFERENCE_DATA messages has ended.  Errors in a
        single inference are logged and the loop keeps serving.
        """
        while self.running:
            # Keep the try bodies minimal: queue.Empty can only come from
            # get(); anything raised by the inference itself is a real error.
            try:
                command = self.inference_queue.get(timeout=0.5)
            except queue.Empty:
                continue
            try:
                self.inference.run_inference(command)
                end_inference_command = RemoteCommand(CommandType.INFERENCE_DATA, None, 'DONE')
                self.remote_handler.send(command.client_id, end_inference_command.serialize())
            except Exception:
                traceback.print_exc()
        print('EXIT!')
    cdef on_command(self, RemoteCommand command):
        """Handler-thread callback: route one incoming RemoteCommand.

        Heavy work (INFERENCE) is queued for the start() loop; control
        commands are handled inline.  Unknown command types are ignored.
        """
        try:
            if command.command_type == CommandType.INFERENCE:
                # Do not run inference on the handler thread; hand it to
                # the start() loop via the bounded queue.
                self.inference_queue.put(command)
            elif command.command_type == CommandType.AI_AVAILABILITY_CHECK:
                # The availability status is reported asynchronously by the
                # engine-build callback.  The previous code additionally sent
                # an immediate unconditional 'enabled' result here, which
                # produced a duplicate AI_AVAILABILITY_RESULT before the real
                # status was known; that redundant send has been removed.
                self.inference.build_tensor_engine(lambda status: self.remote_handler.send(
                    command.client_id, RemoteCommand(CommandType.AI_AVAILABILITY_RESULT, None, status).serialize()))
            elif command.command_type == CommandType.STOP_INFERENCE:
                self.inference.stop()
            elif command.command_type == CommandType.EXIT:
                # stop() blocks while shutting the handler down; run it on a
                # worker thread so this callback returns promptly.
                t = Thread(target=self.stop)  # non-block worker
                t.start()
            else:
                pass
        except Exception as e:
            print(f"Error handling client: {e}")
    cdef on_annotation(self, RemoteCommand cmd, Annotation annotation):
        """Engine callback: forward one annotation to the requesting client."""
        cdef RemoteCommand response = RemoteCommand(CommandType.INFERENCE_DATA, annotation.serialize())
        self.remote_handler.send(cmd.client_id, response.serialize())
    def stop(self):
        """Shut down the engine and handler, then let the start() loop exit."""
        self.inference.stop()
        self.remote_handler.stop()
        self.running = False