import queue
import traceback
from queue import Queue
from threading import Thread

import yaml

cimport constants
from annotation cimport Annotation
from api_client cimport ApiClient
from credentials cimport Credentials
from file_data cimport FileData
from inference cimport Inference
from remote_command cimport RemoteCommand, CommandType
from remote_command_handler cimport RemoteCommandHandler


cdef class CommandProcessor:
    """Bridges the ZMQ remote-command channel to the inference engine.

    Commands arrive on the RemoteCommandHandler's receive thread via
    on_command(); long-running inference work is queued and drained by
    the start() loop so it never blocks the receive thread.
    """

    cdef ApiClient api_client
    cdef RemoteCommandHandler remote_handler
    cdef object inference_queue  # Queue of pending INFERENCE RemoteCommands
    cdef bint running            # cleared by stop() to end the start() loop
    cdef Inference inference

    def __init__(self, int zmq_port, str api_url):
        """Wire up the API client, the ZMQ handler and the inference engine.

        The handler thread is started immediately; call start() afterwards
        to begin processing queued inference commands.
        """
        self.api_client = ApiClient(api_url)
        self.remote_handler = RemoteCommandHandler(zmq_port, self.on_command)
        self.inference_queue = Queue(maxsize=constants.QUEUE_MAXSIZE)
        self.remote_handler.start()
        self.running = True
        self.inference = Inference(self.api_client, self.on_annotation)

    def start(self):
        """Drain queued inference commands until stop() clears self.running.

        Blocks the calling thread. The 0.5s poll timeout guarantees the
        running flag is re-checked regularly even when the queue is idle.
        """
        while self.running:
            try:
                command = self.inference_queue.get(timeout=0.5)
                try:
                    self.inference.run_inference(command)
                finally:
                    # Always send the end-of-stream sentinel, even when
                    # run_inference raises, so the client is never left
                    # waiting for a 'DONE' that would otherwise be skipped.
                    end_inference_command = RemoteCommand(
                        CommandType.INFERENCE_DATA, None, 'DONE')
                    self.remote_handler.send(
                        command.client_id, end_inference_command.serialize())
            except queue.Empty:
                # Poll timed out with no work; loop to re-check self.running.
                continue
            except Exception:
                # Keep the worker loop alive; log the full traceback.
                traceback.print_exc()
        print('EXIT!')

    cdef on_command(self, RemoteCommand command):
        """Dispatch a single remote command (runs on the handler thread)."""
        try:
            if command.command_type == CommandType.LOGIN:
                self.api_client.set_credentials(
                    Credentials.from_msgpack(command.data))
            elif command.command_type == CommandType.LOAD:
                self.load_file(command)
            elif command.command_type == CommandType.INFERENCE:
                # Queued rather than executed here so long-running inference
                # happens on the start() loop, not the receive thread.
                self.inference_queue.put(command)
            elif command.command_type == CommandType.AI_AVAILABILITY_CHECK:
                self.build_tensor_engine(command.client_id)
            elif command.command_type == CommandType.STOP_INFERENCE:
                self.inference.stop()
            elif command.command_type == CommandType.EXIT:
                # stop() tears down the handler we are currently called from;
                # run it on a separate thread to avoid blocking this callback.
                t = Thread(target=self.stop)  # non-block worker
                t.start()
            else:
                # Unknown command types are deliberately ignored.
                pass
        except Exception as e:
            print(f"Error handling client: {e}")
            traceback.print_exc()

    cdef build_tensor_engine(self, client_id):
        """Build the tensor engine, streaming status updates to the client.

        When the build completes, a final 'enabled' availability result is
        sent to the requesting client.
        """
        self.inference.build_tensor_engine(
            lambda status: self.build_tensor_status_updater(client_id, status))
        self.remote_handler.send(
            client_id,
            RemoteCommand(CommandType.AI_AVAILABILITY_RESULT,
                          None, 'enabled').serialize())

    cdef build_tensor_status_updater(self, bytes client_id, str status):
        """Forward one engine-build status string to the requesting client."""
        self.remote_handler.send(
            client_id,
            RemoteCommand(CommandType.AI_AVAILABILITY_RESULT,
                          None, status).serialize())

    cdef load_file(self, RemoteCommand command):
        """Fetch the requested file through the API and reply with its bytes.

        On any failure the error message is sent back in place of the payload
        so the client always receives a DATA_BYTES response.
        """
        cdef RemoteCommand response
        cdef FileData file_data
        cdef bytes file_bytes
        try:
            file_data = FileData.from_msgpack(command.data)
            file_bytes = self.api_client.load_bytes(
                file_data.filename, file_data.folder)
            response = RemoteCommand(CommandType.DATA_BYTES, file_bytes)
        except Exception as e:
            response = RemoteCommand(CommandType.DATA_BYTES, None, str(e))
        self.remote_handler.send(command.client_id, response.serialize())

    cdef on_annotation(self, RemoteCommand cmd, Annotation annotation):
        """Relay one inference annotation back to the originating client."""
        cdef RemoteCommand response = RemoteCommand(
            CommandType.INFERENCE_DATA, annotation.serialize())
        self.remote_handler.send(cmd.client_id, response.serialize())

    def stop(self):
        """Shut down inference, the ZMQ handler, and the start() loop."""
        self.inference.stop()
        self.remote_handler.stop()
        self.running = False