mirror of
https://github.com/azaion/annotations.git
synced 2026-04-22 15:06:29 +00:00
Errors sending to UI
notifying client of AI model conversion
This commit is contained in:
+23
-10
@@ -34,7 +34,8 @@ cdef class CommandProcessor:
|
||||
try:
|
||||
command = self.inference_queue.get(timeout=0.5)
|
||||
self.inference.run_inference(command)
|
||||
self.remote_handler.send(command.client_id, <bytes>'DONE'.encode('utf-8'))
|
||||
end_inference_command = RemoteCommand(CommandType.INFERENCE_DATA, None, 'DONE')
|
||||
self.remote_handler.send(command.client_id, end_inference_command.serialize())
|
||||
except queue.Empty:
|
||||
continue
|
||||
except Exception as e:
|
||||
@@ -44,11 +45,13 @@ cdef class CommandProcessor:
|
||||
cdef on_command(self, RemoteCommand command):
|
||||
try:
|
||||
if command.command_type == CommandType.LOGIN:
|
||||
self.login(command)
|
||||
self.api_client.set_credentials(Credentials.from_msgpack(command.data))
|
||||
elif command.command_type == CommandType.LOAD:
|
||||
self.load_file(command)
|
||||
elif command.command_type == CommandType.INFERENCE:
|
||||
self.inference_queue.put(command)
|
||||
elif command.command_type == CommandType.AI_AVAILABILITY_CHECK:
|
||||
self.build_tensor_engine(command.client_id)
|
||||
elif command.command_type == CommandType.STOP_INFERENCE:
|
||||
self.inference.stop()
|
||||
elif command.command_type == CommandType.EXIT:
|
||||
@@ -59,18 +62,28 @@ cdef class CommandProcessor:
|
||||
except Exception as e:
|
||||
print(f"Error handling client: {e}")
|
||||
|
||||
cdef login(self, RemoteCommand command):
    # Handle a LOGIN command: install the client's credentials on the API
    # client, then start building the AI engine in a background thread so the
    # command loop is not blocked while the engine initializes.
    self.api_client.set_credentials(Credentials.from_msgpack(command.data))
    Thread(target=self.inference.build_tensor_engine).start()  # build AI engine in non-blocking thread
|
||||
cdef build_tensor_engine(self, client_id):
    # Build the tensor (AI) engine, streaming per-step status strings back to
    # the requesting client via build_tensor_status_updater, then send the
    # final 'enabled' availability result once the build completes.
    self.inference.build_tensor_engine(lambda status: self.build_tensor_status_updater(client_id, status))
    self.remote_handler.send(client_id, RemoteCommand(CommandType.AI_AVAILABILITY_RESULT, None, 'enabled').serialize())
|
||||
|
||||
cdef build_tensor_status_updater(self, bytes client_id, str status):
    # Forward one engine-build status string to the client, wrapped in an
    # AI_AVAILABILITY_RESULT command (the UI shows conversion/build progress).
    self.remote_handler.send(client_id, RemoteCommand(CommandType.AI_AVAILABILITY_RESULT, None, status).serialize())
|
||||
|
||||
cdef load_file(self, RemoteCommand command):
    """Handle a LOAD command: fetch the requested file's bytes and reply.

    On success the reply is a DATA_BYTES command carrying the file contents;
    on failure it is a DATA_BYTES command with no payload and the error text,
    so the client is always notified instead of waiting on a dead request.
    """
    cdef RemoteCommand response
    cdef FileData file_data
    cdef bytes file_bytes
    try:
        file_data = FileData.from_msgpack(command.data)
        file_bytes = self.api_client.load_bytes(file_data.filename, file_data.folder)
        response = RemoteCommand(CommandType.DATA_BYTES, file_bytes)
    except Exception as e:
        # Report the failure back to the client rather than swallowing it;
        # the error string travels in the command's status field.
        response = RemoteCommand(CommandType.DATA_BYTES, None, str(e))
    self.remote_handler.send(command.client_id, response.serialize())
|
||||
|
||||
cdef on_annotation(self, RemoteCommand cmd, Annotation annotation):
    # Inference callback: wrap the serialized annotation in an INFERENCE_DATA
    # command and stream it back to the client that issued the request.
    # (The stale pre-refactor path sent the raw annotation bytes without the
    # command envelope, which the client cannot route — removed.)
    cdef RemoteCommand response = RemoteCommand(CommandType.INFERENCE_DATA, annotation.serialize())
    self.remote_handler.send(cmd.client_id, response.serialize())
|
||||
|
||||
def stop(self):
    # Public shutdown hook: stop any in-flight inference run.
    self.inference.stop()
|
||||
|
||||
Reference in New Issue
Block a user