fix inference bug in loading model

Author: Alex Bezdieniezhnykh
Date: 2025-06-11 07:23:14 +03:00
parent f9815a0a3f
commit 904bc688ca
2 changed files with 9 additions and 5 deletions
@@ -102,11 +102,15 @@ cdef class Inference:
             time.sleep(1)
             engine_filename = TensorRTEngine.get_engine_filename(0)
-            model_bytes = self.loader_client.load_big_small_resource(engine_filename, models_dir)
-            self.engine = TensorRTEngine(model_bytes)
+            res = self.loader_client.load_big_small_resource(engine_filename, models_dir)
+            if res.err is not None:
+                raise Exception(res.err)
+            self.engine = TensorRTEngine(res.data)
         else:
-            model_bytes = self.loader_client.load_big_small_resource(constants.AI_ONNX_MODEL_FILE, models_dir)
-            self.engine = OnnxEngine(model_bytes)
+            res = self.loader_client.load_big_small_resource(constants.AI_ONNX_MODEL_FILE, models_dir)
+            if res.err is not None:
+                raise Exception(res.err)
+            self.engine = OnnxEngine(res.data)
         self.model_height, self.model_width = self.engine.get_input_shape()
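The change replaces the raw-bytes return value with a result object carrying err and data fields, so loader failures are raised as exceptions instead of passing a bad payload into the engine constructor. Below is a minimal sketch of that pattern; the LoadResult class and the load_engine_bytes helper are illustrative assumptions, not the project's actual loader_client API.

from dataclasses import dataclass
from typing import Optional

# Hypothetical result wrapper; the real loader_client return type may differ.
@dataclass
class LoadResult:
    data: Optional[bytes] = None
    err: Optional[str] = None

def load_engine_bytes(loader_client, filename: str, models_dir: str) -> bytes:
    # Mirrors the commit's pattern: check res.err before touching res.data.
    res = loader_client.load_big_small_resource(filename, models_dir)
    if res.err is not None:
        raise Exception(res.err)
    return res.data

With this shape, both the TensorRT and ONNX branches can share the same error check instead of silently constructing an engine from whatever the loader returned on failure.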