Write logs for inference and loader to a file

This commit is contained in:
Alex Bezdieniezhnykh
2025-06-14 16:08:32 +03:00
parent 8aa2f563a4
commit 6f297c4ebf
30 changed files with 218 additions and 140 deletions
+3 -2
View File
@@ -1,5 +1,6 @@
from inference_engine cimport InferenceEngine
import onnxruntime as onnx
cimport constants
cdef class OnnxEngine(InferenceEngine):
def __init__(self, model_bytes: bytes, batch_size: int = 1, **kwargs):
@@ -10,9 +11,9 @@ cdef class OnnxEngine(InferenceEngine):
self.input_name = self.model_inputs[0].name
self.input_shape = self.model_inputs[0].shape
self.batch_size = self.input_shape[0] if self.input_shape[0] != -1 else batch_size
print(f'AI detection model input: {self.model_inputs} {self.input_shape}')
constants.log(f'AI detection model input: {self.model_inputs} {self.input_shape}')
model_meta = self.session.get_modelmeta()
print("Metadata:", model_meta.custom_metadata_map)
constants.log(f"Metadata: {model_meta.custom_metadata_map}")
cpdef tuple get_input_shape(self):
shape = self.input_shape