Minor fixes to NCNN inference

- reduced logging
- also emit empty inference results to AiEngine
This commit is contained in:
Tuomas Järvinen
2024-10-24 18:57:49 +02:00
parent e3643ea622
commit de63892725
@@ -1,6 +1,5 @@
#include <QDebug> #include <QDebug>
#include <QThread> #include <QThread>
#include <iostream>
#include <vector> #include <vector>
#include "aiengineinferencencnn.h" #include "aiengineinferencencnn.h"
@@ -23,7 +22,7 @@ char* getCharPointerCopy(const QString& modelPath) {
AiEngineInferencevNcnn::AiEngineInferencevNcnn(QString modelPath, QObject *parent) : AiEngineInferencevNcnn::AiEngineInferencevNcnn(QString modelPath, QObject *parent) :
AiEngineInference{modelPath, parent} AiEngineInference{modelPath, parent}
{ {
qDebug() << "TUOMAS AiEngineInferencevNcnn() mModelPath=" << mModelPath; qDebug() << "AiEngineInferencevNcnn() mModelPath=" << mModelPath;
yolov8.opt.num_threads = 4; yolov8.opt.num_threads = 4;
yolov8.opt.use_vulkan_compute = false; yolov8.opt.use_vulkan_compute = false;
@@ -32,9 +31,6 @@ AiEngineInferencevNcnn::AiEngineInferencevNcnn(QString modelPath, QObject *paren
char *model = getCharPointerCopy(modelPath); char *model = getCharPointerCopy(modelPath);
char *param = getCharPointerCopy(paramPath); char *param = getCharPointerCopy(paramPath);
qDebug() << "model:" << model;
qDebug() << "param:" << param;
yolov8.load_param(param); yolov8.load_param(param);
yolov8.load_model(model); yolov8.load_model(model);
} }
@@ -229,8 +225,6 @@ int AiEngineInferencevNcnn::detect_yolov8(const cv::Mat& bgr, std::vector<Object
const float norm_vals[3] = {1 / 255.f, 1 / 255.f, 1 / 255.f}; const float norm_vals[3] = {1 / 255.f, 1 / 255.f, 1 / 255.f};
in_pad.substract_mean_normalize(0, norm_vals); in_pad.substract_mean_normalize(0, norm_vals);
auto start = std::chrono::high_resolution_clock::now();
ncnn::Extractor ex = yolov8.create_extractor(); ncnn::Extractor ex = yolov8.create_extractor();
ex.input("in0", in_pad); ex.input("in0", in_pad);
@@ -283,9 +277,6 @@ int AiEngineInferencevNcnn::detect_yolov8(const cv::Mat& bgr, std::vector<Object
objects[i].rect.height = y1 - y0; objects[i].rect.height = y1 - y0;
} }
auto end = std::chrono::high_resolution_clock::now();
auto duration = std::chrono::duration_cast<std::chrono::milliseconds>(end - start);
std::cout << "Time taken: " << duration.count() << " milliseconds" << std::endl;
return 0; return 0;
} }
@@ -340,8 +331,7 @@ static cv::Mat draw_objects(const cv::Mat& bgr, const std::vector<Object>& objec
cv::Scalar cc(color[0], color[1], color[2]); cv::Scalar cc(color[0], color[1], color[2]);
fprintf(stderr, "%d = %.5f at %.2f %.2f %.2f x %.2f\n", obj.label, obj.prob, //fprintf(stderr, "%d = %.5f at %.2f %.2f %.2f x %.2f\n", obj.label, obj.prob, obj.rect.x, obj.rect.y, obj.rect.width, obj.rect.height);
obj.rect.x, obj.rect.y, obj.rect.width, obj.rect.height);
cv::rectangle(image, obj.rect, cc, 2); cv::rectangle(image, obj.rect, cc, 2);
@@ -378,7 +368,6 @@ void AiEngineInferencevNcnn::performInferenceSlot(cv::Mat frame)
std::vector<Object> objects; std::vector<Object> objects;
detect_yolov8(scaledImage, objects); detect_yolov8(scaledImage, objects);
if (objects.empty() == false) {
AiEngineInferenceResult result; AiEngineInferenceResult result;
result.frame = draw_objects(scaledImage, objects); result.frame = draw_objects(scaledImage, objects);
@@ -396,8 +385,6 @@ void AiEngineInferencevNcnn::performInferenceSlot(cv::Mat frame)
} }
emit resultsReady(result); emit resultsReady(result);
}
mActive = false; mActive = false;
} }