Fixed PC build to work with ONNX YOLOv10 files

This commit is contained in:
Tuomas Järvinen
2024-07-09 20:34:21 +02:00
parent c4af0cc461
commit 607ac22b46
3 changed files with 24 additions and 11 deletions
+3 -4
View File
@@ -47,12 +47,11 @@ void AiEngine::inferenceResultsReceivedSlot(AiEngineInferenceResult result)
 {
     //qDebug() << "AiEngine got inference results in thread: " << QThread::currentThreadId();
+#ifdef OPI5_BUILD
     mGimbalControl->inferenceResultSlot(result);
-    cv::imshow("Received Frame", result.frame);
-    //#ifndef OPI5_BUILD
-    //cv::imshow("Received Frame", result.frame);
-    //#endif
+#endif
+    cv::imshow("Received Frame", result.frame);
 }
+4 -3
View File
@@ -5,16 +5,17 @@ PKGCONFIG += opencv4
 MOC_DIR = moc
 OBJECTS_DIR = obj
-SOURCES = $$PWD/*.cpp
+SOURCES = $$PWD/*.cpp $$PWD/../../misc/camera/a8_remote/remoteControl.cpp
 HEADERS = $$PWD/*.h
+INCLUDEPATH += $$PWD/../../misc/camera/a8_remote
 opi5 {
     message("OPI5 build")
     PKGCONFIG += opencv4 librga stb libturbojpeg
-    INCLUDEPATH += /usr/include/rga $$PWD/../../misc/camera/a8_remote
+    INCLUDEPATH += /usr/include/rga
     QMAKE_CXXFLAGS += -DOPI5_BUILD
     LIBS += /usr/local/lib/librknnrt.so
-    SOURCES += $$PWD/src-opi5/*.c $$PWD/src-opi5/*.cpp $$PWD/src-opi5/*.cc $$PWD/../../misc/camera/a8_remote/remoteControl.cpp
+    SOURCES += $$PWD/src-opi5/*.c $$PWD/src-opi5/*.cpp $$PWD/src-opi5/*.cc
     HEADERS += $$PWD/src-opi5/*.h
 } else {
     message("ONNX build")
@@ -2,17 +2,18 @@
 #include <QThread>
 #include "aiengineinferenceonnx.h"

 AiEngineInferenceOnnx::AiEngineInferenceOnnx(QString modelPath, QObject *parent)
     : AiEngineInference{modelPath, parent}
 {
-    qDebug() << "TUOMAS test mModelPath=" << mModelPath;
+    //qDebug() << "TUOMAS test mModelPath=" << mModelPath;
     mEngine = new InferenceEngine(modelPath.toStdString());
 }

 void AiEngineInferenceOnnx::performInferenceSlot(cv::Mat frame)
 {
-    qDebug() << "performInferenceSlot() in thread: " << QThread::currentThreadId();
+    //qDebug() << "performInferenceSlot() in thread: " << QThread::currentThreadId();
     mActive = true;
@@ -20,12 +21,24 @@ void AiEngineInferenceOnnx::performInferenceSlot(cv::Mat frame)
     int orig_height = frame.rows;
     std::vector<float> input_tensor_values = mEngine->preprocessImage(frame);
     std::vector<float> results = mEngine->runInference(input_tensor_values);
-    float confidence_threshold = 0.5;
+    float confidence_threshold = 0.4;
     std::vector<Detection> detections = mEngine->filterDetections(results, confidence_threshold, mEngine->input_shape[2], mEngine->input_shape[3], orig_width, orig_height);

     AiEngineInferenceResult result;
+    for (uint32_t i = 0; i < detections.size(); i++) {
+        const Detection &detection = detections[i];
+        AiEngineObject object;
+        object.classId = detection.class_id;
+        object.propability = detection.confidence;
+        object.rectangle.top = detection.bbox.y;
+        object.rectangle.left = detection.bbox.x;
+        object.rectangle.bottom = detection.bbox.y + detection.bbox.height;
+        object.rectangle.right = detection.bbox.x + detection.bbox.width;
+        result.objects.append(object);
+    }
     result.frame = mEngine->draw_labels(frame.clone(), detections);
-    result.objects = 1;
     emit resultsReady(result);
     mActive = false;