From 683f2d538b955100dd7ce4564ce0c0ba9c97160c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tuomas=20J=C3=A4rvinen?= Date: Wed, 10 Jul 2024 19:53:19 +0200 Subject: [PATCH] Changes to opi_rtsp test application - refactoring - can use normal YOLOv8 files converted to ONNX format - does not work with azaion ONNX files! --- tmp/opi_rtsp/aiengine.cpp | 4 ++-- tmp/opi_rtsp/opi_rtsp.pro | 6 +++--- .../aiengineinferenceopencvonnx.cpp} | 21 +++++-------------- .../aiengineinferenceopencvonnx.h} | 6 +++--- .../inference.cpp | 0 .../{src-onnx => src-opencv-onnx}/inference.h | 0 6 files changed, 13 insertions(+), 24 deletions(-) rename tmp/opi_rtsp/{src-onnx/aiengineinferenceonnx.cpp => src-opencv-onnx/aiengineinferenceopencvonnx.cpp} (75%) rename tmp/opi_rtsp/{src-onnx/aiengineinferenceonnx.h => src-opencv-onnx/aiengineinferenceopencvonnx.h} (54%) rename tmp/opi_rtsp/{src-onnx => src-opencv-onnx}/inference.cpp (100%) rename tmp/opi_rtsp/{src-onnx => src-opencv-onnx}/inference.h (100%) diff --git a/tmp/opi_rtsp/aiengine.cpp b/tmp/opi_rtsp/aiengine.cpp index 5c75e28..d184dd6 100644 --- a/tmp/opi_rtsp/aiengine.cpp +++ b/tmp/opi_rtsp/aiengine.cpp @@ -6,7 +6,7 @@ #ifdef OPI5_BUILD #include "src-opi5/aiengineinferenceopi5.h" #else -#include "src-onnx/aiengineinferenceonnx.h" +#include "src-opencv-onnx/aiengineinferenceopencvonnx.h" #endif AiEngine::AiEngine(QString modelPath, QObject *parent) @@ -18,7 +18,7 @@ AiEngine::AiEngine(QString modelPath, QObject *parent) #ifdef OPI5_BUILD mInference = new AiEngineInferenceOpi5(modelPath); #else - mInference = new AiEngineInferenceOnnx(modelPath); + mInference = new AiEngineInferenceOpencvOnnx(modelPath); #endif QThread *inferenceThread = new QThread(this); diff --git a/tmp/opi_rtsp/opi_rtsp.pro b/tmp/opi_rtsp/opi_rtsp.pro index 1b8318c..4bc6ce8 100644 --- a/tmp/opi_rtsp/opi_rtsp.pro +++ b/tmp/opi_rtsp/opi_rtsp.pro @@ -19,12 +19,12 @@ opi5 { HEADERS += $$PWD/src-opi5/*.h } else { message("ONNX build") - message("You must use YOLOv10 ONNX 
files") + message("You must use YOLOv8 ONNX files") QMAKE_CXXFLAGS += -DONNX_BUILD INCLUDEPATH += /opt/onnxruntime-linux-x64-1.18.0/include LIBS += /opt/onnxruntime-linux-x64-1.18.0/lib/libonnxruntime.so.1.18.0 QMAKE_LFLAGS += -Wl,-rpath,/opt/onnxruntime-linux-x64-1.18.0/lib QMAKE_LFLAGS += -Wl,-rpath,/usr/local/lib - SOURCES += $$PWD/src-onnx/*.cpp - HEADERS += $$PWD/src-onnx/*.h + SOURCES += $$PWD/src-opencv-onnx/*.cpp + HEADERS += $$PWD/src-opencv-onnx/*.h } diff --git a/tmp/opi_rtsp/src-onnx/aiengineinferenceonnx.cpp b/tmp/opi_rtsp/src-opencv-onnx/aiengineinferenceopencvonnx.cpp similarity index 75% rename from tmp/opi_rtsp/src-onnx/aiengineinferenceonnx.cpp rename to tmp/opi_rtsp/src-opencv-onnx/aiengineinferenceopencvonnx.cpp index 24d6c32..b5bf0c2 100644 --- a/tmp/opi_rtsp/src-onnx/aiengineinferenceonnx.cpp +++ b/tmp/opi_rtsp/src-opencv-onnx/aiengineinferenceopencvonnx.cpp @@ -1,13 +1,13 @@ #include #include -#include "aiengineinferenceonnx.h" +#include "aiengineinferenceopencvonnx.h" const int INFERENCE_SQUARE_WIDTH = 640; const int INFERENCE_SQUARE_HEIGHT = 640; -AiEngineInferenceOnnx::AiEngineInferenceOnnx(QString modelPath, QObject *parent) +AiEngineInferenceOpencvOnnx::AiEngineInferenceOpencvOnnx(QString modelPath, QObject *parent) : AiEngineInference{modelPath, parent}, mInference(modelPath.toStdString(), cv::Size(640, 640), "classes.txt") { @@ -53,7 +53,7 @@ cv::Mat resizeAndPad(const cv::Mat& src) } -void AiEngineInferenceOnnx::performInferenceSlot(cv::Mat frame) +void AiEngineInferenceOpencvOnnx::performInferenceSlot(cv::Mat frame) { try { //qDebug() << "performInferenceSlot() in thread: " << QThread::currentThreadId(); @@ -64,6 +64,8 @@ void AiEngineInferenceOnnx::performInferenceSlot(cv::Mat frame) std::vector detections = mInference.runInference(scaledImage); AiEngineInferenceResult result; + //qDebug() << "performInferenceSlot() found " << detections.size() << " objects"; + for (uint i = 0; i < detections.size(); ++i) { const Detection 
&detection = detections[i]; @@ -76,19 +78,6 @@ void AiEngineInferenceOnnx::performInferenceSlot(cv::Mat frame) object.rectangle.bottom = detection.box.y + detection.box.height; object.rectangle.right = detection.box.x + detection.box.width; result.objects.append(object); - - /* - // Draw box and text - cv::Rect box = detection.box; - cv::Scalar color = detection.color; - cv::rectangle(frame, box, color, 2); - std::string classString = detection.className + ' ' + std::to_string(detection.confidence).substr(0, 4); - //std::cout << "classString:" << classString << std::endl; - cv::Size textSize = cv::getTextSize(classString, cv::FONT_HERSHEY_DUPLEX, 1, 2, 0); - cv::Rect textBox(box.x, box.y - 40, textSize.width + 10, textSize.height + 20); - cv::rectangle(scaledImage, textBox, color, cv::FILLED); - cv::putText(scaledImage, classString, cv::Point(box.x + 5, box.y - 10), cv::FONT_HERSHEY_DUPLEX, 1, cv::Scalar(0, 0, 0), 2, 0); - */ } if (result.objects.empty() == false) { diff --git a/tmp/opi_rtsp/src-onnx/aiengineinferenceonnx.h b/tmp/opi_rtsp/src-opencv-onnx/aiengineinferenceopencvonnx.h similarity index 54% rename from tmp/opi_rtsp/src-onnx/aiengineinferenceonnx.h rename to tmp/opi_rtsp/src-opencv-onnx/aiengineinferenceopencvonnx.h index 19269f8..7c46666 100644 --- a/tmp/opi_rtsp/src-onnx/aiengineinferenceonnx.h +++ b/tmp/opi_rtsp/src-opencv-onnx/aiengineinferenceopencvonnx.h @@ -2,13 +2,13 @@ #include #include "aiengineinference.h" -#include "src-onnx/inference.h" +#include "src-opencv-onnx/inference.h" -class AiEngineInferenceOnnx : public AiEngineInference +class AiEngineInferenceOpencvOnnx : public AiEngineInference { Q_OBJECT public: - explicit AiEngineInferenceOnnx(QString modelPath, QObject *parent = nullptr); + explicit AiEngineInferenceOpencvOnnx(QString modelPath, QObject *parent = nullptr); public slots: void performInferenceSlot(cv::Mat frame) override; diff --git a/tmp/opi_rtsp/src-onnx/inference.cpp b/tmp/opi_rtsp/src-opencv-onnx/inference.cpp similarity 
index 100% rename from tmp/opi_rtsp/src-onnx/inference.cpp rename to tmp/opi_rtsp/src-opencv-onnx/inference.cpp diff --git a/tmp/opi_rtsp/src-onnx/inference.h b/tmp/opi_rtsp/src-opencv-onnx/inference.h similarity index 100% rename from tmp/opi_rtsp/src-onnx/inference.h rename to tmp/opi_rtsp/src-opencv-onnx/inference.h