- added NCNN model support to rtsp_ai_player

- added printing of inference FPS
- simple AI test bench which can be used to compare models
This commit is contained in:
Tuomas Järvinen
2024-10-02 19:15:49 +02:00
parent ef137fbc4b
commit d4779b1bb0
12 changed files with 555 additions and 763 deletions
+13
View File
@@ -1,5 +1,6 @@
#include <QDebug>
#include <opencv2/highgui.hpp>
#include "aiengine.h"
#include "aiengineinference.h"
#include "aiengineimagesaver.h"
@@ -8,10 +9,14 @@
#include "src-opi5/aiengineinferenceopi5.h"
#elif defined(OPENCV_BUILD)
#include "src-opencv-onnx/aiengineinferenceopencvonnx.h"
#elif defined(NCNN_BUILD)
#include "src-ncnn/aiengineinferencencnn.h"
#else
#include "src-onnx-runtime/aiengineinferenceonnxruntime.h"
#endif
AiEngine::AiEngine(QString modelPath, QObject *parent)
: QObject{parent}
{
@@ -27,6 +32,8 @@ AiEngine::AiEngine(QString modelPath, QObject *parent)
mInference3->initialize(2);
#elif defined(OPENCV_BUILD)
mInference = new AiEngineInferenceOpencvOnnx(modelPath);
#elif defined(NCNN_BUILD)
mInference = new AiEngineInferencevNcnn(modelPath);
#else
mInference = new AiEngineInferencevOnnxRuntime(modelPath);
#endif
@@ -76,6 +83,7 @@ void AiEngine::inferenceResultsReceivedSlot(AiEngineInferenceResult result)
{
mFrameCounter++;
qDebug() << "FPS = " << (mFrameCounter / (mElapsedTimer.elapsed()/1000.0f));
//qDebug() << "DEBUG. inference frame counter:" << mFrameCounter;
//qDebug() << "AiEngine got inference results in thread: " << QThread::currentThreadId();
if (mGimbalClient != nullptr) {
@@ -97,19 +105,24 @@ void AiEngine::frameReceivedSlot(cv::Mat frame)
{
//qDebug() << "AiEngine got frame from RTSP listener in thread: " << QThread::currentThreadId();
//cv::imshow("Received Frame", frame);
static int framecounter = 0;
//qDebug() << "DEBUG. RTSP frame counter:" << framecounter;
if (mInference->isActive() == false) {
//qDebug() << "AiEngine. Inference thread is free. Sending frame to it.";
emit inferenceFrame(frame);
framecounter++;
}
#ifdef OPI5_BUILD
else if (mInference2->isActive() == false) {
//qDebug() << "AiEngine. Inference thread is free. Sending frame to it.";
emit inferenceFrame2(frame);
framecounter++;
}
else if (mInference3->isActive() == false) {
//qDebug() << "AiEngine. Inference thread is free. Sending frame to it.";
emit inferenceFrame3(frame);
framecounter++;
}
#endif
}