Small fixes to AI Controller

- removed QtSerialPort from the Qt CONFIG parameters
- removed compiler warnings
- reduced logging
- fixed FPS to show what AI really analyzed
- RTSP reader tries to connect to the stream once per second until it succeeds
This commit is contained in:
Tuomas Järvinen
2024-10-24 18:36:12 +02:00
parent 45c19baa45
commit e3643ea622
5 changed files with 26 additions and 32 deletions
+1 -1
View File
@@ -1,4 +1,4 @@
QT += core network serialport QT += core network
QT -= gui QT -= gui
CONFIG += concurrent console c++17 CONFIG += concurrent console c++17
MOC_DIR = moc MOC_DIR = moc
+13 -18
View File
@@ -3,7 +3,9 @@
#include "aiengine.h" #include "aiengine.h"
#include "aiengineinference.h" #include "aiengineinference.h"
#ifdef SAVE_IMAGES
#include "aiengineimagesaver.h" #include "aiengineimagesaver.h"
#endif
#if defined(OPI5_BUILD) #if defined(OPI5_BUILD)
#include "src-opi5/aiengineinferenceopi5.h" #include "src-opi5/aiengineinferenceopi5.h"
@@ -17,8 +19,11 @@
AiEngine::AiEngine(QString modelPath, QObject *parent) AiEngine::AiEngine(QString modelPath, QObject *parent) :
: QObject{parent} QObject{parent},
mRtspFrameCounter(0),
mInferenceFrameCounter(0)
{ {
mRtspListener = new AiEngineRtspListener(this); mRtspListener = new AiEngineRtspListener(this);
connect(mRtspListener, &AiEngineRtspListener::frameReceived, this, &AiEngine::frameReceivedSlot); connect(mRtspListener, &AiEngineRtspListener::frameReceived, this, &AiEngine::frameReceivedSlot);
@@ -69,7 +74,7 @@ AiEngine::AiEngine(QString modelPath, QObject *parent)
void AiEngine::start(void) void AiEngine::start(void)
{ {
mRtspListener->startListening(); mRtspListener->startListening();
mElapsedTimer.start(); mRtspElapsedTimer.start();
} }
@@ -81,16 +86,15 @@ void AiEngine::stop(void)
void AiEngine::inferenceResultsReceivedSlot(AiEngineInferenceResult result) void AiEngine::inferenceResultsReceivedSlot(AiEngineInferenceResult result)
{ {
mFrameCounter++; mInferenceFrameCounter++;
qDebug() << "FPS = " << (mFrameCounter / (mElapsedTimer.elapsed()/1000.0f)); float fps =mRtspElapsedTimer.elapsed() == 0 ? 0 : (mInferenceFrameCounter / (mRtspElapsedTimer.elapsed()/1000.0f));
//qDebug() << "DEBUG. inference frame counter:" << mFrameCounter; printf("Analyzed %d/%d frames with AI. FPS=%.1f\n", mInferenceFrameCounter, mRtspFrameCounter, fps);
//qDebug() << "AiEngine got inference results in thread: " << QThread::currentThreadId();
if (mGimbalClient != nullptr) { if (mGimbalClient != nullptr) {
mGimbalClient->inferenceResultSlot(result); mGimbalClient->inferenceResultSlot(result);
} }
cv::imshow("Received Frame", result.frame); cv::imshow("AI Engine", result.frame);
#ifdef SAVE_IMAGES #ifdef SAVE_IMAGES
static int imageCounter = 0; static int imageCounter = 0;
@@ -103,26 +107,17 @@ void AiEngine::inferenceResultsReceivedSlot(AiEngineInferenceResult result)
void AiEngine::frameReceivedSlot(cv::Mat frame) void AiEngine::frameReceivedSlot(cv::Mat frame)
{ {
//qDebug() << "AiEngine got frame from RTSP listener in thread: " << QThread::currentThreadId(); mRtspFrameCounter++;
//cv::imshow("Received Frame", frame);
static int framecounter = 0;
//qDebug() << "DEBUG. RTSP frame counter:" << framecounter;
if (mInference->isActive() == false) { if (mInference->isActive() == false) {
//qDebug() << "AiEngine. Inference thread is free. Sending frame to it.";
emit inferenceFrame(frame); emit inferenceFrame(frame);
framecounter++;
} }
#ifdef OPI5_BUILD #ifdef OPI5_BUILD
else if (mInference2->isActive() == false) { else if (mInference2->isActive() == false) {
//qDebug() << "AiEngine. Inference thread is free. Sending frame to it.";
emit inferenceFrame2(frame); emit inferenceFrame2(frame);
framecounter++;
} }
else if (mInference3->isActive() == false) { else if (mInference3->isActive() == false) {
//qDebug() << "AiEngine. Inference thread is free. Sending frame to it.";
emit inferenceFrame3(frame); emit inferenceFrame3(frame);
framecounter++;
} }
#endif #endif
} }
+4 -3
View File
@@ -1,7 +1,7 @@
#pragma once #pragma once
#include <QObject>
#include <QElapsedTimer> #include <QElapsedTimer>
#include <QObject>
#include <opencv2/core.hpp> #include <opencv2/core.hpp>
#include <opencv2/videoio.hpp> #include <opencv2/videoio.hpp>
#include "aienginertsplistener.h" #include "aienginertsplistener.h"
@@ -26,8 +26,9 @@ signals:
void inferenceFrame3(cv::Mat frame); void inferenceFrame3(cv::Mat frame);
private: private:
QElapsedTimer mElapsedTimer; uint32_t mRtspFrameCounter;
uint32_t mFrameCounter = 0; uint32_t mInferenceFrameCounter;
QElapsedTimer mRtspElapsedTimer;
AiEngineRtspListener *mRtspListener; AiEngineRtspListener *mRtspListener;
AiEngineInference *mInference; AiEngineInference *mInference;
AiEngineInference *mInference2; AiEngineInference *mInference2;
+5 -1
View File
@@ -82,7 +82,11 @@ void AiEngineRtspListener::listenLoop(void)
#else #else
qDebug() << "AiEngineRtspListener loop running in thread: " << QThread::currentThreadId(); qDebug() << "AiEngineRtspListener loop running in thread: " << QThread::currentThreadId();
mCap.open(rtspVideoUrl.toStdString()); while (mCap.open(rtspVideoUrl.toStdString()) == false) {
qDebug() << "AiEngineRtspListener can't open video stream:" << rtspVideoUrl;
QThread::msleep(1000);
}
cv::Mat frame; cv::Mat frame;
while (mIsListening) { while (mIsListening) {
@@ -13,7 +13,7 @@ AiEngineInferencevOnnxRuntime::AiEngineInferencevOnnxRuntime(QString modelPath,
AiEngineInference{modelPath, parent}, AiEngineInference{modelPath, parent},
mPredictor(modelPath.toStdString(), confThreshold, iouThreshold, maskThreshold) mPredictor(modelPath.toStdString(), confThreshold, iouThreshold, maskThreshold)
{ {
qDebug() << "TUOMAS AiEngineInferencevOnnxRuntime() mModelPath=" << mModelPath; qDebug() << "AiEngineInferencevOnnxRuntime() mModelPath=" << mModelPath;
qDebug() << "AiEngineInferencevOnnxRuntime() mClassNames.size() =" << mClassNames.size(); qDebug() << "AiEngineInferencevOnnxRuntime() mClassNames.size() =" << mClassNames.size();
} }
@@ -49,7 +49,6 @@ cv::Mat AiEngineInferencevOnnxRuntime::drawLabels(const cv::Mat &image, const st
cv::Scalar(0, 0, 0), cv::Scalar(0, 0, 0),
1, 1,
cv::LINE_AA); cv::LINE_AA);
} }
return result; return result;
@@ -58,8 +57,6 @@ cv::Mat AiEngineInferencevOnnxRuntime::drawLabels(const cv::Mat &image, const st
void AiEngineInferencevOnnxRuntime::performInferenceSlot(cv::Mat frame) void AiEngineInferencevOnnxRuntime::performInferenceSlot(cv::Mat frame)
{ {
qDebug() << __PRETTY_FUNCTION__;
try { try {
mActive = true; mActive = true;
cv::Mat scaledImage = resizeAndPad(frame); cv::Mat scaledImage = resizeAndPad(frame);
@@ -106,11 +103,8 @@ void AiEngineInferencevOnnxRuntime::performInferenceSlot(cv::Mat frame)
result.objects.append(object); result.objects.append(object);
} }
if (result.objects.empty() == false) { result.frame = drawLabels(scaledImage, detections);
result.frame = drawLabels(scaledImage, detections); emit resultsReady(result);
emit resultsReady(result);
}
mActive = false; mActive = false;
} }
catch (const cv::Exception& e) { catch (const cv::Exception& e) {