Files
autopilot/ai_controller/aiengine.cpp
T
Tuomas Järvinen e3643ea622 Small fixes to AI Controller
- removed QtSerialPort from the Qt CONFIG parameters
- remove compiler warnings
- reduced logging
- fixed FPS to show what AI really analyzed
- RTSP reader tries to connect to the stream once per second until it succeeds
2024-10-24 18:36:12 +02:00

124 lines
3.7 KiB
C++

#include <QDebug>
#include <opencv2/highgui.hpp>
#include "aiengine.h"
#include "aiengineinference.h"
#ifdef SAVE_IMAGES
#include "aiengineimagesaver.h"
#endif
#if defined(OPI5_BUILD)
#include "src-opi5/aiengineinferenceopi5.h"
#elif defined(OPENCV_BUILD)
#include "src-opencv-onnx/aiengineinferenceopencvonnx.h"
#elif defined(NCNN_BUILD)
#include "src-ncnn/aiengineinferencencnn.h"
#else
#include "src-onnx-runtime/aiengineinferenceonnxruntime.h"
#endif
// Constructs the AI engine: wires the RTSP listener to frame dispatch,
// instantiates the build-specific inference backend(s), and moves each
// backend onto its own worker thread so inference never blocks this thread.
//
// @param modelPath path to the model file handed to the inference backend(s)
// @param parent    standard Qt parent for ownership of this object
AiEngine::AiEngine(QString modelPath, QObject *parent) :
QObject{parent},
mRtspFrameCounter(0),
mInferenceFrameCounter(0)
{
// Listener is parented to this object, so Qt deletes it with the engine.
mRtspListener = new AiEngineRtspListener(this);
connect(mRtspListener, &AiEngineRtspListener::frameReceived, this, &AiEngine::frameReceivedSlot);
#if defined(OPI5_BUILD)
// Three parallel inference instances, initialized with ids 0/1/2 —
// presumably one per NPU core on the Orange Pi 5; verify against the
// OPI5 backend's initialize() implementation.
mInference = new AiEngineInferenceOpi5(modelPath);
mInference->initialize(0);
mInference2 = new AiEngineInferenceOpi5(modelPath);
mInference2->initialize(1);
mInference3 = new AiEngineInferenceOpi5(modelPath);
mInference3->initialize(2);
#elif defined(OPENCV_BUILD)
mInference = new AiEngineInferenceOpencvOnnx(modelPath);
#elif defined(NCNN_BUILD)
// NOTE(review): class name carries a stray-looking 'v'
// (AiEngineInferencevNcnn vs header aiengineinferencencnn.h) —
// confirm it matches the declaration in the header.
mInference = new AiEngineInferencevNcnn(modelPath);
#else
// NOTE(review): same stray-looking 'v' as the NCNN case — verify.
mInference = new AiEngineInferencevOnnxRuntime(modelPath);
#endif
// The inference object is deliberately parentless: a QObject with a parent
// cannot be moved to another thread. Queued connections deliver signals
// safely across the thread boundary in both directions.
// NOTE(review): the worker threads are never quit()/wait()ed and the
// inference objects are never deleted — confirm whether teardown is
// handled elsewhere or is acceptable for this process's lifetime.
QThread *inferenceThread = new QThread(this);
mInference->moveToThread(inferenceThread);
connect(mInference, &AiEngineInference::resultsReady, this, &AiEngine::inferenceResultsReceivedSlot, Qt::QueuedConnection);
connect(this, &AiEngine::inferenceFrame, mInference, &AiEngineInference::performInferenceSlot, Qt::QueuedConnection);
inferenceThread->start();
#ifdef OPI5_BUILD
// Same worker-thread wiring for the two extra OPI5 inference instances.
QThread *inferenceThread2 = new QThread(this);
mInference2->moveToThread(inferenceThread2);
connect(mInference2, &AiEngineInference::resultsReady, this, &AiEngine::inferenceResultsReceivedSlot, Qt::QueuedConnection);
connect(this, &AiEngine::inferenceFrame2, mInference2, &AiEngineInference::performInferenceSlot, Qt::QueuedConnection);
inferenceThread2->start();
QThread *inferenceThread3 = new QThread(this);
mInference3->moveToThread(inferenceThread3);
connect(mInference3, &AiEngineInference::resultsReady, this, &AiEngine::inferenceResultsReceivedSlot, Qt::QueuedConnection);
connect(this, &AiEngine::inferenceFrame3, mInference3, &AiEngineInference::performInferenceSlot, Qt::QueuedConnection);
inferenceThread3->start();
#endif
#ifdef GIMBAL
mGimbalClient = new AiEngineGimbalClient(this);
#else
// Kept null so callers can test for gimbal support with a plain null check.
mGimbalClient = nullptr;
#endif
}
/// Starts the engine: begins the FPS reference clock and opens the RTSP
/// listener. The elapsed timer is the time base used by
/// inferenceResultsReceivedSlot() to report the analyzed-frame rate.
void AiEngine::start(void)
{
    // Start the clock first so the FPS window covers stream startup as well.
    mRtspElapsedTimer.start();
    mRtspListener->startListening();
}
/// Stops the engine by shutting down the RTSP listener; no further frames
/// will be dispatched to the inference workers after this returns.
void AiEngine::stop(void)
{
    mRtspListener->stopListening();
}
/// Receives a finished inference result from a worker thread (queued
/// connection), reports the analyzed-frame rate, forwards the result to the
/// gimbal client (when built with GIMBAL), displays the annotated frame, and
/// optionally saves it to disk (SAVE_IMAGES builds).
///
/// @param result inference output; result.frame is the annotated image.
void AiEngine::inferenceResultsReceivedSlot(AiEngineInferenceResult result)
{
    mInferenceFrameCounter++;
    // Read the timer exactly once so the zero-guard and the division use the
    // same value (the original read elapsed() twice, which could disagree).
    const qint64 elapsedMs = mRtspElapsedTimer.elapsed();
    const float fps = (elapsedMs == 0) ? 0.0f
                                       : (mInferenceFrameCounter / (elapsedMs / 1000.0f));
    printf("Analyzed %d/%d frames with AI. FPS=%.1f\n", mInferenceFrameCounter, mRtspFrameCounter, fps);
    if (mGimbalClient != nullptr) {
        mGimbalClient->inferenceResultSlot(result);
    }
    // NOTE(review): cv::imshow normally needs a cv::waitKey() call to pump the
    // highgui event loop — confirm the window actually refreshes in this app.
    cv::imshow("AI Engine", result.frame);
#ifdef SAVE_IMAGES
    // Each frame is written by a short-lived worker that deletes itself once
    // its finished() signal fires.
    static int imageCounter = 0;
    AiEngineImageSaver *saver = new AiEngineImageSaver(result.frame, ++imageCounter);
    saver->start();
    connect(saver, &AiEngineImageSaver::finished, saver, &QObject::deleteLater);
#endif
}
/// Dispatches an incoming RTSP frame to the first idle inference engine via a
/// queued signal. When every engine is still busy the frame is simply not
/// forwarded, so the inference counter only reflects frames the AI kept up with.
///
/// @param frame decoded video frame from the RTSP listener.
void AiEngine::frameReceivedSlot(cv::Mat frame)
{
    mRtspFrameCounter++;
    // Guard-clause form: hand the frame to the primary engine and bail out.
    if (!mInference->isActive()) {
        emit inferenceFrame(frame);
        return;
    }
#ifdef OPI5_BUILD
    // OPI5 builds have two additional engines to fall back on.
    if (!mInference2->isActive()) {
        emit inferenceFrame2(frame);
    } else if (!mInference3->isActive()) {
        emit inferenceFrame3(frame);
    }
#endif
}