mirror of
https://github.com/azaion/autopilot.git
synced 2026-04-22 08:26:33 +00:00
5f22931e0d
reorganised logs for inference output, remove some logs for more clear output
129 lines
4.2 KiB
C++
129 lines
4.2 KiB
C++
#include <QDebug>
|
|
#include <opencv2/highgui.hpp>
|
|
|
|
#include "aiengine.h"
|
|
#include "aiengineinference.h"
|
|
#include "aiengineimagesaver.h"
|
|
|
|
#if defined(OPI5_BUILD)
|
|
#include "src-opi5/aiengineinferenceopi5.h"
|
|
#elif defined(OPENCV_BUILD)
|
|
#include "src-opencv-onnx/aiengineinferenceopencvonnx.h"
|
|
#elif defined(NCNN_BUILD)
|
|
#include "src-ncnn/aiengineinferencencnn.h"
|
|
#else
|
|
#include "src-onnx-runtime/aiengineinferenceonnxruntime.h"
|
|
#endif
|
|
|
|
|
|
|
|
// Constructs the AI engine: wires the RTSP frame source to one (or, on the
// OPI5 build, three) inference backends, each running on its own QThread.
//
// modelPath: path to the model file handed to the inference backend(s).
// parent:    standard QObject ownership parent.
//
// NOTE(review): the AiEngineInference objects are created without a QObject
// parent and moved to worker threads; nothing visible here deletes them or
// stops/joins the threads on shutdown — presumably process lifetime covers
// this, but confirm there is no teardown requirement.
AiEngine::AiEngine(QString modelPath, QObject *parent)
    : QObject{parent}
{
    // Frames arrive from the RTSP listener via frameReceivedSlot().
    mRtspListener = new AiEngineRtspListener(this);
    connect(mRtspListener, &AiEngineRtspListener::frameReceived, this, &AiEngine::frameReceivedSlot);

    // Backend selection is a compile-time decision. The OPI5 build creates
    // three engines initialized with indices 0/1/2 — presumably one per NPU
    // core, to pipeline frames in parallel (confirm against the OPI5 backend).
#if defined(OPI5_BUILD)
    mInference = new AiEngineInferenceOpi5(modelPath);
    mInference->initialize(0);
    mInference2 = new AiEngineInferenceOpi5(modelPath);
    mInference2->initialize(1);
    mInference3 = new AiEngineInferenceOpi5(modelPath);
    mInference3->initialize(2);
#elif defined(OPENCV_BUILD)
    mInference = new AiEngineInferenceOpencvOnnx(modelPath);
#elif defined(NCNN_BUILD)
    mInference = new AiEngineInferencevNcnn(modelPath);
#else
    mInference = new AiEngineInferencevOnnxRuntime(modelPath);
#endif

    // Run inference off this thread: worker-object pattern (moveToThread
    // before start). Queued connections make the frame/result hand-off
    // cross the thread boundary safely.
    QThread *inferenceThread = new QThread(this);
    mInference->moveToThread(inferenceThread);
    connect(mInference, &AiEngineInference::resultsReady, this, &AiEngine::inferenceResultsReceivedSlot, Qt::QueuedConnection);
    connect(this, &AiEngine::inferenceFrame, mInference, &AiEngineInference::performInferenceSlot, Qt::QueuedConnection);
    inferenceThread->start();

#ifdef OPI5_BUILD
    // Second worker thread (OPI5 only), wired identically to the first.
    QThread *inferenceThread2 = new QThread(this);
    mInference2->moveToThread(inferenceThread2);
    connect(mInference2, &AiEngineInference::resultsReady, this, &AiEngine::inferenceResultsReceivedSlot, Qt::QueuedConnection);
    connect(this, &AiEngine::inferenceFrame2, mInference2, &AiEngineInference::performInferenceSlot, Qt::QueuedConnection);
    inferenceThread2->start();

    // Third worker thread (OPI5 only).
    QThread *inferenceThread3 = new QThread(this);
    mInference3->moveToThread(inferenceThread3);
    connect(mInference3, &AiEngineInference::resultsReady, this, &AiEngine::inferenceResultsReceivedSlot, Qt::QueuedConnection);
    connect(this, &AiEngine::inferenceFrame3, mInference3, &AiEngineInference::performInferenceSlot, Qt::QueuedConnection);
    inferenceThread3->start();
#endif

#ifdef GIMBAL
    mGimbalClient = new AiEngineGimbalClient(this);
#else
    // Null when built without gimbal support; checked before use in
    // inferenceResultsReceivedSlot().
    mGimbalClient = nullptr;
#endif
}
|
|
|
|
|
|
void AiEngine::start(void)
|
|
{
|
|
mRtspListener->startListening();
|
|
mElapsedTimer.start();
|
|
}
|
|
|
|
|
|
void AiEngine::stop(void)
|
|
{
|
|
mRtspListener->stopListening();
|
|
}
|
|
|
|
|
|
// Receives a completed inference result. Delivered via a queued connection
// from a worker thread, so this always executes on the owner (GUI) thread.
// Forwards the result to the gimbal client (when built with GIMBAL),
// displays the frame, and optionally saves it to disk.
//
// result: frame plus inference output produced by an AiEngineInference
//         backend (passed by value across the queued connection).
//
// Fixes:
//  - cv::imshow() alone never repaints: HighGUI only processes its paint
//    events inside waitKey/pollKey, so a short waitKey(1) is added.
//  - In the SAVE_IMAGES branch, the finished->deleteLater connection was
//    made AFTER saver->start(); a fast saver thread could emit finished()
//    before the connection existed, leaking the saver. Connect first.
void AiEngine::inferenceResultsReceivedSlot(AiEngineInferenceResult result)
{
    mFrameCounter++;
    //qDebug() << "FPS = " << (mFrameCounter / (mElapsedTimer.elapsed()/1000.0f));
    //qDebug() << "DEBUG. inference frame counter:" << mFrameCounter;

    //qDebug() << "AiEngine got inference results in thread: " << QThread::currentThreadId();
    if (mGimbalClient != nullptr) {
        mGimbalClient->inferenceResultSlot(result);
    }

    cv::imshow("Received Frame", result.frame);
    cv::waitKey(1);     // pump HighGUI events so the window actually renders

#ifdef SAVE_IMAGES
    // Counter is only touched on this (owner) thread, so a plain static
    // is safe here.
    static int imageCounter = 0;
    AiEngineImageSaver *saver = new AiEngineImageSaver(result.frame, ++imageCounter);
    // Connect BEFORE start(): otherwise finished() can fire before the
    // connection exists and the saver object is never deleted.
    connect(saver, &AiEngineImageSaver::finished, saver, &QObject::deleteLater);
    saver->start();
#endif
}
|
|
|
|
|
|
// Slot fed by the RTSP listener with each decoded frame. Dispatches the
// frame to the first idle inference engine via a queued signal; when every
// engine is busy the frame is simply not forwarded (dropped).
//
// frame: cv::Mat is reference-counted, so the by-value pass is a shallow
//        copy of the header, not of the pixel data.
void AiEngine::frameReceivedSlot(cv::Mat frame)
{
    //qDebug() << "AiEngine got frame from RTSP listener in thread: " << QThread::currentThreadId();
    //cv::imshow("Received Frame", frame);

    // Counts only frames actually handed to an engine (kept for the
    // commented-out debug trace below).
    static int framecounter = 0;
    //qDebug() << "DEBUG. RTSP frame counter:" << framecounter;

    // Guard-clause dispatch: try each engine in order, return on success.
    if (!mInference->isActive()) {
        emit inferenceFrame(frame);
        framecounter++;
        return;
    }

#ifdef OPI5_BUILD
    if (!mInference2->isActive()) {
        emit inferenceFrame2(frame);
        framecounter++;
        return;
    }

    if (!mInference3->isActive()) {
        emit inferenceFrame3(frame);
        framecounter++;
        return;
    }
#endif

    // All engines busy: frame is dropped.
}
|