Enables use of multiple TPUs in OPI5

This commit is contained in:
Tuomas Järvinen
2024-07-27 11:28:47 +03:00
parent 147213cec6
commit 7052a05d55
17 changed files with 336 additions and 65 deletions
+34 -1
View File
@@ -18,7 +18,12 @@ AiEngine::AiEngine(QString modelPath, QObject *parent)
connect(mRtspListener, &AiEngineRtspListener::frameReceived, this, &AiEngine::frameReceivedSlot);
#if defined(OPI5_BUILD)
mInference = new AiEngineInferenceOpi5(modelPath);
mInference = new AiEngineInferenceOpi5(modelPath);
mInference->initialize(0);
mInference2 = new AiEngineInferenceOpi5(modelPath);
mInference2->initialize(1);
mInference3 = new AiEngineInferenceOpi5(modelPath);
mInference3->initialize(2);
#elif defined(OPENCV_BUILD)
mInference = new AiEngineInferenceOpencvOnnx(modelPath);
#else
@@ -31,6 +36,20 @@ AiEngine::AiEngine(QString modelPath, QObject *parent)
connect(this, &AiEngine::inferenceFrame, mInference, &AiEngineInference::performInferenceSlot, Qt::QueuedConnection);
inferenceThread->start();
#ifdef OPI5_BUILD
QThread *inferenceThread2 = new QThread(this);
mInference2->moveToThread(inferenceThread2);
connect(mInference2, &AiEngineInference::resultsReady, this, &AiEngine::inferenceResultsReceivedSlot, Qt::QueuedConnection);
connect(this, &AiEngine::inferenceFrame2, mInference2, &AiEngineInference::performInferenceSlot, Qt::QueuedConnection);
inferenceThread2->start();
QThread *inferenceThread3 = new QThread(this);
mInference3->moveToThread(inferenceThread3);
connect(mInference3, &AiEngineInference::resultsReady, this, &AiEngine::inferenceResultsReceivedSlot, Qt::QueuedConnection);
connect(this, &AiEngine::inferenceFrame3, mInference3, &AiEngineInference::performInferenceSlot, Qt::QueuedConnection);
inferenceThread3->start();
#endif
mGimbalClient = new AiEngineGimbalClient(this);
}
@@ -38,6 +57,7 @@ AiEngine::AiEngine(QString modelPath, QObject *parent)
// Starts frame acquisition: begins listening on the RTSP source (frames are
// delivered through AiEngineRtspListener::frameReceived, connected to
// frameReceivedSlot in the constructor) and starts the elapsed timer that
// inferenceResultsReceivedSlot uses as the reference clock for its FPS log.
void AiEngine::start(void)
{
mRtspListener->startListening();
// FPS baseline: elapsed() is divided into mFrameCounter later, so the timer
// must start when listening starts, not at construction time.
mElapsedTimer.start();
}
@@ -49,6 +69,9 @@ void AiEngine::stop(void)
void AiEngine::inferenceResultsReceivedSlot(AiEngineInferenceResult result)
{
mFrameCounter++;
qDebug() << "FPS = " << (mFrameCounter / (mElapsedTimer.elapsed()/1000.0f));
//qDebug() << "AiEngine got inference results in thread: " << QThread::currentThreadId();
mGimbalClient->inferenceResultSlot(result);
cv::imshow("Received Frame", result.frame);
@@ -64,4 +87,14 @@ void AiEngine::frameReceivedSlot(cv::Mat frame)
//qDebug() << "AiEngine. Inference thread is free. Sending frame to it.";
emit inferenceFrame(frame.clone());
}
#ifdef OPI5_BUILD
else if (mInference2->isActive() == false) {
//qDebug() << "AiEngine. Inference thread is free. Sending frame to it.";
emit inferenceFrame2(frame.clone());
}
else if (mInference3->isActive() == false) {
//qDebug() << "AiEngine. Inference thread is free. Sending frame to it.";
emit inferenceFrame3(frame.clone());
}
#endif
}