Small fixes to AI Controller

- removed QtSerialPort from the Qt CONFIG parameters
- removed compiler warnings
- reduced logging
- fixed FPS counter to show the frame rate the AI actually analyzed
- RTSP reader tries to connect to the stream once per second until it succeeds
This commit is contained in:
Tuomas Järvinen
2024-10-24 18:36:12 +02:00
parent 45c19baa45
commit e3643ea622
5 changed files with 26 additions and 32 deletions
@@ -13,7 +13,7 @@ AiEngineInferencevOnnxRuntime::AiEngineInferencevOnnxRuntime(QString modelPath,
AiEngineInference{modelPath, parent},
mPredictor(modelPath.toStdString(), confThreshold, iouThreshold, maskThreshold)
{
qDebug() << "TUOMAS AiEngineInferencevOnnxRuntime() mModelPath=" << mModelPath;
qDebug() << "AiEngineInferencevOnnxRuntime() mModelPath=" << mModelPath;
qDebug() << "AiEngineInferencevOnnxRuntime() mClassNames.size() =" << mClassNames.size();
}
@@ -49,7 +49,6 @@ cv::Mat AiEngineInferencevOnnxRuntime::drawLabels(const cv::Mat &image, const st
cv::Scalar(0, 0, 0),
1,
cv::LINE_AA);
}
return result;
@@ -58,8 +57,6 @@ cv::Mat AiEngineInferencevOnnxRuntime::drawLabels(const cv::Mat &image, const st
void AiEngineInferencevOnnxRuntime::performInferenceSlot(cv::Mat frame)
{
qDebug() << __PRETTY_FUNCTION__;
try {
mActive = true;
cv::Mat scaledImage = resizeAndPad(frame);
@@ -106,11 +103,8 @@ void AiEngineInferencevOnnxRuntime::performInferenceSlot(cv::Mat frame)
result.objects.append(object);
}
if (result.objects.empty() == false) {
result.frame = drawLabels(scaledImage, detections);
emit resultsReady(result);
}
result.frame = drawLabels(scaledImage, detections);
emit resultsReady(result);
mActive = false;
}
catch (const cv::Exception& e) {