1 Commits

Author SHA1 Message Date
Alex Bezdieniezhnykh 5f22931e0d use INFERENCE_SQUARE_WIDTH and INFERENCE_SQUARE_HEIGHT defined in aiengineinference.h for resizing in opi5 (set them to 1280)
reorganised logs for inference output, remove some logs for more clear output
2024-10-25 17:05:16 +03:00
25 changed files with 313 additions and 607 deletions
-7
View File
@@ -32,13 +32,6 @@ wget https://github.com/mavlink/MAVSDK/releases/download/v2.12.10/libmavsdk-dev_
sudo dpkg -i libmavsdk-dev_2.12.10_ubuntu22.04_amd64.deb
```
## Install MAVSDK for Ubuntu 24.04 (libmavsdk-dev3.0.0)
```bash
wget https://github.com/mavlink/MAVSDK/releases/download/v3.0.0/libmavsdk-dev_3.0.0_ubuntu24.04_amd64.deb
sudo dpkg -i libmavsdk-dev_3.0.0_ubuntu24.04_amd64.deb
```
## Install ONNX Runtime for Ubuntu (not required for embedded platforms)
### With GPU inference
+1 -1
View File
@@ -1,4 +1,4 @@
QT += core network
QT += core network serialport
QT -= gui
CONFIG += concurrent console c++17
MOC_DIR = moc
+18 -13
View File
@@ -3,9 +3,7 @@
#include "aiengine.h"
#include "aiengineinference.h"
#ifdef SAVE_IMAGES
#include "aiengineimagesaver.h"
#endif
#if defined(OPI5_BUILD)
#include "src-opi5/aiengineinferenceopi5.h"
@@ -19,11 +17,8 @@
AiEngine::AiEngine(QString modelPath, QObject *parent) :
QObject{parent},
mRtspFrameCounter(0),
mInferenceFrameCounter(0)
AiEngine::AiEngine(QString modelPath, QObject *parent)
: QObject{parent}
{
mRtspListener = new AiEngineRtspListener(this);
connect(mRtspListener, &AiEngineRtspListener::frameReceived, this, &AiEngine::frameReceivedSlot);
@@ -74,7 +69,7 @@ AiEngine::AiEngine(QString modelPath, QObject *parent) :
void AiEngine::start(void)
{
mRtspListener->startListening();
mRtspElapsedTimer.start();
mElapsedTimer.start();
}
@@ -86,15 +81,16 @@ void AiEngine::stop(void)
void AiEngine::inferenceResultsReceivedSlot(AiEngineInferenceResult result)
{
mInferenceFrameCounter++;
float fps =mRtspElapsedTimer.elapsed() == 0 ? 0 : (mInferenceFrameCounter / (mRtspElapsedTimer.elapsed()/1000.0f));
printf("Analyzed %d/%d frames with AI. FPS=%.1f\n", mInferenceFrameCounter, mRtspFrameCounter, fps);
mFrameCounter++;
//qDebug() << "FPS = " << (mFrameCounter / (mElapsedTimer.elapsed()/1000.0f));
//qDebug() << "DEBUG. inference frame counter:" << mFrameCounter;
//qDebug() << "AiEngine got inference results in thread: " << QThread::currentThreadId();
if (mGimbalClient != nullptr) {
mGimbalClient->inferenceResultSlot(result);
}
cv::imshow("AI Engine", result.frame);
cv::imshow("Received Frame", result.frame);
#ifdef SAVE_IMAGES
static int imageCounter = 0;
@@ -107,17 +103,26 @@ void AiEngine::inferenceResultsReceivedSlot(AiEngineInferenceResult result)
void AiEngine::frameReceivedSlot(cv::Mat frame)
{
mRtspFrameCounter++;
//qDebug() << "AiEngine got frame from RTSP listener in thread: " << QThread::currentThreadId();
//cv::imshow("Received Frame", frame);
static int framecounter = 0;
//qDebug() << "DEBUG. RTSP frame counter:" << framecounter;
if (mInference->isActive() == false) {
//qDebug() << "AiEngine. Inference thread is free. Sending frame to it.";
emit inferenceFrame(frame);
framecounter++;
}
#ifdef OPI5_BUILD
else if (mInference2->isActive() == false) {
//qDebug() << "AiEngine. Inference thread is free. Sending frame to it.";
emit inferenceFrame2(frame);
framecounter++;
}
else if (mInference3->isActive() == false) {
//qDebug() << "AiEngine. Inference thread is free. Sending frame to it.";
emit inferenceFrame3(frame);
framecounter++;
}
#endif
}
+3 -4
View File
@@ -1,7 +1,7 @@
#pragma once
#include <QElapsedTimer>
#include <QObject>
#include <QElapsedTimer>
#include <opencv2/core.hpp>
#include <opencv2/videoio.hpp>
#include "aienginertsplistener.h"
@@ -26,9 +26,8 @@ signals:
void inferenceFrame3(cv::Mat frame);
private:
uint32_t mRtspFrameCounter;
uint32_t mInferenceFrameCounter;
QElapsedTimer mRtspElapsedTimer;
QElapsedTimer mElapsedTimer;
uint32_t mFrameCounter = 0;
AiEngineRtspListener *mRtspListener;
AiEngineInference *mInference;
AiEngineInference *mInference2;
+5 -65
View File
@@ -11,12 +11,10 @@ AiEngineGimbalServer::AiEngineGimbalServer(QObject *parent)
mIsAvailable = false;
qDebug() << "Initial is available: " << mIsAvailable;
// Commented out, might be required later
// Making camera available is currently done in processUdpCommands() with command bringCameraDown
//QTimer::singleShot(5000, this, [this]() {
// mIsAvailable = true;
// qDebug() << "Initial is available: " << mIsAvailable;
//});
QTimer::singleShot(5000, this, [this]() {
mIsAvailable = true;
qDebug() << "Initial is available: " << mIsAvailable;
});
mActions.setup(&mUdpSocket, &mUdpCommand, &mUdpResponse, &mGimbalStatus);
@@ -28,13 +26,6 @@ AiEngineGimbalServer::AiEngineGimbalServer(QObject *parent)
mDronePosition.yaw = 90.0;
connect(&mActions, &AiEngineGimbalServerActions::aiTargetZoomed, this, &AiEngineGimbalServer::aiTargetZoomed);
// Create and bind the new UDP socket for receiving commands
mReceiveUdpSocket = new QUdpSocket(this);
mReceiveUdpSocket->bind(QHostAddress::LocalHost, 45454);
// Connect the socket to handle incoming messages
connect(mReceiveUdpSocket, &QUdpSocket::readyRead, this, &AiEngineGimbalServer::processUdpCommands);
}
@@ -92,10 +83,7 @@ void AiEngineGimbalServer::zoomToAiTargetSlot(AiEngineCameraTarget target)
// Allow calls
int delay4 = delay3 + 3000; // Adjust this value as needed
QTimer::singleShot(delay4, this, [this]() {
// Only make camera available for commands if it is allowed
if (mActions.getAllowCameraCommands() == true) {
mIsAvailable = true;
}
mIsAvailable = true;
qDebug() << "Is available: " << mIsAvailable;
});
}
@@ -113,51 +101,3 @@ void AiEngineGimbalServer::cameraPositionSlot(AiEngineCameraPosition position)
{
qDebug() << "AiEngineGimbalServer::cameraPositionSlot() Move camera to:" << position.pitch << position.yaw << "zoom:" << position.zoom;
}
void AiEngineGimbalServer::processUdpCommands(void) {
while (mReceiveUdpSocket->hasPendingDatagrams()) {
QByteArray datagram;
datagram.resize(mReceiveUdpSocket->pendingDatagramSize());
mReceiveUdpSocket->readDatagram(datagram.data(), datagram.size());
QString command = QString::fromUtf8(datagram);
qDebug() << "Received command:" << command;
if (command == "bringCameraDown") {
mActions.setAllowCameraCommands(true); // This will allow other camera commands
mActions.goToInitialOrientation();
// 10 second delay to let camera get ready for AI
QTimer::singleShot(10000, this, [this]() {
mIsAvailable = true;
qDebug() << "Camera set available and to initial position";
});
}
else if (command == "bringCameraUp") {
// No delay, set camera unavailable
mActions.setAllowCameraCommands(false); // This will prevent any ongoing commands
mIsAvailable = false;
mActions.goToInitialOrientation();
qDebug() << "Camera set unavailable and to initial position";
}
}
/*
// How to call this:
#include <QUdpSocket>
void sendCommand(const QString &command) {
QUdpSocket udpSocket;
QByteArray datagram = command.toUtf8();
udpSocket.writeDatagram(datagram, QHostAddress::LocalHost, 45454);
}
int main() {
sendCommand("bringCameraDown");
sendCommand("bringCameraUp");
return 0;
}
*/
}
-5
View File
@@ -2,7 +2,6 @@
#include <QObject>
#include <QMap>
#include <QUdpSocket>
#include "aienginedefinitions.h"
#include "aienginegimbalserveractions.h"
#include "aienginegimbalserverudpcommand.h"
@@ -35,8 +34,4 @@ private:
AiEngineGimbalServerUDPResponse mUdpResponse;
AiEngineGimbalServerActions mActions;
bool mIsAvailable;
QUdpSocket mReceiveUdpSocket; // UDP socket for receiving commands
private slots:
void processPendingDatagrams(void); // Handles incoming UDP messages
};
+70 -98
View File
@@ -17,7 +17,6 @@ void AiEngineGimbalServerActions::setup(AiEngineGimbalServerUDP *udpSocket, AiEn
mUdpCommand = udpCommand;
mUdpResponse = udpResponse;
mGimbalStatus = gimbalStatus;
mAllowCameraCommands = false;
// Set initial position and update status
QByteArray tempCommand;
@@ -69,8 +68,24 @@ void AiEngineGimbalServerActions::setup(AiEngineGimbalServerUDP *udpSocket, AiEn
float maxZoom = responseValues["zoom"].toFloat();
mGimbalStatus->maxZoom = maxZoom > 10 ? 10 : maxZoom;
// Go to initial orientation and zoom
goToInitialOrientation();
// Go to initial orientation
tempCommand = mUdpCommand->getCommand(UDP_COMMAND_ID::TURN_TO_DEGREES);
int16_t degreesVal = AI_ENGINE_GIMBAL_INITIAL_YAW * 10;
tempCommand[8] = degreesVal & 0xFF;
tempCommand[9] = degreesVal >> 8;
degreesVal = AI_ENGINE_GIMBAL_INITIAL_PITCH * 10;
tempCommand[10] = degreesVal & 0xFF;
tempCommand[11] = degreesVal >> 8;
mUdpSocket->sendCommand(tempCommand);
// Go to initial zoom
tempCommand = mUdpCommand->getCommand(UDP_COMMAND_ID::ZOOM_TO_X);
uint8_t integerPart = static_cast<uint8_t>(AI_ENGINE_CAMERA_INITIAL_ZOOM);
float fractionalPart = AI_ENGINE_CAMERA_INITIAL_ZOOM - integerPart;
uint8_t scaledFractional = uint8_t(fractionalPart * 10);
tempCommand[8] = integerPart;
tempCommand[9] = scaledFractional;
mUdpSocket->sendCommand(tempCommand);
mGimbalStatus->currentPitch = AI_ENGINE_GIMBAL_INITIAL_PITCH;
mGimbalStatus->currentRoll = AI_ENGINE_GIMBAL_INITIAL_ROLL;
@@ -163,56 +178,52 @@ void AiEngineGimbalServerActions::getAnglesToOnScreenTarget(int targetX, int tar
void AiEngineGimbalServerActions::turnToTarget(AiEngineRectangleProperties rectangle)
{
if (mAllowCameraCommands == true) {
qDebug().noquote().nospace() << "Turning to target";
qDebug().noquote().nospace() << "Turning to target";
float resultYaw = 0.0f;
float resultPitch = 0.0f;
getAnglesToOnScreenTarget(rectangle.middleX, rectangle.middleY, resultYaw, resultPitch);
float resultYaw = 0.0f;
float resultPitch = 0.0f;
getAnglesToOnScreenTarget(rectangle.middleX, rectangle.middleY, resultYaw, resultPitch);
QByteArray serialCommandTurn = mUdpCommand->getCommand(UDP_COMMAND_ID::TURN_TO_PIXEL);
QByteArray serialCommandTurn = mUdpCommand->getCommand(UDP_COMMAND_ID::TURN_TO_PIXEL);
int16_t degreesVal = resultYaw * 10;
serialCommandTurn[8] = degreesVal & 0xFF;
serialCommandTurn[9] = degreesVal >> 8;
int16_t degreesVal = resultYaw * 10;
serialCommandTurn[8] = degreesVal & 0xFF;
serialCommandTurn[9] = degreesVal >> 8;
degreesVal = resultPitch * 10;
serialCommandTurn[10] = degreesVal & 0xFF;
serialCommandTurn[11] = degreesVal >> 8;
degreesVal = resultPitch * 10;
serialCommandTurn[10] = degreesVal & 0xFF;
serialCommandTurn[11] = degreesVal >> 8;
mUdpSocket->sendCommand(serialCommandTurn);
}
mUdpSocket->sendCommand(serialCommandTurn);
}
void AiEngineGimbalServerActions::zoomToTarget(AiEngineRectangleProperties rectangle)
{
if (mAllowCameraCommands == true) {
qDebug().noquote().nospace() << "Zooming to target";
qDebug().noquote().nospace() << "Zooming to target";
float fillRatio = 0.5;
float targetPixelWidth = rectangle.width;
float targetPixelHeight = rectangle.height;
float adjustedObjectWidth = targetPixelWidth / fillRatio;
float adjustedObjectHeight = targetPixelHeight / fillRatio;
float zoomWidth = static_cast<double>(mGimbalStatus->resolutionX) / adjustedObjectWidth;
float zoomHeight = static_cast<double>(mGimbalStatus->resolutionY) / adjustedObjectHeight;
float zoom = std::min(zoomWidth, zoomHeight);
if (zoom < 1.0f) {
zoom = 1.0f;
}
QByteArray serialCommandNewZoom = mUdpCommand->getCommand(UDP_COMMAND_ID::ZOOM_TO_X);
uint8_t integerPart = static_cast<uint8_t>(zoom);
float fractionalPart = zoom - integerPart;
uint8_t scaledFractional = uint8_t(fractionalPart * 10);
serialCommandNewZoom[8] = integerPart;
serialCommandNewZoom[9] = scaledFractional;
mUdpSocket->sendCommand(serialCommandNewZoom);
float fillRatio = 0.5;
float targetPixelWidth = rectangle.width;
float targetPixelHeight = rectangle.height;
float adjustedObjectWidth = targetPixelWidth / fillRatio;
float adjustedObjectHeight = targetPixelHeight / fillRatio;
float zoomWidth = static_cast<double>(mGimbalStatus->resolutionX) / adjustedObjectWidth;
float zoomHeight = static_cast<double>(mGimbalStatus->resolutionY) / adjustedObjectHeight;
float zoom = std::min(zoomWidth, zoomHeight);
if (zoom < 1.0f) {
zoom = 1.0f;
}
QByteArray serialCommandNewZoom = mUdpCommand->getCommand(UDP_COMMAND_ID::ZOOM_TO_X);
uint8_t integerPart = static_cast<uint8_t>(zoom);
float fractionalPart = zoom - integerPart;
uint8_t scaledFractional = uint8_t(fractionalPart * 10);
serialCommandNewZoom[8] = integerPart;
serialCommandNewZoom[9] = scaledFractional;
mUdpSocket->sendCommand(serialCommandNewZoom);
}
@@ -240,28 +251,26 @@ void AiEngineGimbalServerActions::getLocation(AiEngineDronePosition dronePositio
void AiEngineGimbalServerActions::restoreOrientationAndZoom(AiEngineGimbalStatus gimbalStatus)
{
if (mAllowCameraCommands == true) {
QByteArray tempCommand;
QByteArray tempCommand;
// Go to initial orientation
tempCommand = mUdpCommand->getCommand(UDP_COMMAND_ID::TURN_TO_DEGREES);
int16_t degreesVal = gimbalStatus.currentYaw * 10;
tempCommand[8] = degreesVal & 0xFF;
tempCommand[9] = degreesVal >> 8;
degreesVal = gimbalStatus.currentPitch * 10;
tempCommand[10] = degreesVal & 0xFF;
tempCommand[11] = degreesVal >> 8;
mUdpSocket->sendCommand(tempCommand);
// Go to initial orientation
tempCommand = mUdpCommand->getCommand(UDP_COMMAND_ID::TURN_TO_DEGREES);
int16_t degreesVal = gimbalStatus.currentYaw * 10;
tempCommand[8] = degreesVal & 0xFF;
tempCommand[9] = degreesVal >> 8;
degreesVal = gimbalStatus.currentPitch * 10;
tempCommand[10] = degreesVal & 0xFF;
tempCommand[11] = degreesVal >> 8;
mUdpSocket->sendCommand(tempCommand);
// Go to initial zoom
tempCommand = mUdpCommand->getCommand(UDP_COMMAND_ID::ZOOM_TO_X);
uint8_t integerPart = static_cast<uint8_t>(gimbalStatus.currentZoom);
float fractionalPart = gimbalStatus.currentZoom - integerPart;
uint8_t scaledFractional = uint8_t(fractionalPart * 10);
tempCommand[8] = integerPart;
tempCommand[9] = scaledFractional;
mUdpSocket->sendCommand(tempCommand);
}
// Go to initial zoom
tempCommand = mUdpCommand->getCommand(UDP_COMMAND_ID::ZOOM_TO_X);
uint8_t integerPart = static_cast<uint8_t>(gimbalStatus.currentZoom);
float fractionalPart = gimbalStatus.currentZoom - integerPart;
uint8_t scaledFractional = uint8_t(fractionalPart * 10);
tempCommand[8] = integerPart;
tempCommand[9] = scaledFractional;
mUdpSocket->sendCommand(tempCommand);
// TODO: Maybe send signal that all done?
}
@@ -345,40 +354,3 @@ float AiEngineGimbalServerActions::degreesToRadians(float degrees)
{
return degrees * M_PI / 180.0f;
}
void AiEngineGimbalServerActions::goToInitialOrientation(void)
{
// These camera commands should always be allowed
// Go to initial orientation
tempCommand = mUdpCommand->getCommand(UDP_COMMAND_ID::TURN_TO_DEGREES);
int16_t degreesVal = AI_ENGINE_GIMBAL_INITIAL_YAW * 10;
tempCommand[8] = degreesVal & 0xFF;
tempCommand[9] = degreesVal >> 8;
degreesVal = AI_ENGINE_GIMBAL_INITIAL_PITCH * 10;
tempCommand[10] = degreesVal & 0xFF;
tempCommand[11] = degreesVal >> 8;
mUdpSocket->sendCommand(tempCommand);
// Go to initial zoom
tempCommand = mUdpCommand->getCommand(UDP_COMMAND_ID::ZOOM_TO_X);
uint8_t integerPart = static_cast<uint8_t>(AI_ENGINE_CAMERA_INITIAL_ZOOM);
float fractionalPart = AI_ENGINE_CAMERA_INITIAL_ZOOM - integerPart;
uint8_t scaledFractional = uint8_t(fractionalPart * 10);
tempCommand[8] = integerPart;
tempCommand[9] = scaledFractional;
mUdpSocket->sendCommand(tempCommand);
}
bool AiEngineGimbalServerActions::getAllowCameraCommands(void)
{
return mAllowCameraCommands;
}
void AiEngineGimbalServerActions::setAllowCameraCommands(bool allow)
{
mAllowCameraCommands = allow;
}
@@ -49,9 +49,6 @@ public slots:
void zoomToTarget(AiEngineRectangleProperties rectangle);
void getLocation(AiEngineDronePosition dronePosition, int targetIndex);
void restoreOrientationAndZoom(AiEngineGimbalStatus gimbalStatus);
void goToInitialOrientation(void);
bool getAllowCameraCommands(void);
void setAllowCameraCommands(bool allow);
signals:
void aiTargetZoomed(AiEngineTargetPosition);
@@ -61,7 +58,6 @@ private:
AiEngineGimbalServerUDPCommand *mUdpCommand;
AiEngineGimbalServerUDPResponse *mUdpResponse;
AiEngineGimbalStatus *mGimbalStatus;
bool mAllowCameraCommands;
private slots:
CameraData getCameraData(void);
+2 -2
View File
@@ -8,8 +8,8 @@
#include "aienginedefinitions.h"
const int INFERENCE_SQUARE_WIDTH = 640;
const int INFERENCE_SQUARE_HEIGHT = 640;
const int INFERENCE_SQUARE_WIDTH = 1280;
const int INFERENCE_SQUARE_HEIGHT = 1280;
class AiEngineObject {
+1 -5
View File
@@ -82,11 +82,7 @@ void AiEngineRtspListener::listenLoop(void)
#else
qDebug() << "AiEngineRtspListener loop running in thread: " << QThread::currentThreadId();
while (mCap.open(rtspVideoUrl.toStdString()) == false) {
qDebug() << "AiEngineRtspListener can't open video stream:" << rtspVideoUrl;
QThread::msleep(1000);
}
mCap.open(rtspVideoUrl.toStdString());
cv::Mat frame;
while (mIsListening) {
@@ -1,5 +1,6 @@
#include <QDebug>
#include <QThread>
#include <iostream>
#include <vector>
#include "aiengineinferencencnn.h"
@@ -22,7 +23,7 @@ char* getCharPointerCopy(const QString& modelPath) {
AiEngineInferencevNcnn::AiEngineInferencevNcnn(QString modelPath, QObject *parent) :
AiEngineInference{modelPath, parent}
{
qDebug() << "AiEngineInferencevNcnn() mModelPath=" << mModelPath;
qDebug() << "TUOMAS AiEngineInferencevNcnn() mModelPath=" << mModelPath;
yolov8.opt.num_threads = 4;
yolov8.opt.use_vulkan_compute = false;
@@ -31,6 +32,9 @@ AiEngineInferencevNcnn::AiEngineInferencevNcnn(QString modelPath, QObject *paren
char *model = getCharPointerCopy(modelPath);
char *param = getCharPointerCopy(paramPath);
qDebug() << "model:" << model;
qDebug() << "param:" << param;
yolov8.load_param(param);
yolov8.load_model(model);
}
@@ -225,6 +229,8 @@ int AiEngineInferencevNcnn::detect_yolov8(const cv::Mat& bgr, std::vector<Object
const float norm_vals[3] = {1 / 255.f, 1 / 255.f, 1 / 255.f};
in_pad.substract_mean_normalize(0, norm_vals);
auto start = std::chrono::high_resolution_clock::now();
ncnn::Extractor ex = yolov8.create_extractor();
ex.input("in0", in_pad);
@@ -277,6 +283,9 @@ int AiEngineInferencevNcnn::detect_yolov8(const cv::Mat& bgr, std::vector<Object
objects[i].rect.height = y1 - y0;
}
auto end = std::chrono::high_resolution_clock::now();
auto duration = std::chrono::duration_cast<std::chrono::milliseconds>(end - start);
std::cout << "Time taken: " << duration.count() << " milliseconds" << std::endl;
return 0;
}
@@ -331,7 +340,8 @@ static cv::Mat draw_objects(const cv::Mat& bgr, const std::vector<Object>& objec
cv::Scalar cc(color[0], color[1], color[2]);
//fprintf(stderr, "%d = %.5f at %.2f %.2f %.2f x %.2f\n", obj.label, obj.prob, obj.rect.x, obj.rect.y, obj.rect.width, obj.rect.height);
fprintf(stderr, "%d = %.5f at %.2f %.2f %.2f x %.2f\n", obj.label, obj.prob,
obj.rect.x, obj.rect.y, obj.rect.width, obj.rect.height);
cv::rectangle(image, obj.rect, cc, 2);
@@ -368,23 +378,26 @@ void AiEngineInferencevNcnn::performInferenceSlot(cv::Mat frame)
std::vector<Object> objects;
detect_yolov8(scaledImage, objects);
AiEngineInferenceResult result;
result.frame = draw_objects(scaledImage, objects);
if (objects.empty() == false) {
AiEngineInferenceResult result;
result.frame = draw_objects(scaledImage, objects);
for (uint i = 0; i < objects.size(); i++) {
const Object &detection = objects[i];
AiEngineObject object;
object.classId = detection.label;
object.classStr = mClassNames[detection.label];
object.propability = detection.prob;
object.rectangle.top = detection.rect.y;
object.rectangle.left = detection.rect.x;
object.rectangle.bottom = detection.rect.y + detection.rect.height;
object.rectangle.right = detection.rect.x + detection.rect.width;
result.objects.append(object);
for (uint i = 0; i < objects.size(); i++) {
const Object &detection = objects[i];
AiEngineObject object;
object.classId = detection.label;
object.classStr = mClassNames[detection.label];
object.propability = detection.prob;
object.rectangle.top = detection.rect.y;
object.rectangle.left = detection.rect.x;
object.rectangle.bottom = detection.rect.y + detection.rect.height;
object.rectangle.right = detection.rect.x + detection.rect.width;
result.objects.append(object);
}
emit resultsReady(result);
}
emit resultsReady(result);
mActive = false;
}
@@ -13,7 +13,7 @@ AiEngineInferencevOnnxRuntime::AiEngineInferencevOnnxRuntime(QString modelPath,
AiEngineInference{modelPath, parent},
mPredictor(modelPath.toStdString(), confThreshold, iouThreshold, maskThreshold)
{
qDebug() << "AiEngineInferencevOnnxRuntime() mModelPath=" << mModelPath;
qDebug() << "TUOMAS AiEngineInferencevOnnxRuntime() mModelPath=" << mModelPath;
qDebug() << "AiEngineInferencevOnnxRuntime() mClassNames.size() =" << mClassNames.size();
}
@@ -49,6 +49,7 @@ cv::Mat AiEngineInferencevOnnxRuntime::drawLabels(const cv::Mat &image, const st
cv::Scalar(0, 0, 0),
1,
cv::LINE_AA);
}
return result;
@@ -57,6 +58,8 @@ cv::Mat AiEngineInferencevOnnxRuntime::drawLabels(const cv::Mat &image, const st
void AiEngineInferencevOnnxRuntime::performInferenceSlot(cv::Mat frame)
{
qDebug() << __PRETTY_FUNCTION__;
try {
mActive = true;
cv::Mat scaledImage = resizeAndPad(frame);
@@ -103,8 +106,11 @@ void AiEngineInferencevOnnxRuntime::performInferenceSlot(cv::Mat frame)
result.objects.append(object);
}
result.frame = drawLabels(scaledImage, detections);
emit resultsReady(result);
if (result.objects.empty() == false) {
result.frame = drawLabels(scaledImage, detections);
emit resultsReady(result);
}
mActive = false;
}
catch (const cv::Exception& e) {
@@ -68,7 +68,7 @@ void AiEngineInferenceOpi5::freeImageBuffer(image_buffer_t& imgBuffer)
}
cv::Mat AiEngineInferenceOpi5::resizeToHalfAndAssigntoTopLeft640x640(const cv::Mat& inputFrame)
cv::Mat AiEngineInferenceOpi5::resizeAndAssignToTopLeft(const int toWidth, const int toHeight, const cv::Mat& inputFrame)
{
/*
// Resize input frame to half size
@@ -85,19 +85,17 @@ cv::Mat AiEngineInferenceOpi5::resizeToHalfAndAssigntoTopLeft640x640(const cv::M
return outputFrame;
*/
const int targetWidth = 640;
const int targetHeight = 640;
float aspectRatio = static_cast<float>(inputFrame.cols) / static_cast<float>(inputFrame.rows);
int newWidth = targetWidth;
int newHeight = static_cast<int>(targetWidth / aspectRatio);
if (newHeight > targetHeight) {
newHeight = targetHeight;
newWidth = static_cast<int>(targetHeight * aspectRatio);
int newWidth = toWidth;
int newHeight = static_cast<int>(toWidth / aspectRatio);
if (newHeight > toHeight) {
newHeight = toHeight;
newWidth = static_cast<int>(toHeight * aspectRatio);
}
cv::Mat resizedFrame;
cv::resize(inputFrame, resizedFrame, cv::Size(newWidth, newHeight));
cv::Mat outputFrame = cv::Mat::zeros(targetHeight, targetWidth, inputFrame.type());
cv::Mat outputFrame = cv::Mat::zeros(toHeight, toWidth, inputFrame.type());
cv::Rect roi(cv::Point(0, 0), resizedFrame.size());
resizedFrame.copyTo(outputFrame(roi));
@@ -116,19 +114,24 @@ void AiEngineInferenceOpi5::drawObjects(cv::Mat& image, const object_detect_resu
//continue;
}
fprintf(stderr, "Inference[%d] probability = %f\n", i, result.prop * 100);
//fprintf(stderr, "Inference[%d] probability = %f\n", i, result.prop * 100);
int left = result.box.left;
int top = result.box.top;
int right = result.box.right;
int bottom = result.box.bottom;
QString cName = mClassNames[result.cls_id % mClassNames.size()];
qDebug() << "x:" << left << "y:" << top
<< "| size:" << right - left << "," << bottom - top
<< "| " << qPrintable(cName) << ":" << (int)(result.prop * 100) << "% \n";
cv::rectangle(image, cv::Point(left, top), cv::Point(right, bottom), cv::Scalar(255, 0, 0), 2);
// Text
char c_text[256];
//sprintf(c_text, "%s %d%%", coco_cls_to_name(result.cls_id), (int)(round(result.prop * 100)));
sprintf(c_text, "%s %d%%", mClassNames[result.cls_id % mClassNames.size()].toStdString().c_str(), (int)(round(result.prop * 100)));
sprintf(c_text, "%s %d%%", cName.toStdString().c_str(), (int)(round(result.prop * 100)));
cv::Point textOrg(left, top - 5);
cv::putText(image, std::string(c_text), textOrg, cv::FONT_HERSHEY_COMPLEX, 0.70, cv::Scalar(0, 0, 255), 1, cv::LINE_AA);
}
@@ -141,7 +144,7 @@ void AiEngineInferenceOpi5::performInferenceSlot(cv::Mat frame)
mActive = true;
cv::Mat scaledFrame = resizeToHalfAndAssigntoTopLeft640x640(frame);
cv::Mat scaledFrame = resizeAndAssignToTopLeft(INFERENCE_SQUARE_WIDTH, INFERENCE_SQUARE_HEIGHT, frame);
image_buffer_t imgBuffer = convertCV2FrameToImageBuffer(scaledFrame);
object_detect_result_list od_results;
@@ -156,7 +159,6 @@ void AiEngineInferenceOpi5::performInferenceSlot(cv::Mat frame)
AiEngineInferenceResult result;
for (int i = 0; i < od_results.count; i++) {
object_detect_result *det_result = &(od_results.results[i]);
qDebug() << "____box:" << det_result->box.top << det_result->box.left << det_result->box.bottom << det_result->box.right << "\n";
AiEngineObject object;
object.classId = det_result->cls_id;
object.propability = det_result->prop;
@@ -19,7 +19,7 @@ public slots:
private:
image_buffer_t convertCV2FrameToImageBuffer(const cv::Mat& bgrFrame);
void freeImageBuffer(image_buffer_t& imgBuffer);
cv::Mat resizeToHalfAndAssigntoTopLeft640x640(const cv::Mat& inputFrame);
cv::Mat resizeAndAssignToTopLeft(const int toWidth, const int toHeight, const cv::Mat& inputFrame);
void drawObjects(cv::Mat& image, const object_detect_result_list& result_list);
rknn_app_context_t mRrknnAppCtx0;
+14 -14
View File
@@ -666,17 +666,17 @@ int convert_image(image_buffer_t* src_img, image_buffer_t* dst_img, image_rect_t
{
int ret;
printf("src width=%d height=%d fmt=0x%x virAddr=0x%p fd=%d\n",
src_img->width, src_img->height, src_img->format, src_img->virt_addr, src_img->fd);
printf("dst width=%d height=%d fmt=0x%x virAddr=0x%p fd=%d\n",
dst_img->width, dst_img->height, dst_img->format, dst_img->virt_addr, dst_img->fd);
if (src_box != NULL) {
printf("src_box=(%d %d %d %d)\n", src_box->left, src_box->top, src_box->right, src_box->bottom);
}
if (dst_box != NULL) {
printf("dst_box=(%d %d %d %d)\n", dst_box->left, dst_box->top, dst_box->right, dst_box->bottom);
}
printf("color=0x%x\n", color);
// printf("src width=%d height=%d fmt=0x%x virAddr=0x%p fd=%d\n",
// src_img->width, src_img->height, src_img->format, src_img->virt_addr, src_img->fd);
// printf("dst width=%d height=%d fmt=0x%x virAddr=0x%p fd=%d\n",
// dst_img->width, dst_img->height, dst_img->format, dst_img->virt_addr, dst_img->fd);
// if (src_box != NULL) {
// printf("src_box=(%d %d %d %d)\n", src_box->left, src_box->top, src_box->right, src_box->bottom);
// }
// if (dst_box != NULL) {
// printf("dst_box=(%d %d %d %d)\n", dst_box->left, dst_box->top, dst_box->right, dst_box->bottom);
// }
// printf("color=0x%x\n", color);
ret = convert_image_rga(src_img, dst_img, src_box, dst_box, color);
if (ret != 0) {
@@ -757,9 +757,9 @@ int convert_image_with_letterbox(image_buffer_t* src_image, image_buffer_t* dst_
dst_box.right = dst_box.left + resize_w - 1;
_left_offset = dst_box.left;
}
printf("scale=%f dst_box=(%d %d %d %d) allow_slight_change=%d _left_offset=%d _top_offset=%d padding_w=%d padding_h=%d\n",
scale, dst_box.left, dst_box.top, dst_box.right, dst_box.bottom, allow_slight_change,
_left_offset, _top_offset, padding_w, padding_h);
// printf("scale=%f dst_box=(%d %d %d %d) allow_slight_change=%d _left_offset=%d _top_offset=%d padding_w=%d padding_h=%d\n",
// scale, dst_box.left, dst_box.top, dst_box.right, dst_box.bottom, allow_slight_change,
// _left_offset, _top_offset, padding_w, padding_h);
//set offset and scale
if(letterbox != NULL){
+1 -1
View File
@@ -235,7 +235,7 @@ int inference_yolov8_model(rknn_app_context_t *app_ctx, image_buffer_t *img, obj
}
// Run
printf("rknn_run\n");
//printf("rknn_run\n");
ret = rknn_run(app_ctx->rknn_ctx, nullptr);
if (ret < 0)
{
+34 -55
View File
@@ -5,27 +5,33 @@
#include <QTimer>
#include <mavsdk/plugins/action/action.h>
#include <mavsdk/plugins/mission/mission.h>
#include <mavsdk/plugins/mission_raw/mission_raw.h>
#include <mavsdk/plugins/telemetry/telemetry.h>
#include "az_config.h"
#include "az_drone_controller.h"
AzDroneController::AzDroneController(AzMission &azMission, QObject *parent)
AzDroneController::AzDroneController(AzMission &mission, QObject *parent)
: QObject(parent)
, mMavsdk{Mavsdk::Configuration{Mavsdk::ComponentType::GroundStation}} // TODO!! Autopilot or CompanionComputer?
, mDroneState(AZ_DRONE_STATE_DISCONNECTED)
, mMissionController(nullptr)
, mAzMission(azMission)
, mMavsdk{Mavsdk::Configuration{Mavsdk::ComponentType::GroundStation}}
, // TODO!! Autopilot or CompanionComputer?
mDroneState(AZ_DRONE_STATE_DISCONNECTED)
{
mFirstPosition.relative_altitude_m = -10000;
mMissionController = new AzMissionController(mission, this);
// Mission progress from the AzMissionController. Slot will be used to find the targets later.
connect(
mMissionController,
&AzMissionController::missionProgressChanged,
this,
&AzDroneController::missionIndexChangedSlot);
// Mission controller signals end of the missions. This will be used to fly to the return point in JSON.
connect(mMissionController, &AzMissionController::finished, this, &AzDroneController::missionFinishedSlot);
// Health info update from MAVSDK.
connect(this, &AzDroneController::newHealthInfo, this, &AzDroneController::newHealthInfoSlot, Qt::QueuedConnection);
isCopterType = QCoreApplication::arguments().at(2) == "quadcopter";
}
@@ -99,13 +105,11 @@ bool AzDroneController::stateGetTelemetryModule(void)
return false;
}
// Subscribe to position and heading updates. Updates come from a different MAVSDK thread. Send position
// or heading as a signal to this class (Qt::QueuedConnection) so that it's handled in the main thread.
// Subscribe to position updates. Updates come from a different MAVSDK thread. Send position
// as a signal to this class (Qt::QueuedConnection) so that it's handled in the main thread.
qRegisterMetaType<Telemetry::Position>("Telemetry::Position");
connect(this, &AzDroneController::newPosition, this, &AzDroneController::newPositionSlot, Qt::QueuedConnection);
connect(this, &AzDroneController::newHeading, this, &AzDroneController::newHeadingSlot, Qt::QueuedConnection);
mTelemetry->subscribe_position([this](Telemetry::Position position) { emit newPosition(position); });
mTelemetry->subscribe_heading([this](Telemetry::Heading heading) { emit newHeading(heading.heading_deg); });
return true;
}
@@ -115,7 +119,12 @@ bool AzDroneController::stateGetActionModule(void)
{
mAction = new Action(mSystem);
return mAction == nullptr ? false : true;
if (mAction != nullptr) {
mMissionController->setAction(mAction);
return true;
}
return false;
}
@@ -131,16 +140,6 @@ bool AzDroneController::stateHealthOk(void)
}
bool AzDroneController::stateUploadMission(void)
{
if (mMissionController == nullptr) {
mMissionController = new AzMissionController(*mSystem.get(), mAzMission, this);
}
return mMissionController->uploadMission();
}
bool AzDroneController::stateArm(void)
{
Action::Result result = mAction->arm();
@@ -157,6 +156,11 @@ bool AzDroneController::stateArm(void)
bool AzDroneController::stateTakeoff(void)
{
// Drone never reaches the target altitude with ArduPilot. This seems
// to be a bug in MAVSDK/ArduPilot, because this doesn't happen with PX4.
// Added extra check to AzDroneController::stateFlyMission() to not start the
// mission before 90% of AZ_RELATIVE_FLY_ALTITUDE is reached.
mAction->set_takeoff_altitude(AZ_RELATIVE_FLY_ALTITUDE);
Action::Result result = mAction->takeoff();
cout << "[CONTROLLER] MAVSDK::Action::takeoff() failed. Reason: " << result << endl;
@@ -166,20 +170,14 @@ bool AzDroneController::stateTakeoff(void)
bool AzDroneController::stateFlyMission(void)
{
// Drone never reaches the target altitude with ArduPilot. This seems
// to be a bug in MAVSDK/ArduPilot, because this doesn't happen with PX4.
// Added extra check to AzDroneController::stateFlyMission() to not start the
// mission before 90% of AZ_RELATIVE_FLY_ALTITUDE is reached.
if (isCopterType && mCurrentPosition.relative_altitude_m < AZ_RELATIVE_FLY_ALTITUDE * 0.90) {
if (mCurrentPosition.relative_altitude_m < AZ_RELATIVE_FLY_ALTITUDE * 0.90) {
cout << "[CONTROLLER] 90% of the takeoff altitide is not reached yet." << endl;
return false;
}
// TODO!! Check with the team about fly altitude. Is altitudes in JSON file absolute or relative?
//float flight_altitude_abs = AZ_RELATIVE_FLY_ALTITUDE + mFirstPosition.absolute_altitude_m;
//return mMissionController->startMissions(flight_altitude_abs);
return mMissionController->startMission();
float flight_altitude_abs = AZ_RELATIVE_FLY_ALTITUDE + mFirstPosition.absolute_altitude_m;
return mMissionController->startMissions(flight_altitude_abs);
}
@@ -246,19 +244,7 @@ void AzDroneController::droneStateMachineSlot(void)
void AzDroneController::newPositionSlot(Telemetry::Position position)
{
/*
static Telemetry::Position previousPosition;
// Only print position if it has changed.
if (position == previousPosition) {
return;
}
previousPosition = position;
*/
//cout << "[CONTROLLER] GPS position: " << position.latitude_deg << ", " << position.longitude_deg
// << " Altitudes: " << position.absolute_altitude_m << ", " << position.relative_altitude_m << endl;
cout << "[CONTROLLER] GPS position " << position.latitude_deg << ", " << position.longitude_deg << endl;
// Save first position. It will be used later to set altitude for missions.
// TODO!! Probably we want to use rangefinder or at least barometer with altitude from the map later.
@@ -270,19 +256,12 @@ void AzDroneController::newPositionSlot(Telemetry::Position position)
mCurrentPosition = position;
// Send new position to the mission controller.
if (mDroneState == AZ_DRONE_STATE_FLY_MISSION && mMissionController) {
if (mDroneState == AZ_DRONE_STATE_FLY_MISSION) {
mMissionController->newPosition(position);
}
}
void AzDroneController::newHeadingSlot(double heading)
{
(void)heading;
//cout << "[CONTROLLER] Heading: " << heading << endl;
}
void AzDroneController::missionIndexChangedSlot(int currentIndex, int totalIndexes)
{
cout << "[CONTROLLER] missionIndexChanged() " << currentIndex << "/" << totalIndexes << endl;
-23
View File
@@ -5,7 +5,6 @@
#include <mavsdk/mavsdk.h>
#include <mavsdk/plugins/action/action.h>
#include <mavsdk/plugins/mission_raw/mission_raw.h>
#include <mavsdk/plugins/telemetry/telemetry.h>
#include "az_mission.h"
@@ -20,7 +19,6 @@ typedef enum {
AZ_DRONE_STATE_GOT_TELEMETRY_MODULE,
AZ_DRONE_STATE_GOT_ACTION_MODULE,
AZ_DRONE_STATE_HEALTH_OK,
AZ_DRONE_STATE_MISSION_UPLOADED,
AZ_DRONE_STATE_ARMED,
AZ_DRONE_STATE_TAKE_OFF,
AZ_DRONE_STATE_AUTO_MODE_ACTIVATED,
@@ -46,18 +44,8 @@ protected:
bool stateGetActionModule(void);
bool stateHealthOk(void);
bool stateArm(void);
bool stateUploadMission(void);
bool stateTakeoff(void);
bool stateFlyMission(void);
void setRawMission(void);
MissionRaw::MissionItem makeRawMissionItem(float latitude_deg1e7,
float longitude_deg1e7,
int32_t altitude_m,
float do_photo,
MAV_FRAME frame,
MAV_CMD command,
float p2 = 0,
float p3 = 0);
// Slots that are called by the emitted Qt signals.
protected slots:
@@ -71,9 +59,6 @@ protected slots:
// Slot that is called when the newPosition() signal below is emitted.
void newPositionSlot(Telemetry::Position);
// Slot that is called when the newHeading() signal below is emitted.
void newHeadingSlot(double heading);
// A mission file contains several action points. This is called
// when the action point is reached. Indexing starts at 1.
void missionIndexChangedSlot(int currentIndex, int totalIndexes);
@@ -90,11 +75,6 @@ signals:
// captured in the main thread to avoid threading issues.
void newPosition(Telemetry::Position);
// Signal is emitted when Ardupilot sends a new heading from the
// MAVSDK thread. Signal goes through the main event loop and is
// captured in the main thread to avoid threading issues.
void newHeading(double heading);
// If Telemetry::health_all_ok() fails, then autopilot subscripes for the healt updates to
// see exactly what is wrong. This signal is emitted to catch updates in the main thread.
void newHealthInfo(Telemetry::Health);
@@ -108,7 +88,4 @@ protected:
Telemetry::Position mFirstPosition;
Telemetry::Position mCurrentPosition;
AzMissionController *mMissionController;
bool isCopterType;
int mMissionItemSeqNum;
AzMission &mAzMission;
};
@@ -34,8 +34,7 @@ void AzDroneControllerPlane::droneStateMachineSlot(void)
{AZ_DRONE_STATE_GOT_SYSTEM, &AzDroneControllerPlane::stateGetTelemetryModule, "Getting telemetry module", 1000},
{AZ_DRONE_STATE_GOT_TELEMETRY_MODULE, &AzDroneControllerPlane::stateGetActionModule, "Getting action module", 1000},
{AZ_DRONE_STATE_GOT_ACTION_MODULE, &AzDroneControllerPlane::stateHealthOk, "Drone health OK", 1000},
{AZ_DRONE_STATE_HEALTH_OK, &AzDroneControllerPlane::stateUploadMission, "Uploading the mission", 1000},
{AZ_DRONE_STATE_MISSION_UPLOADED, &AzDroneControllerPlane::stateWaitAutoSwitch, "Waiting user to switch to ArduPilot AUTO mode", 1000},
{AZ_DRONE_STATE_HEALTH_OK, &AzDroneControllerPlane::stateWaitAutoSwitch, "User switched to AUTO mode", 1000},
{AZ_DRONE_STATE_AUTO_MODE_ACTIVATED, &AzDroneControllerPlane::stateFlyMission, "Starting the mission", 1000},
{AZ_DRONE_STATE_FLY_MISSION, nullptr, "Mission started", 0}};
+90 -135
View File
@@ -1,166 +1,121 @@
#include <QCoreApplication>
#include <QDebug>
#include <mavsdk/mavsdk.h>
#include <mavsdk/plugins/action/action.h>
#include <mavsdk/plugins/mission/mission.h>
#include <mavsdk/plugins/telemetry/telemetry.h>
#include <mavsdk/plugins/mission_raw/mission_raw.h>
#include <mavsdk/plugins/offboard/offboard.h>
#include "az_mission.h"
#include "az_mission_controller.h"
#include "az_utils.h"
AzMissionController::AzMissionController(System &system, AzMission &mission, QObject *parent)
AzMissionController::AzMissionController(AzMission &mission, QObject *parent)
: QObject(parent)
, mAzMission(mission)
, mMission(mission)
, mAction(nullptr)
, mMissionStarted(false)
, mMissionItemSeqNum(0)
, mMissionRaw(nullptr)
, mSystem(system)
, mFlyingToReturnPoint(false)
, mAbsoluteAltitude(-10000)
{}
MissionRaw::MissionItem AzMissionController::makeRawMissionItem(
float latitudeDeg1e7,
float longitudeDeg1e7,
int32_t altitude,
float doPhoto,
MAV_FRAME frame,
MAV_CMD command,
float p2,
float p3)
void AzMissionController::setAction(Action *action)
{
(void)p2;
(void)p3;
MissionRaw::MissionItem new_item{};
new_item.seq = mMissionItemSeqNum;
new_item.frame = static_cast<uint32_t>(frame);
new_item.command = static_cast<uint32_t>(command);
new_item.param1 = doPhoto;
new_item.x = latitudeDeg1e7 * 1e7;
new_item.y = longitudeDeg1e7 * 1e7;
new_item.z = altitude;
new_item.mission_type = MAV_MISSION_TYPE_MISSION;
new_item.autocontinue = 1;
if (mMissionItemSeqNum == 1) {
new_item.current = 1;
}
mMissionItemSeqNum++;
return new_item;
mAction = action;
}
bool AzMissionController::uploadMission(void)
bool AzMissionController::startMissions(float absoluteAltitude)
{
cout << "[MISSION] stateUploadMission() Setting raw mission starts" << endl;
mCurrentMissionIndex = -1;
mMissionStarted = true;
mAbsoluteAltitude = absoluteAltitude; // TODO!! Use altitudes from the JSON file.
return flyToNextMissionItem();
}
mMissionItemSeqNum = 0;
void AzMissionController::stopMissions(void)
{
// TODO!! Needs to be improved. At least send signal to AzDroneController.
mMissionStarted = false;
}
if (mMissionRaw == nullptr) {
mMissionRaw = new MissionRaw(mSystem);
bool AzMissionController::flyToNextMissionItem(void)
{
mCurrentMissionIndex++;
if (mCurrentMissionIndex == (int) mMission.getActionPoints().size()) {
mFlyingToReturnPoint = true;
qDebug() << "AzMissionController::flyToNextMissionItem() All action points handled.";
qDebug() << "AzMissionController::flyToNextMissionItem() Flying to the JSON return point.";
const AzCoordinate &coordinate = mMission.getReturnPoint();
Action::Result result
= mAction->goto_location(coordinate.latitude, coordinate.longitude, mAbsoluteAltitude, 0.0f);
if (result == Action::Result::Success) {
return true;
}
else {
cerr << "mAction->goto_location() failed. Reason: " << result << endl;
return false;
}
}
auto clearResult = mMissionRaw->clear_mission();
if (clearResult != MissionRaw::Result::Success) {
std::cout << "[MISSION] stateUploadMission() Clearing mMissionRaw failed" << std::endl;
qDebug() << "AzMissionController::flyToNextMissionItem() flying to the item" << mCurrentMissionIndex + 1 << "/"
<< mMission.getActionPoints().size();
const AzCoordinate &coordinate = mMission.getActionPoints().at(mCurrentMissionIndex).getPoint();
qDebug() << "AzMissionController::flyToNextMissionItem() navigating to point" << coordinate.latitude
<< coordinate.longitude;
Action::Result result = mAction->goto_location(coordinate.latitude, coordinate.longitude, mAbsoluteAltitude, 0.0f);
// TODO!! Check return value and print warnings and errors.
if (result == Action::Result::Success) {
emit missionProgressChanged(mCurrentMissionIndex + 1, mMission.getActionPoints().size());
return true;
}
else {
cerr << "mAction->goto_location() failed. Reason: " << result << endl;
return false;
}
auto downloadResult = mMissionRaw->download_mission();
if (downloadResult.first != MissionRaw::Result::Success) {
std::cout << "[MISSION] stateUploadMission() Downloading mission failed" << std::endl;
return false;
}
// First point in case of ArduPilot is always home
auto missionPlan = downloadResult.second;
MissionRaw::MissionItem homePoint = missionPlan[0];
missionPlan.clear();
// Going relative alt mission so we dont care about altitude
auto latDeg = homePoint.x * 1e-7;
auto lonDeg = homePoint.y * 1e-7;
cout << "[MISSION] Home location: " << latDeg << ", " << lonDeg << endl;
// In case of ardupilot we want to set lat lon to 0, to use current position as takeoff position
missionPlan.push_back(makeRawMissionItem(
0, // lat
0, // lon
50.0,
0,
MAV_FRAME_GLOBAL_RELATIVE_ALT,
MAV_CMD_NAV_TAKEOFF));
// Setup speed during mission execution
missionPlan.push_back(makeRawMissionItem(
0, 0, 0, 0, MAV_FRAME_GLOBAL_RELATIVE_ALT, MAV_CMD_DO_CHANGE_SPEED, 9.35f, -1.0f));
// Action points
const vector<AzActionPoint> &azMissions = mAzMission.getActionPoints();
for (uint i = 0; i < azMissions.size(); i++) {
missionPlan.push_back(makeRawMissionItem(
azMissions[i].getPoint().latitude,
azMissions[i].getPoint().longitude,
azMissions[i].getHeight(),
0,
MAV_FRAME_GLOBAL_TERRAIN_ALT,
MAV_CMD_NAV_WAYPOINT));
}
// Landing point
missionPlan.push_back(makeRawMissionItem(
mAzMission.getReturnPoint().latitude,
mAzMission.getReturnPoint().longitude,
50.00,
0,
MAV_FRAME_GLOBAL_TERRAIN_ALT,
MAV_CMD_NAV_LAND));
//missionPlan.push_back(makeRawMissionItem(0, 0, 0, 0, MAV_FRAME_GLOBAL_INT, MAV_CMD_NAV_RETURN_TO_LAUNCH));
for (const auto& item : missionPlan) {
std::cout << "[MISSION] stateUploadMission() seq: " << (int)item.seq << '\n';
}
auto uploadResult = mMissionRaw->upload_mission(missionPlan);
if (uploadResult != MissionRaw::Result::Success) {
std::cout << "[MISSION] stateUploadMission() upload failed. Result: " << uploadResult << std::endl;
return false;
}
mMissionRaw->set_current_mission_item(0);
cout << "[MISSION] stateUploadMission() Setting raw mission ends" << endl;
return true;
}
bool AzMissionController::startMission()
{
MissionRaw::Result startResult = mMissionRaw->start_mission();
cout << "[MISSION] Start mission result:" << startResult << endl;
return startResult == MissionRaw::Result::Success;
}
void AzMissionController::stopMission(void)
{
mMissionRaw->pause_mission();
}
void AzMissionController::newPosition(Telemetry::Position pos)
{
(void)pos;
if (mMissionStarted == false) {
return;
}
if (mFlyingToReturnPoint == true) {
const AzCoordinate &point = mMission.getReturnPoint();
float distance = AzUtils::distance(pos.latitude_deg, pos.longitude_deg, point.latitude, point.longitude);
qDebug() << "AzMissionController::newPosition() distance to return point:" << distance << " km.";
if (distance <= 0.01) {
qDebug() << "AzMissionController::newPosition() return point reached. Mission finished.";
mMissionStarted = false;
mFlyingToReturnPoint = false;
emit finished();
}
}
else {
const AzCoordinate &target = mMission.getActionPoints().at(mCurrentMissionIndex).getPoint();
float distance = AzUtils::distance(pos.latitude_deg, pos.longitude_deg, target.latitude, target.longitude);
qDebug() << "AzMissionController::newPosition() distance to target:" << distance << "km.";
// TODO!! In final application we need to use the camera to find the target.
if (QCoreApplication::arguments().contains("quadcopter") && distance <= 0.01) {
qDebug() << "AzMissionController::newPosition() target reached. Continuing to the next item.";
flyToNextMissionItem();
}
else if (QCoreApplication::arguments().contains("plane")) {
if (mPlaneCirclingTime.isValid() == false && distance <= 0.1) {
qDebug() << "AzMissionController::newPosition() target reached. Starting circling.";
mPlaneCirclingTime.restart();
}
else if (mPlaneCirclingTime.elapsed() > 35000) {
qDebug() << "AzMissionController::newPosition() target reached. Ending circling.";
mPlaneCirclingTime.invalidate();
flyToNextMissionItem();
}
}
}
}
+13 -20
View File
@@ -1,12 +1,12 @@
#pragma once
#include <QObject>
#include <QElapsedTimer>
#include <mavsdk/mavsdk.h>
#include <mavsdk/plugins/mission_raw/mission_raw.h>
#include <mavsdk/plugins/telemetry/telemetry.h>
#include "az_mission.h"
#include "plugins/action/action.h"
using namespace mavsdk;
@@ -14,20 +14,10 @@ class AzMissionController : public QObject
{
Q_OBJECT
public:
explicit AzMissionController(System &system, AzMission &mission, QObject *parent = nullptr);
bool startMission();
void stopMission(void);
bool uploadMission(void);
private:
MissionRaw::MissionItem makeRawMissionItem(float latitudeDeg1e7,
float longitudeDeg1e7,
int32_t altitude,
float doPhoto,
MAV_FRAME frame,
MAV_CMD command,
float p2 = 0,
float p3 = 0);
explicit AzMissionController(AzMission &mission, QObject *parent = nullptr);
void setAction(Action *action);
bool startMissions(float absoluteAltitude);
void stopMissions(void);
public slots:
void newPosition(Telemetry::Position position);
@@ -37,10 +27,13 @@ signals:
void finished(void);
private:
AzMission &mAzMission;
bool flyToNextMissionItem(void);
AzMission &mMission;
Action *mAction;
int mCurrentMissionIndex;
bool mMissionStarted;
int mMissionItemSeqNum;
MissionRaw *mMissionRaw;
System &mSystem;
bool mFlyingToReturnPoint;
float mAbsoluteAltitude;
QElapsedTimer mPlaneCirclingTime;
};
+1 -1
View File
@@ -25,7 +25,7 @@ int main(int argc, char *argv[])
qCritical() << "\tFirst argument: mission JSON file.";
qCritical() << "\tSecond argument: \"quadcopter\" or \"plane\".";
qCritical() << "\tThird argument: \"udp\" or \"serial\" for connection type.";
qCritical() << "\tFor example ./drone_controller mission.json plane udp";
qCritical() << "\tFor example ./autopilot mission.json plane udp";
return 1;
}
+5 -5
View File
@@ -7,11 +7,11 @@
]
},
"action_points": [
{"point": [-35.35967231, 149.16146047], "height": 50, "action_enum": "waypoint"},
{"point": [-35.35973951, 149.16801175], "height": 50, "action_enum": "search", "action_specific": {"targets": ["artillery"]}},
{"point": [-35.36408532, 149.16816283], "height": 50, "action_enum": "search", "action_specific": {"targets": ["artillery", "tank"]}},
{"point": [-35.36546294, 149.16479791], "height": 50, "action_enum": "search", "action_specific": {"targets": ["tank"]}},
{"point": [-35.36364851, 149.16073255], "height": 50, "action_enum": "return"}
{"point": [-35.35967231, 149.16146047], "height": 500, "action_enum": "waypoint"},
{"point": [-35.35973951, 149.16801175], "height": 400, "action_enum": "search", "action_specific": {"targets": ["artillery"]}},
{"point": [-35.36408532, 149.16816283], "height": 300, "action_enum": "search", "action_specific": {"targets": ["artillery", "tank"]}},
{"point": [-35.36546294, 149.16479791], "height": 300, "action_enum": "search", "action_specific": {"targets": ["tank"]}},
{"point": [-35.36364851, 149.16073255], "height": 500, "action_enum": "return"}
],
"return_point": [-35.36286449, 149.16534729]
}
@@ -1,15 +0,0 @@
QT = core
CONFIG += c++17 cmdline
# You can make your code fail to compile if it uses deprecated APIs.
# In order to do so, uncomment the following line.
#DEFINES += QT_DISABLE_DEPRECATED_BEFORE=0x060000 # disables all the APIs deprecated before Qt 6.0.0
SOURCES += \
main.cpp
# Default rules for deployment.
qnx: target.path = /tmp/$${TARGET}/bin
else: unix:!android: target.path = /opt/$${TARGET}/bin
!isEmpty(target.path): INSTALLS += target
-99
View File
@@ -1,99 +0,0 @@
#include <QCoreApplication>
#include <QtCore>
#include <cmath>
#include <iostream>
// One map tile recovered from an image filename: grid position plus the
// geographic bounding box, with the center point derived up front so the
// nearest-tile sort never recomputes it.
struct Tile {
    int row;
    int column;
    double topLeftLat;
    double topLeftLon;
    double bottomRightLat;
    double bottomRightLon;
    double centerLat;
    double centerLon;

    // Center is the midpoint of the bounding-box corners, fixed at construction.
    Tile(int r, int c, double tlLat, double tlLon, double brLat, double brLon)
        : row(r)
        , column(c)
        , topLeftLat(tlLat)
        , topLeftLon(tlLon)
        , bottomRightLat(brLat)
        , bottomRightLon(brLon)
        , centerLat((tlLat + brLat) / 2.0)
        , centerLon((tlLon + brLon) / 2.0)
    {}
};
// Great-circle distance between two geographic points on a spherical
// Earth model (haversine formula).
//
// @param lat1,lon1  first point, degrees
// @param lat2,lon2  second point, degrees
// @return           distance in kilometers
//
// Uses a plain <cmath> degree->radian conversion instead of Qt's
// qDegreesToRadians (which is the same deg * pi/180 factor), so this pure
// math helper carries no Qt dependency and can be reused and unit-tested
// standalone.
double haversine(double lat1, double lon1, double lat2, double lon2) {
    constexpr double R = 6371.0;                // mean Earth radius, km
    constexpr double kDegToRad = M_PI / 180.0;  // identical to qDegreesToRadians' factor

    const double dLat = (lat2 - lat1) * kDegToRad;
    const double dLon = (lon2 - lon1) * kDegToRad;

    const double a = std::sin(dLat / 2) * std::sin(dLat / 2) +
                     std::cos(lat1 * kDegToRad) * std::cos(lat2 * kDegToRad) *
                     std::sin(dLon / 2) * std::sin(dLon / 2);
    const double c = 2 * std::atan2(std::sqrt(a), std::sqrt(1 - a));
    return R * c;
}
// Builds Tile records from filenames of the form
//   map_<row>_<col>_tl_<lat>_<lon>_br_<lat>_<lon>.<ext>
// Filenames that do not match the pattern are silently skipped.
QVector<Tile> parseTiles(const QStringList &filenames) {
    static const QRegularExpression regex(R"(map_(\d+)_(\d+)_tl_([-\d\.]+)_([-\d\.]+)_br_([-\d\.]+)_([-\d\.]+)\.)");

    QVector<Tile> result;
    for (const QString &name : filenames) {
        const QRegularExpressionMatch m = regex.match(name);
        if (!m.hasMatch()) {
            continue;  // not a tile image, ignore
        }
        result.append(Tile(m.captured(1).toInt(),      // row
                           m.captured(2).toInt(),      // column
                           m.captured(3).toDouble(),   // top-left lat
                           m.captured(4).toDouble(),   // top-left lon
                           m.captured(5).toDouble(),   // bottom-right lat
                           m.captured(6).toDouble())); // bottom-right lon
    }
    return result;
}
// Scans directoryPath for tile images (*.jpg, *.png) and returns the parsed
// tiles ordered by increasing great-circle distance from the given position.
QVector<Tile> findNearestTiles(const QString &directoryPath, double currentLat, double currentLon) {
    const QDir dir(directoryPath);
    const QStringList names = dir.entryList(QStringList() << "*.jpg" << "*.png", QDir::Files);

    QVector<Tile> sorted = parseTiles(names);

    // Nearest tile center first (haversine distance, km).
    auto distanceTo = [currentLat, currentLon](const Tile &t) {
        return haversine(currentLat, currentLon, t.centerLat, t.centerLon);
    };
    std::sort(sorted.begin(), sorted.end(), [&distanceTo](const Tile &a, const Tile &b) {
        return distanceTo(a) < distanceTo(b);
    });
    return sorted;
}
int main(int argc, char *argv[]) {
QCoreApplication app(argc, argv);
if (argc != 4) {
std::cerr << "Usage: " << argv[0] << " <directory> <latitude> <longitude>" << std::endl;
return -1;
}
QString directoryPath = argv[1];
double currentLat = QString(argv[2]).toDouble();
double currentLon = QString(argv[3]).toDouble();
qInfo().noquote().nospace() << "Directory: " << directoryPath;
qInfo().noquote().nospace() << "CurrentLat: " << currentLat;
qInfo().noquote().nospace() << "CurrentLon: " << currentLon;
QVector<Tile> sortedTiles = findNearestTiles(directoryPath, currentLat, currentLon);
for (const Tile &tile : sortedTiles) {
std::cout << "Row: " << tile.row << ", Column: " << tile.column
<< ", Center: (" << tile.centerLat << ", " << tile.centerLon << ")\n";
}
return 0;
}