# Source: gps-denied-desktop/docs/02_components/07_sequential_visual_odometry/geolocator.py
# Retrieved: 2025-12-01 20:21:48 +01:00 (129 lines, 5.0 KiB, Python)
import numpy as np
import math
class GeoTracker:
    """Estimate drone horizontal displacement from matched image keypoints.

    Matched keypoints on (assumed static, flat) ground are back-projected
    through a pinhole camera model onto the ground plane in the local NED
    frame; the camera translation between frames is recovered from the
    apparent shift of those ground points.
    """

    # WGS-84 equatorial radius in metres; used for the local
    # metres -> degrees conversion in calculate_movement().
    EARTH_RADIUS_M = 6378137.0

    def __init__(self, focal_length_mm, sensor_width_mm, image_width_px, image_height_px):
        """Build pinhole intrinsics from physical lens/sensor parameters.

        Args:
            focal_length_mm: Lens focal length in millimetres.
            sensor_width_mm: Physical sensor width in millimetres.
            image_width_px: Image width in pixels.
            image_height_px: Image height in pixels.
        """
        # Focal length in pixel units; square pixels assumed (fy == fx).
        self.fx = (focal_length_mm / sensor_width_mm) * image_width_px
        self.fy = self.fx
        # Principal point assumed at the image centre.
        self.cx = image_width_px / 2.0
        self.cy = image_height_px / 2.0
        self.image_w = image_width_px
        self.image_h = image_height_px
        # Standard 3x3 camera (intrinsic) matrix and its inverse, used for
        # pixel -> normalized-ray back-projection.
        self.K = np.array([
            [self.fx, 0, self.cx],
            [0, self.fy, self.cy],
            [0, 0, 1],
        ])
        self.K_inv = np.linalg.inv(self.K)

    def _get_rotation_matrix(self, yaw, pitch, roll):
        """Return the camera-to-NED rotation matrix for the given attitude.

        Euler angles are in degrees, ZYX (yaw-pitch-roll) convention,
        describing the body attitude relative to NED.

        Args:
            yaw: Heading about NED z (down), degrees.
            pitch: Rotation about intermediate y, degrees.
            roll: Rotation about body x, degrees.

        Returns:
            3x3 numpy array rotating camera-frame vectors into NED.
        """
        y, p, r = np.radians(yaw), np.radians(pitch), np.radians(roll)
        Rz = np.array([[np.cos(y), -np.sin(y), 0], [np.sin(y), np.cos(y), 0], [0, 0, 1]])
        Ry = np.array([[np.cos(p), 0, np.sin(p)], [0, 1, 0], [-np.sin(p), 0, np.cos(p)]])
        Rx = np.array([[1, 0, 0], [0, np.cos(r), -np.sin(r)], [0, np.sin(r), np.cos(r)]])
        R_body_to_ned = Rz @ Ry @ Rx
        # Fixed camera-to-body alignment for a forward-looking camera:
        # optical axis (Cam Z) -> Body X, Cam X -> Body Y, Cam Y -> Body Z.
        T_cam_to_body = np.array([
            [0, 0, 1],
            [1, 0, 0],
            [0, 1, 0],
        ])
        return R_body_to_ned @ T_cam_to_body

    def pixel_to_ned_ground(self, u, v, altitude, R_cam_to_ned):
        """Project pixel (u, v) onto the ground plane (z = 0) in NED.

        Ray-plane intersection with the camera at [0, 0, -altitude] in NED
        (flat-earth assumption).

        Args:
            u: Pixel column.
            v: Pixel row.
            altitude: Height above ground in metres (positive up).
            R_cam_to_ned: 3x3 camera-to-NED rotation (see _get_rotation_matrix).

        Returns:
            numpy array [north_m, east_m] of the ground point relative to the
            camera's horizontal position, or None when the ray does not hit
            the ground (pointing up or parallel to the horizon).
        """
        # 1. Pixel -> normalized camera ray.
        pixel_homo = np.array([u, v, 1.0])
        ray_cam = self.K_inv @ pixel_homo
        # 2. Rotate the ray into the NED frame.
        ray_ned = R_cam_to_ned @ ray_cam
        # 3. Intersect P = O + t*D with z = 0, where O = [0, 0, -altitude]:
        #    0 = -altitude + t * ray_ned.z  =>  t = altitude / ray_ned.z
        if ray_ned[2] <= 0:  # Ray pointing up or parallel to the horizon.
            return None
        t = altitude / ray_ned[2]
        # Ground point relative to the camera's horizontal position.
        north = t * ray_ned[0]
        east = t * ray_ned[1]
        return np.array([north, east])

    def calculate_movement(self, kpts0, kpts1, altitude, yaw, start_lat, start_lon,
                           gimbal_pitch_deg=-90.0, gimbal_roll_deg=0.0):
        """Estimate drone translation from matched keypoints on static ground.

        Each (kpts0[i], kpts1[i]) pair is assumed to image the SAME physical
        ground point in two consecutive frames, so the camera movement equals
        the point's position relative to camera 0 minus its position relative
        to camera 1 (p0 - p1), NOT p1 - p0.

        Args:
            kpts0: Sequence of (u, v) pixel keypoints in the first frame.
            kpts1: Matched (u, v) keypoints in the second frame (same length).
            altitude: Height above ground in metres (positive).
            yaw: Heading in degrees (NED convention).
            start_lat: Latitude at the first frame, degrees.
            start_lon: Longitude at the first frame, degrees.
            gimbal_pitch_deg: Camera pitch in degrees; default -90.0 keeps the
                original hard-coded nadir (straight-down) view.
            gimbal_roll_deg: Camera roll in degrees; default 0.0.

        Returns:
            Dict with 'move_north_m', 'move_east_m', 'new_lat', 'new_lon' and
            'ground_points' (number of valid pairs), or None when no keypoint
            pair projects onto the ground.
        """
        R = self._get_rotation_matrix(yaw, gimbal_pitch_deg, gimbal_roll_deg)
        shifts_n = []
        shifts_e = []
        # Subsample to at most ~50 pairs to bound per-frame cost.
        step = max(1, len(kpts0) // 50)
        for i in range(0, len(kpts0), step):
            p0 = self.pixel_to_ned_ground(kpts0[i][0], kpts0[i][1], altitude, R)
            p1 = self.pixel_to_ned_ground(kpts1[i][0], kpts1[i][1], altitude, R)
            if p0 is None or p1 is None:
                continue
            # Static-ground assumption: camera movement = p0 - p1.
            diff = p0 - p1
            shifts_n.append(diff[0])
            shifts_e.append(diff[1])
        if not shifts_n:
            return None
        # Median is robust to outlier matches, unlike the mean.
        avg_n = float(np.median(shifts_n))
        avg_e = float(np.median(shifts_e))
        # Local metres -> degrees conversion (small-displacement
        # spherical-earth approximation).
        d_lat = (avg_n / self.EARTH_RADIUS_M) * (180 / math.pi)
        d_lon = (avg_e / (self.EARTH_RADIUS_M * math.cos(math.radians(start_lat)))) * (180 / math.pi)
        return {
            'move_north_m': avg_n,
            'move_east_m': avg_e,
            'new_lat': start_lat + d_lat,
            'new_lon': start_lon + d_lon,
            'ground_points': len(shifts_n),
        }