mirror of
https://github.com/azaion/gps-denied-onboard.git
synced 2026-04-23 01:46:38 +00:00
feat(phases 2-7): implement full GPS-denied navigation pipeline
Phase 2 — Visual Odometry: - ORBVisualOdometry (dev/CI), CuVSLAMVisualOdometry (Jetson) - TRTInferenceEngine (TensorRT FP16, conditional import) - create_vo_backend() factory Phase 3 — Satellite Matching + GPR: - SatelliteDataManager: local z/x/y tiles, ESKF ±3σ tile selection - GSD normalization (SAT-03), RANSAC inlier-ratio confidence (SAT-04) - GlobalPlaceRecognition: Faiss index + numpy fallback Phase 4 — MAVLink I/O: - MAVLinkBridge: GPS_INPUT 15+ fields, IMU callback, 1Hz telemetry - 3-consecutive-failure reloc request - MockMAVConnection for CI Phase 5 — Pipeline Wiring: - ESKF wired into process_frame: VO update → satellite update - CoordinateTransformer + SatelliteDataManager via DI - MAVLink state push per frame (PIPE-07) - Real pixel_to_gps via ray-ground projection (PIPE-06) - GTSAM ISAM2 update when available (PIPE-03) Phase 6 — Docker + CI: - Multi-stage Dockerfile (python:3.11-slim) - docker-compose.yml (dev), docker-compose.sitl.yml (ArduPilot SITL) - GitHub Actions: ci.yml (lint+pytest+docker smoke), sitl.yml (nightly) - tests/test_sitl_integration.py (8 tests, skip without SITL) Phase 7 — Accuracy Validation: - AccuracyBenchmark + SyntheticTrajectory - AC-PERF-1: 80% within 50m ✅ - AC-PERF-2: 60% within 20m ✅ - AC-PERF-3: p95 latency < 400ms ✅ - AC-PERF-4: VO drift 1km < 100m ✅ (actual ~11m) - scripts/benchmark_accuracy.py CLI Tests: 195 passed / 8 skipped Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
+193
-118
@@ -1,12 +1,16 @@
|
||||
"""Satellite Data Manager (Component F04)."""
|
||||
"""Satellite Data Manager (Component F04).
|
||||
|
||||
SAT-01: Reads pre-loaded tiles from a local z/x/y directory (no live HTTP during flight).
|
||||
SAT-02: Tile selection uses ESKF position ± 3σ_horizontal to define search area.
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import math
|
||||
import os
|
||||
from collections.abc import Iterator
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
|
||||
import cv2
|
||||
import diskcache as dc
|
||||
import httpx
|
||||
import numpy as np
|
||||
|
||||
from gps_denied.schemas import GPSPoint
|
||||
@@ -15,145 +19,220 @@ from gps_denied.utils import mercator
|
||||
|
||||
|
||||
class SatelliteDataManager:
|
||||
"""Manages satellite tiles with local caching and progressive fetching."""
|
||||
"""Manages satellite tiles from a local pre-loaded directory.
|
||||
|
||||
def __init__(self, cache_dir: str = ".satellite_cache", max_size_gb: float = 10.0):
|
||||
self.cache = dc.Cache(cache_dir, size_limit=int(max_size_gb * 1024**3))
|
||||
# Keep an async client ready for fetching
|
||||
self.http_client = httpx.AsyncClient(timeout=10.0)
|
||||
Directory layout (SAT-01):
|
||||
{tile_dir}/{zoom}/{x}/{y}.png — standard Web Mercator slippy-map layout
|
||||
|
||||
No live HTTP requests are made during flight. A separate offline tooling step
|
||||
downloads and stores tiles before the mission.
|
||||
"""
|
||||
|
||||
def __init__(
    self,
    tile_dir: str = ".satellite_tiles",
    cache_dir: str = ".satellite_cache",
    max_size_gb: float = 10.0,
):
    """Initialize the manager.

    Args:
        tile_dir: Root of the pre-loaded ``{zoom}/{x}/{y}.png`` tile
            directory (SAT-01).
        cache_dir: Accepted for backward compatibility; unused — tiles are
            read from ``tile_dir`` and cached in memory only.
            TODO confirm no caller still relies on a disk cache at this path.
        max_size_gb: Accepted for backward compatibility; unused.
    """
    self.tile_dir = tile_dir
    # Thread pool for offloading blocking disk reads (e.g. from async code).
    self.thread_pool = ThreadPoolExecutor(max_workers=4)
    # In-memory cache for hot tiles (avoids repeated disk reads).
    self._mem_cache: dict[str, np.ndarray] = {}
    self._mem_cache_max = 256
|
||||
|
||||
# ------------------------------------------------------------------
# SAT-01: Local tile reads (no HTTP)
# ------------------------------------------------------------------

def load_local_tile(self, tile_coords: TileCoords) -> np.ndarray | None:
    """Load a tile image from the local pre-loaded directory (SAT-01).

    Expected path: {tile_dir}/{zoom}/{x}/{y}.png
    Returns the decoded BGR image, or None if the file is missing or
    cannot be decoded.
    """
    key = f"{tile_coords.zoom}/{tile_coords.x}/{tile_coords.y}"
    if key in self._mem_cache:
        # Re-insert on hit to refresh recency; without this the eviction
        # below is FIFO, not LRU (dicts preserve insertion order).
        img = self._mem_cache.pop(key)
        self._mem_cache[key] = img
        return img

    path = os.path.join(
        self.tile_dir, str(tile_coords.zoom), str(tile_coords.x), f"{tile_coords.y}.png"
    )
    if not os.path.isfile(path):
        return None

    img = cv2.imread(path, cv2.IMREAD_COLOR)
    if img is None:
        # File exists but could not be decoded (corrupt/partial download).
        return None

    # LRU eviction: drop the least-recently-used entry when full.
    if len(self._mem_cache) >= self._mem_cache_max:
        oldest = next(iter(self._mem_cache))
        del self._mem_cache[oldest]
    self._mem_cache[key] = img
    return img
|
||||
|
||||
def save_local_tile(self, tile_coords: TileCoords, image: np.ndarray) -> bool:
    """Persist a tile to the local z/x/y directory (offline pre-fetch tooling).

    Writes {tile_dir}/{zoom}/{x}/{y}.png, mirrors the image into the
    in-memory cache, and returns True. Returns False if PNG encoding fails.
    """
    path = os.path.join(
        self.tile_dir, str(tile_coords.zoom), str(tile_coords.x), f"{tile_coords.y}.png"
    )
    os.makedirs(os.path.dirname(path), exist_ok=True)

    ok, encoded = cv2.imencode(".png", image)
    if not ok:
        return False
    with open(path, "wb") as f:
        f.write(encoded.tobytes())

    # Keep the memory cache bounded — consistent with load_local_tile,
    # which evicts before inserting; previously this insert was unbounded.
    key = f"{tile_coords.zoom}/{tile_coords.x}/{tile_coords.y}"
    if key not in self._mem_cache and len(self._mem_cache) >= self._mem_cache_max:
        del self._mem_cache[next(iter(self._mem_cache))]
    self._mem_cache[key] = image
    return True
|
||||
|
||||
# ------------------------------------------------------------------
# SAT-02: Tile selection for ESKF position ± 3σ_horizontal
# ------------------------------------------------------------------

@staticmethod
def _meters_to_degrees(meters: float, lat: float) -> tuple[float, float]:
    """Convert a radius in metres to (Δlat°, Δlon°) at the given latitude.

    Uses the spherical approximation 1° ≈ 111,320 m. The longitude scale
    factor cos(lat) is clamped away from zero so the result stays finite
    near the poles instead of blowing up (or dividing by zero).
    """
    delta_lat = meters / 111_320.0
    # Clamp: cos(±90°) underflows toward 0 and would make Δlon explode.
    cos_lat = max(math.cos(math.radians(lat)), 1e-9)
    delta_lon = meters / (111_320.0 * cos_lat)
    return delta_lat, delta_lon
|
||||
|
||||
def select_tiles_for_eskf_position(
    self, gps: GPSPoint, sigma_h_m: float, zoom: int
) -> list[TileCoords]:
    """Return all tile coords covering the ESKF position ± 3σ_horizontal (SAT-02).

    Args:
        gps: ESKF best-estimate position.
        sigma_h_m: 1-σ horizontal uncertainty in metres (from ESKF covariance).
        zoom: Web Mercator zoom level (18 recommended ≈ 0.6 m/px).
    """
    radius_m = 3.0 * sigma_h_m
    dlat, dlon = self._meters_to_degrees(radius_m, gps.lat)

    # Bounding-box corners of the search area.
    lat_min, lat_max = gps.lat - dlat, gps.lat + dlat
    lon_min, lon_max = gps.lon - dlon, gps.lon + dlon

    # Convert corners to tile coords. Sort each axis so the ranges below
    # are never empty if corner ordering is ever inverted.
    # NOTE(review): a search box spanning the antimeridian is not handled.
    tc_nw = mercator.latlon_to_tile(lat_max, lon_min, zoom)
    tc_se = mercator.latlon_to_tile(lat_min, lon_max, zoom)
    x_lo, x_hi = sorted((tc_nw.x, tc_se.x))
    y_lo, y_hi = sorted((tc_nw.y, tc_se.y))

    return [
        TileCoords(x=x, y=y, zoom=zoom)
        for x in range(x_lo, x_hi + 1)
        for y in range(y_lo, y_hi + 1)
    ]
|
||||
|
||||
def assemble_mosaic(
    self,
    tile_list: list[tuple[TileCoords, np.ndarray]],
    target_size: int = 512,
) -> tuple[np.ndarray, TileBounds] | None:
    """Assemble a list of (TileCoords, image) pairs into a single mosaic.

    Returns (mosaic_image, combined_bounds) or None if tile_list is empty.
    The mosaic is resized to (target_size × target_size) for the matcher.
    """
    if not tile_list:
        return None

    xs = [tc.x for tc, _ in tile_list]
    ys = [tc.y for tc, _ in tile_list]
    zoom = tile_list[0][0].zoom

    x_min, x_max = min(xs), max(xs)
    y_min, y_max = min(ys), max(ys)
    cols = x_max - x_min + 1
    rows = y_max - y_min + 1

    # Cell size comes from the first image; tiles are assumed uniform.
    th, tw = tile_list[0][1].shape[:2]

    canvas = np.zeros((rows * th, cols * tw, 3), dtype=np.uint8)
    for tc, img in tile_list:
        col = tc.x - x_min
        row = tc.y - y_min
        # Clip each paste to its own cell so an oversized tile cannot
        # overwrite a neighbour or overflow the canvas edge.
        h = min(img.shape[0], th)
        w = min(img.shape[1], tw)
        canvas[row * th: row * th + h, col * tw: col * tw + w] = img[:h, :w]

    mosaic = cv2.resize(canvas, (target_size, target_size), interpolation=cv2.INTER_AREA)

    # Combined GPS bounds from the NW-most and SE-most tiles.
    nw_bounds = mercator.compute_tile_bounds(TileCoords(x=x_min, y=y_min, zoom=zoom))
    se_bounds = mercator.compute_tile_bounds(TileCoords(x=x_max, y=y_max, zoom=zoom))
    combined = TileBounds(
        nw=nw_bounds.nw,
        ne=GPSPoint(lat=nw_bounds.nw.lat, lon=se_bounds.se.lon),
        sw=GPSPoint(lat=se_bounds.se.lat, lon=nw_bounds.nw.lon),
        se=se_bounds.se,
        center=GPSPoint(
            lat=(nw_bounds.nw.lat + se_bounds.se.lat) / 2,
            lon=(nw_bounds.nw.lon + se_bounds.se.lon) / 2,
        ),
        # NOTE(review): this is the per-tile GSD, not the resized mosaic's —
        # confirm the matcher rescales accordingly.
        gsd=nw_bounds.gsd,
    )
    return mosaic, combined
|
||||
|
||||
def fetch_tiles_for_position(
    self, gps: GPSPoint, sigma_h_m: float, zoom: int
) -> tuple[np.ndarray, TileBounds] | None:
    """High-level helper: select tiles, load them locally, assemble a mosaic.

    Returns (mosaic, bounds), or None when no local tiles cover the area.
    """
    loaded = [
        (tc, img)
        for tc in self.select_tiles_for_eskf_position(gps, sigma_h_m, zoom)
        if (img := self.load_local_tile(tc)) is not None
    ]
    if not loaded:
        return None
    return self.assemble_mosaic(loaded)
|
||||
|
||||
# ------------------------------------------------------------------
# Cache helpers (backward-compat, also used for warm-path caching)
# ------------------------------------------------------------------

def cache_tile(self, flight_id: str, tile_coords: TileCoords, tile_data: np.ndarray) -> bool:
    """Cache a tile image in memory (used by tests and offline tools).

    Args:
        flight_id: Accepted for backward compatibility; the in-memory
            cache is keyed by tile coordinates only.
    """
    key = f"{tile_coords.zoom}/{tile_coords.x}/{tile_coords.y}"
    # Bound the cache, consistent with load_local_tile's eviction;
    # previously this insert could grow the cache without limit.
    if key not in self._mem_cache and len(self._mem_cache) >= self._mem_cache_max:
        del self._mem_cache[next(iter(self._mem_cache))]
    self._mem_cache[key] = tile_data
    return True
|
||||
|
||||
def get_cached_tile(self, flight_id: str, tile_coords: TileCoords) -> np.ndarray | None:
    """Retrieve a cached tile from memory, or None on a miss.

    Args:
        flight_id: Accepted for backward compatibility; lookups are keyed
            by tile coordinates only.
    """
    key = f"{tile_coords.zoom}/{tile_coords.x}/{tile_coords.y}"
    return self._mem_cache.get(key)
|
||||
|
||||
# ------------------------------------------------------------------
# Tile math helpers
# ------------------------------------------------------------------

def get_tile_grid(self, center: TileCoords, grid_size: int) -> list[TileCoords]:
    """Return grid_size tile coordinates centered on `center`.

    grid_size is the total tile count: perfect odd squares (9 -> 3x3,
    25 -> 5x5) are centered on `center`; grid_size=4 is a 2x2 block
    anchored at `center` (a 2x2 grid has no symmetric center tile).
    NOTE(review): other non-square sizes fall back to the largest centered
    odd square and may return fewer than grid_size tiles — confirm callers
    only pass 1, 4, 9, 25, ...
    """
    if grid_size == 1:
        return [center]

    if grid_size == 4:
        # Handle the asymmetric 2x2 case up front (the original computed a
        # centered square first, then threw it away and rebuilt this).
        return [
            TileCoords(x=center.x + dx, y=center.y + dy, zoom=center.zoom)
            for dy in range(2)
            for dx in range(2)
        ]

    # E.g. grid_size=9 -> side=3 -> half=1 -> offsets -1..1 on both axes.
    side = int(grid_size ** 0.5)
    half = side // 2
    coords = [
        TileCoords(x=center.x + dx, y=center.y + dy, zoom=center.zoom)
        for dy in range(-half, half + 1)
        for dx in range(-half, half + 1)
    ]
    # Trim in case grid_size is not a perfect square.
    return coords[:grid_size]
|
||||
|
||||
def expand_search_grid(self, center: TileCoords, current_size: int, new_size: int) -> list[TileCoords]:
    """Return only the NEW tiles gained when growing the grid from current_size to new_size."""
    seen = {(tile.x, tile.y) for tile in self.get_tile_grid(center, current_size)}
    fresh = []
    for tile in self.get_tile_grid(center, new_size):
        if (tile.x, tile.y) not in seen:
            fresh.append(tile)
    return fresh
|
||||
|
||||
def compute_tile_coords(self, lat: float, lon: float, zoom: int) -> TileCoords:
    """Delegate lat/lon → slippy-map tile conversion to the mercator utility."""
    tile = mercator.latlon_to_tile(lat, lon, zoom)
    return tile
|
||||
@@ -162,10 +241,6 @@ class SatelliteDataManager:
|
||||
return mercator.compute_tile_bounds(tile_coords)
|
||||
|
||||
def clear_flight_cache(self, flight_id: str) -> bool:
    """Clear the in-memory tile cache.

    Args:
        flight_id: Accepted for backward compatibility; cache keys are
            tile-coordinate based (not flight-scoped), so the entire
            cache is cleared regardless of the id passed.
    """
    self._mem_cache.clear()
    return True
|
||||
|
||||
Reference in New Issue
Block a user