mirror of
https://github.com/azaion/detections.git
synced 2026-04-23 00:56:32 +00:00
[AZ-139] [AZ-140] [AZ-141] [AZ-142] Implement integration tests for health, single image, tiling, and async SSE
Made-with: Cursor
This commit is contained in:
@@ -1 +1,94 @@
|
||||
"""POST /detect/{media_id} async flow, SSE /detect/stream events, annotations callback."""
|
||||
import json
|
||||
import threading
|
||||
import time
|
||||
import uuid
|
||||
|
||||
import pytest
|
||||
|
||||
|
||||
def _ai_config_video(mock_loader_url: str) -> dict:
|
||||
base = mock_loader_url.rstrip("/")
|
||||
return {
|
||||
"probability_threshold": 0.25,
|
||||
"tracking_intersection_threshold": 0.6,
|
||||
"altitude": 400,
|
||||
"focal_length": 24,
|
||||
"sensor_width": 23.5,
|
||||
"paths": [f"{base}/load/video_short01.mp4"],
|
||||
"frame_period_recognition": 4,
|
||||
"frame_recognition_seconds": 2,
|
||||
}
|
||||
|
||||
|
||||
def test_ft_p08_immediate_async_response(
    warm_engine, http_client, jwt_token, mock_loader_url
):
    """FT-P-08: async detect must acknowledge immediately, not block on processing."""
    media_id = f"async-{uuid.uuid4().hex}"
    payload = _ai_config_video(mock_loader_url)
    auth = {"Authorization": f"Bearer {jwt_token}"}

    started = time.monotonic()
    resp = http_client.post(f"/detect/{media_id}", json=payload, headers=auth)
    took = time.monotonic() - started

    # The endpoint only schedules the job, so the reply must be near-instant.
    assert took < 2.0
    assert resp.status_code == 200
    assert resp.json() == {"status": "started", "mediaId": media_id}
|
||||
|
||||
|
||||
@pytest.mark.slow
@pytest.mark.timeout(120)
def test_ft_p09_sse_event_delivery(
    warm_engine, http_client, jwt_token, mock_loader_url, sse_client_factory
):
    """FT-P-09: SSE /detect/stream delivers progress events for a started media,
    ending with mediaStatus == "AIProcessed" at mediaPercent == 100."""
    media_id = f"sse-{uuid.uuid4().hex}"
    body = _ai_config_video(mock_loader_url)
    headers = {"Authorization": f"Bearer {jwt_token}"}
    collected: list[dict] = []  # events seen for our media_id, in arrival order
    thread_exc: list[BaseException] = []  # exceptions raised inside the listener thread
    done = threading.Event()

    def _listen():
        # Runs in a background thread: subscribe first, main thread POSTs after.
        try:
            with sse_client_factory() as sse:
                # Give the server a moment to register the subscription.
                time.sleep(0.3)
                for event in sse.events():
                    # Skip keep-alive / empty frames.
                    if not event.data or not str(event.data).strip():
                        continue
                    data = json.loads(event.data)
                    # The stream is shared; keep only events for the media we started.
                    if data.get("mediaId") != media_id:
                        continue
                    collected.append(data)
                    # Stop once the terminal "fully processed" event arrives.
                    if (
                        data.get("mediaStatus") == "AIProcessed"
                        and data.get("mediaPercent") == 100
                    ):
                        break
        # BaseException so even SystemExit/KeyboardInterrupt in the worker is
        # surfaced to the main thread as a test failure rather than lost.
        except BaseException as e:
            thread_exc.append(e)
        finally:
            done.set()

    th = threading.Thread(target=_listen, daemon=True)
    th.start()
    # Let the SSE connection open before triggering processing, so no event is missed.
    time.sleep(0.5)
    r = http_client.post(f"/detect/{media_id}", json=body, headers=headers)
    assert r.status_code == 200
    ok = done.wait(timeout=120)
    assert ok, "SSE listener did not finish within 120s"
    th.join(timeout=5)
    assert not thread_exc, thread_exc
    # At least one intermediate progress event, and a terminal completion event.
    assert any(e.get("mediaStatus") == "AIProcessing" for e in collected)
    final = collected[-1]
    assert final.get("mediaStatus") == "AIProcessed"
    assert final.get("mediaPercent") == 100
|
||||
|
||||
|
||||
def test_ft_n04_duplicate_media_id_409(
    warm_engine, http_client, jwt_token, mock_loader_url
):
    """FT-N-04: starting detection twice with the same mediaId yields 409 Conflict.

    Uses a per-run unique id (like the other async tests) instead of a
    hard-coded "dup-test": with a long-lived engine, a hard-coded id from a
    previous run would make the FIRST post return 409 and flake the test.
    """
    media_id = f"dup-{uuid.uuid4().hex}"
    body = _ai_config_video(mock_loader_url)
    headers = {"Authorization": f"Bearer {jwt_token}"}
    r1 = http_client.post(f"/detect/{media_id}", json=body, headers=headers)
    assert r1.status_code == 200
    # Second start while the first job is still registered must be rejected.
    r2 = http_client.post(f"/detect/{media_id}", json=body, headers=headers)
    assert r2.status_code == 409
|
||||
|
||||
@@ -1 +1,74 @@
|
||||
"""Health & engine lifecycle tests (FT-P-01, FT-P-02, FT-P-14, FT-P-15)."""
|
||||
import time
|
||||
|
||||
import pytest
|
||||
|
||||
_DETECT_TIMEOUT = 60
|
||||
|
||||
|
||||
def _get_health(http_client):
    """GET /health, fail on a non-2xx response, and return the parsed JSON body."""
    resp = http_client.get("/health")
    resp.raise_for_status()
    return resp.json()
|
||||
|
||||
|
||||
def _assert_active_ai(data):
|
||||
assert data["status"] == "healthy"
|
||||
assert data["aiAvailability"] not in ("None", "Downloading")
|
||||
|
||||
|
||||
@pytest.mark.cpu
class TestHealthEngineStep01PreInit:
    """Health behavior before the engine has ever been used."""

    def test_ft_p_01_pre_init_health(self, http_client):
        """FT-P-01: /health answers fast and reports no engine before first use."""
        started = time.monotonic()
        payload = _get_health(http_client)
        elapsed = time.monotonic() - started
        # Health must not block on engine initialization.
        assert elapsed < 2.0
        assert payload["status"] == "healthy"
        assert payload["aiAvailability"] == "None"
        assert payload.get("errorMessage") is None
|
||||
|
||||
|
||||
@pytest.mark.cpu
@pytest.mark.slow
class TestHealthEngineStep02LazyInit:
    """The first detect call should lazily initialize the engine."""

    def test_ft_p_14_lazy_initialization(self, http_client, image_small):
        """FT-P-14: engine goes from "None" to an active state via one /detect call."""
        before = _get_health(http_client)
        assert before["aiAvailability"] == "None"
        upload = {"file": ("lazy.jpg", image_small, "image/jpeg")}
        resp = http_client.post("/detect", files=upload, timeout=_DETECT_TIMEOUT)
        resp.raise_for_status()
        assert isinstance(resp.json(), list)
        # The detect call must have transitioned the engine to an active state.
        _assert_active_ai(_get_health(http_client))
|
||||
|
||||
|
||||
@pytest.mark.cpu
@pytest.mark.slow
class TestHealthEngineStep03Warmed:
    """Health and CPU inference checks against an already-warmed engine."""

    @pytest.fixture(autouse=True)
    def _warm(self, warm_engine):
        # Depend on warm_engine so every test in this class runs post-warm-up.
        pass

    def test_ft_p_02_post_init_health(self, http_client):
        """FT-P-02: after warm-up /health reports an active, error-free engine."""
        payload = _get_health(http_client)
        _assert_active_ai(payload)
        assert payload.get("errorMessage") is None

    def test_ft_p_15_onnx_cpu_detect(self, http_client, image_small):
        """FT-P-15: CPU (ONNX) inference returns well-formed detection dicts."""
        upload = {"file": ("onnx.jpg", image_small, "image/jpeg")}
        resp = http_client.post("/detect", files=upload, timeout=_DETECT_TIMEOUT)
        resp.raise_for_status()
        detections = resp.json()
        assert isinstance(detections, list)
        # Image content is arbitrary, so only validate shape when something was found.
        if detections:
            first = detections[0]
            required = (
                "centerX",
                "centerY",
                "width",
                "height",
                "classNum",
                "label",
                "confidence",
            )
            for key in required:
                assert key in first
|
||||
|
||||
@@ -1 +1,209 @@
|
||||
"""Synchronous POST /detect single-image scenarios (bounding boxes, config, class mapping)."""
|
||||
import json
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
|
||||
_DETECT_SLOW_TIMEOUT = 120
|
||||
_EPS = 1e-6
|
||||
_WEATHER_CLASS_STRIDE = 20
|
||||
|
||||
|
||||
def _jpeg_width_height(data):
|
||||
if len(data) < 2 or data[0:2] != b"\xff\xd8":
|
||||
return None
|
||||
i = 2
|
||||
while i + 1 < len(data):
|
||||
if data[i] != 0xFF:
|
||||
i += 1
|
||||
continue
|
||||
i += 1
|
||||
while i < len(data) and data[i] == 0xFF:
|
||||
i += 1
|
||||
if i >= len(data):
|
||||
break
|
||||
m = data[i]
|
||||
i += 1
|
||||
if m in (0xD8, 0xD9):
|
||||
continue
|
||||
if i + 3 > len(data):
|
||||
break
|
||||
seg_len = (data[i] << 8) | data[i + 1]
|
||||
i += 2
|
||||
if m in (0xC0, 0xC1, 0xC2, 0xC3, 0xC5, 0xC6, 0xC7):
|
||||
if i + 5 > len(data):
|
||||
return None
|
||||
h = (data[i + 1] << 8) | data[i + 2]
|
||||
w = (data[i + 3] << 8) | data[i + 4]
|
||||
return w, h
|
||||
i += max(0, seg_len - 2)
|
||||
return None
|
||||
|
||||
|
||||
def _overlap_to_min_area_ratio(a, b):
|
||||
ox = 0.5 * (a["width"] + b["width"]) - abs(a["centerX"] - b["centerX"])
|
||||
oy = 0.5 * (a["height"] + b["height"]) - abs(a["centerY"] - b["centerY"])
|
||||
overlap_area = max(0.0, ox) * max(0.0, oy)
|
||||
aa = a["width"] * a["height"]
|
||||
ab = b["width"] * b["height"]
|
||||
m = min(aa, ab)
|
||||
if m <= 0:
|
||||
return 0.0
|
||||
return overlap_area / m
|
||||
|
||||
|
||||
def _load_classes_media():
    """Load /media/classes.json and return ({class_id: max_size_m}, [class names]).

    Skips the calling test when the fixture file is absent on this host.
    """
    path = Path("/media/classes.json")
    if not path.is_file():
        pytest.skip(f"missing {path}")
    entries = json.loads(path.read_text())
    max_size_by_id = {row["Id"]: float(row["MaxSizeM"]) for row in entries}
    names = [row["Name"] for row in entries]
    return max_size_by_id, names
|
||||
|
||||
|
||||
def _weather_label_ok(label, base_names):
|
||||
for n in base_names:
|
||||
if label == n:
|
||||
return True
|
||||
if label == n + "(Wint)" or label == n + "(Night)":
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
@pytest.mark.slow
def test_ft_p_03_detection_response_structure_ac1(http_client, image_small, warm_engine):
    """FT-P-03/AC1: every detection dict is fully typed with normalized values."""
    resp = http_client.post(
        "/detect",
        files={"file": ("img.jpg", image_small, "image/jpeg")},
    )
    assert resp.status_code == 200
    detections = resp.json()
    assert isinstance(detections, list)
    for det in detections:
        # Geometry fields: numeric and normalized to [0, 1].
        for key in ("centerX", "centerY", "width", "height"):
            assert isinstance(det[key], (int, float))
            assert 0.0 <= float(det[key]) <= 1.0
        assert isinstance(det["classNum"], int)
        assert isinstance(det["label"], str)
        assert isinstance(det["confidence"], (int, float))
        assert 0.0 <= float(det["confidence"]) <= 1.0
|
||||
|
||||
|
||||
@pytest.mark.slow
def test_ft_p_05_confidence_filtering_ac2(http_client, image_small, warm_engine):
    """FT-P-05/AC2: probability_threshold filters detections by confidence."""

    def _detect(threshold):
        # One /detect call on the same image with the given confidence cutoff.
        resp = http_client.post(
            "/detect",
            files={"file": ("img.jpg", image_small, "image/jpeg")},
            data={"config": json.dumps({"probability_threshold": threshold})},
        )
        assert resp.status_code == 200
        dets = resp.json()
        assert isinstance(dets, list)
        return dets

    high = _detect(0.8)
    # Every surviving detection must clear the high threshold (fp tolerance).
    for det in high:
        assert float(det["confidence"]) + _EPS >= 0.8
    low = _detect(0.1)
    # Relaxing the threshold can only keep detections, never drop them.
    assert len(low) >= len(high)
|
||||
|
||||
|
||||
@pytest.mark.slow
def test_ft_p_06_overlap_deduplication_ac3(http_client, image_dense, warm_engine):
    """FT-P-06/AC3: tracking_intersection_threshold caps same-label overlap."""
    loose_cfg = json.dumps({"tracking_intersection_threshold": 0.6})
    resp_loose = http_client.post(
        "/detect",
        files={"file": ("img.jpg", image_dense, "image/jpeg")},
        data={"config": loose_cfg},
        timeout=_DETECT_SLOW_TIMEOUT,
    )
    assert resp_loose.status_code == 200
    detections = resp_loose.json()
    assert isinstance(detections, list)

    # Group by label: the dedup rule only applies within a class.
    grouped = {}
    for det in detections:
        grouped.setdefault(det["label"], []).append(det)
    for label, group in grouped.items():
        for idx, first in enumerate(group):
            for second in group[idx + 1:]:
                ratio = _overlap_to_min_area_ratio(first, second)
                # No surviving same-label pair may overlap beyond the threshold.
                assert ratio <= 0.6 + _EPS, (label, ratio)

    strict_cfg = json.dumps({"tracking_intersection_threshold": 0.01})
    resp_strict = http_client.post(
        "/detect",
        files={"file": ("img.jpg", image_dense, "image/jpeg")},
        data={"config": strict_cfg},
        timeout=_DETECT_SLOW_TIMEOUT,
    )
    assert resp_strict.status_code == 200
    strict = resp_strict.json()
    assert isinstance(strict, list)
    # A stricter overlap threshold can only remove detections.
    assert len(strict) <= len(detections)
|
||||
|
||||
|
||||
@pytest.mark.slow
def test_ft_p_07_physical_size_filtering_ac4(http_client, image_small, warm_engine):
    """FT-P-07/AC4: detections wider than their class's MaxSizeM are filtered out."""
    max_size_by_id, _ = _load_classes_media()
    dims = _jpeg_width_height(image_small)
    assert dims is not None
    image_width_px = dims[0]

    altitude = 400.0
    focal_length = 24.0
    sensor_width = 23.5
    # Ground sample distance: meters of ground per image pixel.
    gsd = (sensor_width * altitude) / (focal_length * image_width_px)

    cfg = json.dumps(
        {
            "altitude": altitude,
            "focal_length": focal_length,
            "sensor_width": sensor_width,
        }
    )
    resp = http_client.post(
        "/detect",
        files={"file": ("img.jpg", image_small, "image/jpeg")},
        data={"config": cfg},
        timeout=_DETECT_SLOW_TIMEOUT,
    )
    assert resp.status_code == 200
    detections = resp.json()
    assert isinstance(detections, list)
    for det in detections:
        # Weather-mode variants share a base class id modulo the stride.
        base_id = det["classNum"] % _WEATHER_CLASS_STRIDE
        assert base_id in max_size_by_id
        physical_width_m = float(det["width"]) * image_width_px * gsd
        assert physical_width_m <= max_size_by_id[base_id] + _EPS
|
||||
|
||||
|
||||
@pytest.mark.slow
def test_ft_p_13_weather_mode_class_variants_ac5(
    http_client, image_different_types, warm_engine
):
    """FT-P-13/AC5: labels are base class names or their weather-mode variants."""
    _, base_names = _load_classes_media()
    resp = http_client.post(
        "/detect",
        files={"file": ("img.jpg", image_different_types, "image/jpeg")},
        timeout=_DETECT_SLOW_TIMEOUT,
    )
    assert resp.status_code == 200
    detections = resp.json()
    assert isinstance(detections, list)
    for det in detections:
        label = det["label"]
        assert isinstance(label, str)
        assert len(label) > 0
        # Every reported label must map to a known class or its (Wint)/(Night) form.
        assert _weather_label_ok(label, base_names)
|
||||
|
||||
@@ -1 +1,57 @@
|
||||
"""Large-image tiling and overlap behavior for POST /detect."""
|
||||
import json
|
||||
|
||||
import pytest
|
||||
|
||||
_TILING_TIMEOUT = 120
|
||||
_GSD = {"altitude": 400, "focal_length": 24, "sensor_width": 23.5}
|
||||
_DUP_THRESHOLD = 0.01
|
||||
|
||||
|
||||
def _assert_coords_normalized(detections):
|
||||
for d in detections:
|
||||
for k in ("centerX", "centerY", "width", "height"):
|
||||
v = d[k]
|
||||
assert 0.0 <= v <= 1.0
|
||||
|
||||
|
||||
def _assert_no_same_label_near_duplicate_centers(detections):
    """Fail when two same-label detections sit within _DUP_THRESHOLD on both axes.

    Such a pair indicates a tile-boundary object that was reported twice
    instead of being merged during deduplication.
    """
    seen = {}
    for det in detections:
        label = det["label"]
        cx, cy = det["centerX"], det["centerY"]
        earlier = seen.setdefault(label, [])
        for px, py in earlier:
            is_dup = abs(cx - px) < _DUP_THRESHOLD and abs(cy - py) < _DUP_THRESHOLD
            assert not is_dup, (
                f"near-duplicate centers for label {label!r}: ({px},{py}) vs ({cx},{cy})"
            )
        earlier.append((cx, cy))
|
||||
|
||||
|
||||
@pytest.mark.slow
def test_ft_p_04_gsd_based_tiling_ac1(http_client, image_large, warm_engine):
    """FT-P-04/AC1: a large image is tiled yet coordinates stay image-normalized."""
    resp = http_client.post(
        "/detect",
        files={"file": ("img.jpg", image_large, "image/jpeg")},
        data={"config": json.dumps(_GSD)},
        timeout=_TILING_TIMEOUT,
    )
    assert resp.status_code == 200
    detections = resp.json()
    assert isinstance(detections, list)
    # Coordinates must be relative to the full image, not to individual tiles.
    _assert_coords_normalized(detections)
|
||||
|
||||
|
||||
@pytest.mark.slow
def test_ft_p_16_tile_boundary_deduplication_ac2(http_client, image_large, warm_engine):
    """FT-P-16/AC2: with 20% tile overlap, boundary objects are merged, not doubled."""
    overlap_cfg = json.dumps({**_GSD, "big_image_tile_overlap_percent": 20})
    resp = http_client.post(
        "/detect",
        files={"file": ("img.jpg", image_large, "image/jpeg")},
        data={"config": overlap_cfg},
        timeout=_TILING_TIMEOUT,
    )
    assert resp.status_code == 200
    detections = resp.json()
    assert isinstance(detections, list)
    _assert_no_same_label_near_duplicate_centers(detections)
|
||||
|
||||
Reference in New Issue
Block a user