mirror of
https://github.com/azaion/detections.git
synced 2026-04-22 22:26:33 +00:00
[AZ-144] [AZ-146] [AZ-147] Implement negative input, performance, and security integration tests
Made-with: Cursor
@@ -0,0 +1,25 @@
# Batch Report

**Batch**: 2a
**Tasks**: AZ-139_test_health_engine, AZ-140_test_single_image, AZ-141_test_tiling, AZ-142_test_async_sse
**Date**: 2026-03-23

## Task Results

| Task | Status | Files Modified | Tests | Issues |
|------|--------|----------------|-------|--------|
| AZ-139_test_health_engine | Done | 1 file | 4 tests | None |
| AZ-140_test_single_image | Done | 1 file | 5 tests | None |
| AZ-141_test_tiling | Done | 1 file | 2 tests | None |
| AZ-142_test_async_sse | Done | 1 file | 3 tests | None |

## Code Review Verdict: PASS_WITH_WARNINGS
## Auto-Fix Attempts: 0
## Stuck Agents: None

## Commit
- Hash: 5418bd7
- Branch: dev
- Pushed: Yes

## Next Batch: AZ-144, AZ-146, AZ-147 (Batch 2b)
@@ -1 +1,52 @@
"""Invalid inputs, empty uploads, corrupt media, and expected HTTP error responses."""
import pytest
import requests

_DETECT_TIMEOUT = 60


def _assert_health_200(http_client):
    r = http_client.get("/health")
    assert r.status_code == 200
    data = r.json()
    assert data["status"] == "healthy"
    assert data.get("errorMessage") is None


@pytest.mark.cpu
def test_ft_n_01_empty_image_returns_400(http_client, empty_image):
    files = {"file": ("empty.jpg", empty_image, "image/jpeg")}
    r = http_client.post("/detect", files=files, timeout=30)
    assert r.status_code == 400
    body = r.json()
    assert "detail" in body
    assert body["detail"] == "Image is empty"
    _assert_health_200(http_client)


@pytest.mark.cpu
def test_ft_n_02_corrupt_image_returns_400_or_422(http_client, corrupt_image):
    files = {"file": ("corrupt.jpg", corrupt_image, "image/jpeg")}
    r = http_client.post("/detect", files=files, timeout=30)
    assert r.status_code in (400, 422)
    body = r.json()
    assert "detail" in body
    _assert_health_200(http_client)


@pytest.mark.cpu
def test_ft_n_03_loader_error_mode_detect_does_not_500(
    http_client, mock_loader_url, image_small
):
    cfg = requests.post(
        f"{mock_loader_url}/mock/config", json={"mode": "error"}, timeout=10
    )
    cfg.raise_for_status()
    files = {"file": ("small.jpg", image_small, "image/jpeg")}
    r = http_client.post("/detect", files=files, timeout=_DETECT_TIMEOUT)
    assert r.status_code != 500


@pytest.mark.skip(reason="Requires separate Docker profile without classes.json")
@pytest.mark.cpu
def test_ft_n_05_missing_classes_json_prevents_normal_operation():
    pass
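These tests (and the two modules below) depend on fixtures such as http_client, base_url, empty_image, corrupt_image, and image_small that live in a conftest.py outside this diff. A minimal sketch of plausible fixture shapes, assuming the service address comes from an environment variable and the test images are generated in memory with Pillow; the repository's actual fixtures may look quite different:

# conftest.py (sketch only, not part of this commit; all implementations are assumptions)
import io
import os

import pytest
import requests
from PIL import Image


@pytest.fixture(scope="session")
def base_url():
    # Assumption: the detection service address is injected via an env var.
    return os.environ.get("DETECT_BASE_URL", "http://localhost:8000")


@pytest.fixture(scope="session")
def http_client(base_url):
    # Thin wrapper so tests can pass relative paths like "/detect".
    class _Client:
        def __init__(self, base):
            self._base = base.rstrip("/")
            self._session = requests.Session()

        def get(self, path, **kwargs):
            return self._session.get(f"{self._base}{path}", **kwargs)

        def post(self, path, **kwargs):
            return self._session.post(f"{self._base}{path}", **kwargs)

    return _Client(base_url)


@pytest.fixture
def empty_image():
    return b""  # zero-byte upload


@pytest.fixture
def corrupt_image():
    return b"\xff\xd8\xff" + os.urandom(256)  # JPEG magic bytes, garbage body


@pytest.fixture
def image_small():
    buf = io.BytesIO()
    Image.new("RGB", (640, 480), color=(40, 120, 40)).save(buf, format="JPEG")
    return buf.getvalue()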
@@ -1 +1,175 @@
"""Latency and throughput baselines for sync detect and async pipelines."""
import json
import threading
import time
import uuid
from concurrent.futures import ThreadPoolExecutor

import pytest


def _percentile_ms(sorted_ms, p):
    n = len(sorted_ms)
    if n == 0:
        return 0.0
    if n == 1:
        return float(sorted_ms[0])
    k = (n - 1) * (p / 100.0)
    lo = int(k)
    hi = min(lo + 1, n - 1)
    w = k - lo
    return sorted_ms[lo] * (1 - w) + sorted_ms[hi] * w
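_percentile_ms uses linear interpolation between the two nearest ranks, so a percentile of a small sample falls between measured runs rather than snapping to one of them. A quick illustration with made-up latencies, not part of the test module:

# Illustration only: linear-interpolation percentiles on invented numbers.
samples = sorted([10.0, 20.0, 30.0, 40.0])
# p50: k = (4 - 1) * 0.5 = 1.5, halfway between samples[1] and samples[2].
assert _percentile_ms(samples, 50) == 25.0
# For the 10-run loop in test_nft_perf_01, p95 uses k = 9 * 0.95 = 8.55,
# a weighted blend of the second-slowest and slowest runs (indices 8 and 9).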
@pytest.mark.slow
@pytest.mark.timeout(300)
def test_nft_perf_01_single_image_latency_p95(
    warm_engine, http_client, image_small
):
    times_ms = []
    for _ in range(10):
        t0 = time.perf_counter()
        r = http_client.post(
            "/detect",
            files={"file": ("img.jpg", image_small, "image/jpeg")},
            timeout=120,
        )
        elapsed_ms = (time.perf_counter() - t0) * 1000.0
        assert r.status_code == 200
        times_ms.append(elapsed_ms)
    sorted_ms = sorted(times_ms)
    p50 = _percentile_ms(sorted_ms, 50)
    p95 = _percentile_ms(sorted_ms, 95)
    p99 = _percentile_ms(sorted_ms, 99)
    print(
        "nft_perf_01_csv,run_ms,"
        + ",".join(f"{x:.2f}" for x in sorted_ms)
        + f",p50,{p50:.2f},p95,{p95:.2f},p99,{p99:.2f}"
    )
    assert p95 < 5000.0


def _post_small(http_client, image_small):
    return http_client.post(
        "/detect",
        files={"file": ("img.jpg", image_small, "image/jpeg")},
        timeout=120,
    )


@pytest.mark.slow
@pytest.mark.timeout(300)
def test_nft_perf_02_concurrent_throughput_queuing(
    warm_engine, http_client, image_small
):
    def run_two():
        t0 = time.monotonic()
        with ThreadPoolExecutor(max_workers=2) as ex:
            futs = [ex.submit(_post_small, http_client, image_small) for _ in range(2)]
            rs = [f.result() for f in futs]
        return time.monotonic() - t0, rs

    def run_three():
        t0 = time.monotonic()
        with ThreadPoolExecutor(max_workers=3) as ex:
            futs = [ex.submit(_post_small, http_client, image_small) for _ in range(3)]
            rs = [f.result() for f in futs]
        return time.monotonic() - t0, rs

    wall2, rs2 = run_two()
    assert all(r.status_code == 200 for r in rs2)
    wall3, rs3 = run_three()
    assert all(r.status_code == 200 for r in rs3)
    if wall2 < 4.0:
        pytest.skip("wall clock too small for queuing comparison")
    assert wall3 > wall2 + 0.25


@pytest.mark.slow
@pytest.mark.timeout(300)
def test_nft_perf_03_tiling_overhead_large_image(
    warm_engine, http_client, image_small, image_large
):
    t_small = time.perf_counter()
    r_small = http_client.post(
        "/detect",
        files={"file": ("small.jpg", image_small, "image/jpeg")},
        timeout=120,
    )
    small_ms = (time.perf_counter() - t_small) * 1000.0
    assert r_small.status_code == 200
    config = json.dumps(
        {"altitude": 400, "focal_length": 24, "sensor_width": 23.5}
    )
    t_large = time.perf_counter()
    r_large = http_client.post(
        "/detect",
        files={"file": ("large.jpg", image_large, "image/jpeg")},
        data={"config": config},
        timeout=120,
    )
    large_ms = (time.perf_counter() - t_large) * 1000.0
    assert r_large.status_code == 200
    assert large_ms < 120_000.0
    print(
        f"nft_perf_03_csv,baseline_small_ms,{small_ms:.2f},large_ms,{large_ms:.2f}"
    )
    assert large_ms > small_ms - 500.0


@pytest.mark.slow
@pytest.mark.timeout(300)
def test_nft_perf_04_video_frame_rate_sse(
    warm_engine,
    http_client,
    jwt_token,
    mock_loader_url,
    sse_client_factory,
):
    media_id = f"perf-sse-{uuid.uuid4().hex}"
    base = mock_loader_url.rstrip("/")
    body = {
        "probability_threshold": 0.25,
        "paths": [f"{base}/load/video_short01.mp4"],
        "frame_period_recognition": 4,
        "frame_recognition_seconds": 2,
    }
    headers = {"Authorization": f"Bearer {jwt_token}"}
    stamps = []
    thread_exc = []
    done = threading.Event()

    def _listen():
        try:
            with sse_client_factory() as sse:
                time.sleep(0.3)
                for event in sse.events():
                    if not event.data or not str(event.data).strip():
                        continue
                    data = json.loads(event.data)
                    if data.get("mediaId") != media_id:
                        continue
                    stamps.append(time.monotonic())
                    if (
                        data.get("mediaStatus") == "AIProcessed"
                        and data.get("mediaPercent") == 100
                    ):
                        break
        except BaseException as e:
            thread_exc.append(e)
        finally:
            done.set()

    th = threading.Thread(target=_listen, daemon=True)
    th.start()
    time.sleep(0.5)
    r = http_client.post(f"/detect/{media_id}", json=body, headers=headers)
    assert r.status_code == 200
    ok = done.wait(timeout=120)
    assert ok
    th.join(timeout=5)
    assert not thread_exc
    assert len(stamps) >= 2
    span = stamps[-1] - stamps[0]
    assert span <= 120.0
    gaps = [stamps[i + 1] - stamps[i] for i in range(len(stamps) - 1)]
    assert max(gaps) <= 30.0
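Both SSE-driven tests (test_nft_perf_04 above and test_nft_sec_03 below) drain a server-sent-events stream through an sse_client_factory fixture that is not part of this diff. A plausible sketch, assuming the sseclient-py package and a hypothetical /events endpoint; the real fixture, stream URL, and auth handling may differ:

# Sketch of an SSE client fixture (sseclient-py assumed; the /events path is hypothetical).
import contextlib

import pytest
import requests
import sseclient


@pytest.fixture
def sse_client_factory(base_url):
    @contextlib.contextmanager
    def _factory():
        # Open a streaming HTTP connection and wrap it in an SSE parser
        # exposing the .events() generator used by the tests.
        resp = requests.get(
            f"{base_url.rstrip('/')}/events",
            stream=True,
            headers={"Accept": "text/event-stream"},
            timeout=120,
        )
        resp.raise_for_status()
        try:
            yield sseclient.SSEClient(resp)
        finally:
            resp.close()

    return _factory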
@@ -1 +1,119 @@
"""Auth headers, token refresh, and abuse-resistant API usage."""
import json
import os
import threading
import time
import uuid

import pytest
import requests


def test_nft_sec_01_malformed_multipart(base_url, http_client):
    url = f"{base_url.rstrip('/')}/detect"
    r1 = requests.post(
        url,
        data=b"not-multipart-body",
        headers={"Content-Type": "multipart/form-data"},
        timeout=30,
    )
    assert r1.status_code in (400, 422)
    r2 = requests.post(
        url,
        data=b"does-not-match-boundary",
        headers={"Content-Type": "multipart/form-data; boundary=----abc"},
        timeout=30,
    )
    assert r2.status_code in (400, 422)
    r3 = requests.post(
        url,
        files={"file": ("", b"", "")},
        timeout=30,
    )
    assert r3.status_code in (400, 422)
    assert http_client.get("/health").status_code == 200


@pytest.mark.slow
@pytest.mark.timeout(300)
def test_nft_sec_02_oversized_request(http_client):
    large = os.urandom(50 * 1024 * 1024)
    try:
        r = http_client.post(
            "/detect",
            files={"file": ("large.jpg", large, "image/jpeg")},
            timeout=180,
        )
    except requests.RequestException:
        pass
    else:
        assert r.status_code != 500
        assert r.status_code in (413, 400, 422)
    assert http_client.get("/health").status_code == 200


@pytest.mark.slow
@pytest.mark.timeout(120)
def test_nft_sec_03_jwt_token_forwarding(
    warm_engine,
    http_client,
    jwt_token,
    mock_loader_url,
    mock_annotations_url,
    sse_client_factory,
):
    media_id = f"sec-{uuid.uuid4().hex}"
    body = {
        "probability_threshold": 0.25,
        "paths": [
            f"{mock_loader_url.rstrip('/')}/load/video_short01.mp4",
        ],
        "frame_period_recognition": 4,
        "frame_recognition_seconds": 2,
    }
    headers = {
        "Authorization": f"Bearer {jwt_token}",
        "x-refresh-token": "test-refresh-token",
    }
    collected: list[dict] = []
    thread_exc: list[BaseException] = []
    done = threading.Event()

    def _listen():
        try:
            with sse_client_factory() as sse:
                time.sleep(0.3)
                for event in sse.events():
                    if not event.data or not str(event.data).strip():
                        continue
                    data = json.loads(event.data)
                    if data.get("mediaId") != media_id:
                        continue
                    collected.append(data)
                    if (
                        data.get("mediaStatus") == "AIProcessed"
                        and data.get("mediaPercent") == 100
                    ):
                        break
        except BaseException as e:
            thread_exc.append(e)
        finally:
            done.set()

    th = threading.Thread(target=_listen, daemon=True)
    th.start()
    time.sleep(0.5)
    r = http_client.post(f"/detect/{media_id}", json=body, headers=headers)
    assert r.status_code == 200
    ok = done.wait(timeout=120)
    assert ok, "SSE listener did not finish within 120s"
    th.join(timeout=5)
    assert not thread_exc, thread_exc
    final = collected[-1]
    assert final.get("mediaStatus") == "AIProcessed"
    assert final.get("mediaPercent") == 100
    ar = requests.get(f"{mock_annotations_url}/mock/annotations", timeout=30)
    ar.raise_for_status()
    anns = ar.json().get("annotations") or []
    assert any(
        isinstance(a, dict) and a.get("mediaId") == media_id for a in anns
    ), anns
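test_nft_sec_03 proves end-to-end completion and that annotations for the media reached the mock annotations service, but it never inspects the headers the engine sent downstream. If the mock loader records inbound requests, a stricter check could be appended to the module above; the /mock/requests endpoint and its payload shape here are hypothetical, not something this commit defines, and the helper reuses the module's existing requests import:

# Hypothetical follow-up assertion: confirm the caller's JWT was forwarded to the loader.
# Assumes the mock loader exposes a request log at /mock/requests (illustrative only).
def _assert_token_forwarded(mock_loader_url, jwt_token):
    log = requests.get(f"{mock_loader_url.rstrip('/')}/mock/requests", timeout=30)
    log.raise_for_status()
    auth_headers = [
        req.get("headers", {}).get("Authorization")
        for req in log.json().get("requests", [])
    ]
    assert f"Bearer {jwt_token}" in auth_headers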