# Mirror of https://github.com/azaion/detections.git
# Synced 2026-04-22 21:56:33 +00:00
import json
import re
import threading
import time
import uuid
from concurrent.futures import ThreadPoolExecutor
from datetime import datetime
from pathlib import Path

import pytest


def _video_ai_body(video_path: str) -> dict:
|
|
return {
|
|
"probability_threshold": 0.25,
|
|
"tracking_intersection_threshold": 0.6,
|
|
"altitude": 400,
|
|
"focal_length": 24,
|
|
"sensor_width": 23.5,
|
|
"paths": [video_path],
|
|
"frame_period_recognition": 4,
|
|
"frame_recognition_seconds": 2,
|
|
}
|
|
|
|
|
|
@pytest.mark.slow
@pytest.mark.timeout(120)
def test_ft_n_08_nft_res_lim_02_sse_queue_bounded_best_effort(
    warm_engine,
    http_client,
    jwt_token,
    video_short_path,
    sse_client_factory,
):
    """Start a video detection and consume its SSE progress on a background
    thread, asserting the stream delivers a terminal ``AIProcessed`` event
    for the submitted media id."""
    media_id = f"rlim-sse-{uuid.uuid4().hex}"
    payload = _video_ai_body(video_short_path)
    auth_headers = {"Authorization": f"Bearer {jwt_token}"}

    events: list[dict] = []
    listener_errors: list[BaseException] = []
    finished = threading.Event()

    def _consume_stream():
        # Runs on a daemon thread; failures are surfaced via listener_errors
        # so the main thread can turn them into assertion failures.
        try:
            with sse_client_factory() as stream:
                time.sleep(0.3)  # let the SSE subscription settle
                for evt in stream.events():
                    raw = evt.data
                    if not raw or not str(raw).strip():
                        continue  # skip blank / keep-alive frames
                    parsed = json.loads(raw)
                    if parsed.get("mediaId") != media_id:
                        continue  # event belongs to another media item
                    events.append(parsed)
                    terminal = (
                        parsed.get("mediaStatus") == "AIProcessed"
                        and parsed.get("mediaPercent") == 100
                    )
                    if terminal:
                        break
        except BaseException as exc:
            listener_errors.append(exc)
        finally:
            finished.set()

    listener = threading.Thread(target=_consume_stream, daemon=True)
    listener.start()
    time.sleep(0.5)  # let the listener attach before triggering detection

    response = http_client.post(
        f"/detect/{media_id}", json=payload, headers=auth_headers
    )
    assert response.status_code == 200

    assert finished.wait(timeout=120)
    listener.join(timeout=5)
    assert not listener_errors, listener_errors
    assert events
    assert events[-1].get("mediaStatus") == "AIProcessed"


@pytest.mark.slow
@pytest.mark.timeout(300)
def test_nft_res_lim_01_worker_limit_concurrent_detect(
    warm_engine, http_client, image_small
):
    """Fire four detect requests concurrently and check that completion
    times cluster into two distinct batches, evidencing a server-side
    worker limit smaller than the request count."""

    def _timed_detect(client, payload):
        # POST the image and record wall-clock start/end around the call.
        started = time.monotonic()
        response = client.post(
            "/detect",
            files={"file": ("img.jpg", payload, "image/jpeg")},
            timeout=120,
        )
        return started, time.monotonic(), response

    with ThreadPoolExecutor(max_workers=4) as pool:
        futures = [
            pool.submit(_timed_detect, http_client, image_small) for _ in range(4)
        ]
        outcomes = [fut.result() for fut in futures]

    assert all(resp.status_code == 200 for _, _, resp in outcomes)

    finish_times = sorted(end for _, end, _ in outcomes)
    # Spread inside each presumed batch of two vs the gap between batches:
    # a real worker limit makes the inter-batch gap dominate.
    first_batch_spread = finish_times[1] - finish_times[0]
    second_batch_spread = finish_times[3] - finish_times[2]
    batch_gap = finish_times[2] - finish_times[1]
    within = max(first_batch_spread, second_batch_spread, 1e-6)
    assert batch_gap > within * 1.5


@pytest.mark.slow
@pytest.mark.timeout(120)
def test_nft_res_lim_03_max_detections_per_frame(
    warm_engine, http_client, image_dense
):
    """A detection-dense image must not yield more than the 300-detection
    per-frame cap enforced by the service."""
    response = http_client.post(
        "/detect",
        files={"file": ("img.jpg", image_dense, "image/jpeg")},
        timeout=120,
    )
    assert response.status_code == 200

    detections = response.json()
    assert isinstance(detections, list)
    assert len(detections) <= 300


@pytest.mark.slow
def test_nft_res_lim_04_log_file_rotation(warm_engine, http_client, image_small):
    """After at least one inference, a dated ``log_inference_YYYYMMDD.txt``
    should exist in whichever log directory is reachable from the runner."""
    # Trigger one inference so the service has something to log.
    http_client.post(
        "/detect",
        files={"file": ("img.jpg", image_small, "image/jpeg")},
        timeout=60,
    )

    search_dirs = (
        Path(__file__).resolve().parent.parent / "logs",
        Path("/app/Logs"),
    )
    log_dir = next((d for d in search_dirs if d.is_dir()), None)
    if log_dir is None:
        pytest.skip("Log directory not accessible from e2e-runner container")

    # NOTE(review): assumes the service stamps filenames with the same local
    # date as this runner — confirm if the service logs in UTC.
    stamp = datetime.now().strftime("%Y%m%d")
    expected = f"log_inference_{stamp}.txt"
    names = {entry.name for entry in log_dir.iterdir() if entry.is_file()}
    if expected not in names:
        # Fallback: accept any dated inference log (e.g. a midnight rollover).
        dated = re.compile(r"^log_inference_\d{8}\.txt$")
        assert any(dated.match(name) for name in names), names