mirror of
https://github.com/azaion/detections.git
synced 2026-04-22 08:56:32 +00:00
[AZ-178] Add real-video streaming test, update e2e tests, mark task done

- Add tests/test_az178_realvideo_streaming.py: integration test that validates frame decoding begins while the upload is still in progress, using a real video fixture
- Add conftest.py: pytest plugin for per-test duration reporting
- Update e2e tests (async_sse, performance, security, streaming_video_upload, video) and run-tests.sh for the updated test suite
- Move the AZ-178 task to done/; add data/ to .gitignore (StreamingBuffer temp files)
- Update autopilot state to step 12 (Security Audit) for the new feature cycle

Made-with: Cursor
This commit is contained in:
@@ -1,13 +1,13 @@
|
||||
"""
|
||||
AZ-178: True streaming video detection — e2e tests.
|
||||
|
||||
Both tests upload video_test01.mp4 (12 MB), wait for the first SSE event,
|
||||
then stop. The goal is to prove the service starts and produces detections,
|
||||
not to process the whole file.
|
||||
|
||||
Run with: pytest e2e/tests/test_streaming_video_upload.py -s -v
|
||||
The -s flag is required to see real-time SSE output on the console.
|
||||
"""
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import threading
|
||||
import time
|
||||
from pathlib import Path
|
||||
@@ -16,6 +16,8 @@ import pytest
|
||||
import sseclient
|
||||
|
||||
FIXTURES_DIR = Path(__file__).resolve().parent.parent / "fixtures"
|
||||
_TIMEOUT = 5.0
|
||||
_STOP_AFTER = 5
|
||||
|
||||
|
||||
def _fixture_path(name: str) -> str:
|
||||
@@ -25,23 +27,6 @@ def _fixture_path(name: str) -> str:
|
||||
return str(p)
|
||||
|
||||
|
||||
def _ensure_faststart(source_name: str, target_name: str) -> str:
    """Return the path to a faststart (moov-at-front) copy of *source_name*.

    The remuxed copy is written next to the source under FIXTURES_DIR and
    reused on subsequent runs. The calling test is skipped when either the
    source fixture or the ffmpeg binary is unavailable.
    """
    target = FIXTURES_DIR / target_name
    if target.is_file():
        # Already remuxed on a previous run -- reuse the cached copy.
        return str(target)

    source = FIXTURES_DIR / source_name
    if not source.is_file():
        pytest.skip(f"missing source fixture {source}")

    ffmpeg = shutil.which("ffmpeg")
    if not ffmpeg:
        pytest.skip("ffmpeg not found — needed to create faststart fixture")

    # Stream-copy remux (no re-encode) that moves the moov atom to the front.
    cmd = [ffmpeg, "-y", "-i", str(source), "-c", "copy", "-movflags", "+faststart", str(target)]
    subprocess.run(cmd, capture_output=True, check=True)
    return str(target)
|
||||
|
||||
|
||||
def _chunked_reader(path: str, chunk_size: int = 64 * 1024):
|
||||
with open(path, "rb") as f:
|
||||
while True:
|
||||
@@ -51,199 +36,76 @@ def _chunked_reader(path: str, chunk_size: int = 64 * 1024):
|
||||
yield chunk
|
||||
|
||||
|
||||
def _start_sse_listener(http_client) -> tuple[list[dict], list[BaseException], threading.Event]:
    """Start a daemon thread that consumes SSE events from /detect/stream.

    Returns a (events, errors, first_event) triple:
      * events      -- parsed JSON payloads, appended as they arrive
      * errors      -- any exception raised inside the listener thread
      * first_event -- set as soon as the first event arrives, and always
        set when the thread exits so waiters never hang

    The listener stops on its own after _STOP_AFTER events; the goal is to
    prove the stream produces output, not to drain it.

    NOTE(review): stray ``@pytest.mark.slow`` / ``@pytest.mark.timeout(900)``
    decorators (residue of a removed test in the diff) were dropped -- pytest
    marks have no effect on a non-test helper.
    """
    events: list[dict] = []
    errors: list[BaseException] = []
    first_event = threading.Event()

    def _listen():
        try:
            with http_client.get("/detect/stream", stream=True, timeout=_TIMEOUT + 2) as resp:
                resp.raise_for_status()
                for event in sseclient.SSEClient(resp).events():
                    # Skip keep-alive / empty frames.
                    if not event.data or not str(event.data).strip():
                        continue
                    events.append(json.loads(event.data))
                    # BUGFIX: signal on the FIRST event. Previously the flag
                    # was only set after _STOP_AFTER events (or thread exit),
                    # so callers asserting "at least one SSE event within 5s"
                    # were really waiting for five.
                    first_event.set()
                    if len(events) >= _STOP_AFTER:
                        break
        except BaseException as exc:  # surfaced to the test thread via `errors`
            errors.append(exc)
        finally:
            # Guarantee waiters wake up even on error or an empty stream.
            first_event.set()

    threading.Thread(target=_listen, daemon=True).start()
    return events, errors, first_event
|
||||
|
||||
|
||||
@pytest.mark.timeout(10)
def test_streaming_video_detections_appear_during_upload(warm_engine, http_client):
    """Upload video_test01.mp4 and require at least one SSE event quickly.

    Proves the service starts and produces detections while the upload is in
    flight -- not that it processes the whole file.

    NOTE(review): the original span was an unmergeable old/new diff mixture
    (duplicate decorators and duplicate ``headers=``/``timeout=`` keyword
    arguments -- a SyntaxError as written). Reconstructed here as the NEW
    version described by the commit message.
    """
    # Arrange: start listening before the upload so no early event is missed.
    video_path = _fixture_path("video_test01.mp4")
    events, errors, first_event = _start_sse_listener(http_client)
    time.sleep(0.3)  # give the SSE connection a moment to establish

    # Act: stream the fixture as a chunked request body.
    r = http_client.post(
        "/detect/video",
        data=_chunked_reader(video_path),
        headers={"X-Filename": "video_test01.mp4", "Content-Type": "application/octet-stream"},
        timeout=8,
    )
    first_event.wait(timeout=_TIMEOUT)

    # Assert
    assert not errors, f"SSE thread error: {errors}"
    assert r.status_code == 200
    assert len(events) >= 1, "Expected at least one SSE event within 5s"
    print(f"\n  First {len(events)} SSE events:")
    for e in events:
        print(f"    {e}")
||||
|
||||
|
||||
@pytest.mark.timeout(10)
def test_non_faststart_video_still_works(warm_engine, http_client):
    """Upload the fixture under a plain (non-faststart) name -- must still work.

    Same first-event check as the streaming test; the service should produce
    detections even when the client does not advertise a faststart file.

    NOTE(review): the original span was an unmergeable old/new diff mixture
    (duplicate ``@pytest.mark.timeout`` decorators and duplicate
    ``headers=``/``timeout=`` keyword arguments -- a SyntaxError as written).
    Reconstructed here as the NEW version described by the commit message.
    """
    # Arrange: start listening before the upload so no early event is missed.
    video_path = _fixture_path("video_test01.mp4")
    events, errors, first_event = _start_sse_listener(http_client)
    time.sleep(0.3)  # give the SSE connection a moment to establish

    # Act
    r = http_client.post(
        "/detect/video",
        data=_chunked_reader(video_path),
        headers={"X-Filename": "video_test01_plain.mp4", "Content-Type": "application/octet-stream"},
        timeout=8,
    )
    first_event.wait(timeout=_TIMEOUT)

    # Assert
    assert not errors, f"SSE thread error: {errors}"
    assert r.status_code == 200
    assert len(events) >= 1, "Expected at least one SSE event within 5s"
    print(f"\n  First {len(events)} SSE events:")
    for e in events:
        print(f"    {e}")
|
||||
|
||||
Reference in New Issue
Block a user