mirror of
https://github.com/azaion/loader.git
synced 2026-04-22 06:56:31 +00:00
Add E2E tests, fix bugs
Made-with: Cursor
This commit is contained in:
Executable
+70
@@ -0,0 +1,70 @@
|
||||
#!/usr/bin/env bash
#
# Performance (latency) test driver: measures HTTP endpoint response
# times against configurable thresholds.
#
# Environment overrides:
#   BASE_URL            target server (default http://localhost:8080)
#   HEALTH_THRESHOLD_MS max allowed latency for the health endpoint (ms)
#   LOGIN_THRESHOLD_MS  max allowed latency for the login endpoint (ms)
set -euo pipefail

# Directory holding this script, and its parent (the project root).
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_DIR="$(dirname "$SCRIPT_DIR")"

# Apply defaults only when the caller has not set these in the environment.
: "${BASE_URL:=http://localhost:8080}"
: "${HEALTH_THRESHOLD_MS:=100}"
: "${LOGIN_THRESHOLD_MS:=2000}"

# No resources to tear down today; the trap is kept in place so future
# additions (temp files, background servers) have a hook.
cleanup() {
  true
}
trap cleanup EXIT

cd "$PROJECT_DIR"

echo "=== Performance Tests ==="
echo "Target: $BASE_URL"
echo ""

# Global pass/fail counters, incremented by run_latency_test.
PASS=0
FAIL=0
#######################################
# Measure request latency for one endpoint over several iterations.
# Globals:   PASS (incremented when max latency <= threshold)
#            FAIL (incremented otherwise)
# Arguments: $1 test name, $2 HTTP method, $3 URL, $4 threshold in ms,
#            $5 optional JSON request body, $6 iteration count (default 10)
# Outputs:   one PASS/FAIL summary line (avg/max/threshold) to stdout
#######################################
run_latency_test() {
  local name="$1"
  local method="$2"
  local url="$3"
  local threshold_ms="$4"
  local data="${5:-}"
  local iterations="${6:-10}"

  local total_ms=0
  local max_ms=0
  local time_ms i

  # Build the curl invocation once: the JSON body (if any) is constant
  # across iterations, so there is no need to re-branch inside the loop
  # (the original duplicated the curl call in both branches).
  local curl_args=(-s -o /dev/null -w "%{time_total}" -X "$method" "$url")
  if [[ -n "$data" ]]; then
    curl_args+=(-H "Content-Type: application/json" -d "$data")
  fi

  for ((i = 1; i <= iterations; i++)); do
    # curl reports %{time_total} in seconds as a float; convert to whole ms.
    # NOTE(review): under `set -e` a failed request aborts the whole script
    # (fail-fast); confirm that is the intended behavior for flaky targets.
    time_ms=$(curl "${curl_args[@]}" | awk '{printf "%.0f", $1 * 1000}')
    total_ms=$((total_ms + time_ms))
    if (( time_ms > max_ms )); then
      max_ms=$time_ms
    fi
  done

  local avg_ms=$((total_ms / iterations))

  # A test passes only when the WORST observed latency is within threshold.
  if (( max_ms <= threshold_ms )); then
    echo "PASS: $name — avg=${avg_ms}ms, max=${max_ms}ms (threshold: ${threshold_ms}ms)"
    PASS=$((PASS + 1))
  else
    echo "FAIL: $name — avg=${avg_ms}ms, max=${max_ms}ms (threshold: ${threshold_ms}ms)"
    FAIL=$((FAIL + 1))
  fi
}
# NFT-PERF-01: the health endpoint must answer within the configured
# threshold on every one of 100 consecutive requests.
run_latency_test "NFT-PERF-01: Health endpoint" "GET" "$BASE_URL/health" "$HEALTH_THRESHOLD_MS" "" 100

echo ""
echo "=== Results: $PASS passed, $FAIL failed ==="

# Exit non-zero when any test failed so CI can gate on this script.
if (( FAIL == 0 )); then
  exit 0
fi
exit 1
Executable
+46
@@ -0,0 +1,46 @@
|
||||
#!/usr/bin/env bash
#
# Test runner: installs dependencies, builds the extension in place,
# and runs the pytest suite. Pass --unit-only to skip tests marked "e2e".
set -euo pipefail

# Directory holding this script, and its parent (the project root).
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_DIR="$(dirname "$SCRIPT_DIR")"

# Terminate the system-under-test process, if one was started.
# NOTE(review): nothing visible in this script sets SUT_PID — presumably
# a later section or a sourced helper starts the server; confirm.
cleanup() {
  local pid="${SUT_PID:-}"
  [[ -z "$pid" ]] && return 0
  kill "$pid" 2>/dev/null || true
  wait "$pid" 2>/dev/null || true
}
trap cleanup EXIT

cd "$PROJECT_DIR"
# Optional flag: run only unit tests (exclude tests marked "e2e").
UNIT_ONLY=false
if [[ "${1:-}" == "--unit-only" ]]; then
  UNIT_ONLY=true
fi

# Install runtime deps and build the C extension in place.
# (`pipefail` ensures a failed build is not masked by `tail`.)
pip install -q -r requirements.txt
python setup.py build_ext --inplace 2>&1 | tail -1

if [[ -f requirements-test.txt ]]; then
  pip install -q -r requirements-test.txt
fi

# Ensure the junit-xml output directory exists before pytest writes to it.
mkdir -p test-results

# BUG FIX: under `set -e`, a failing pytest previously aborted the script
# immediately, so `EXIT_CODE=$?` was never reached and the FAILED banner
# never printed. Capture the status inline with `|| EXIT_CODE=$?` instead.
EXIT_CODE=0
if [[ "$UNIT_ONLY" == true ]]; then
  echo "=== Running unit tests only ==="
  pytest tests/ -v --tb=short -m "not e2e" --junitxml=test-results/results.xml || EXIT_CODE=$?
else
  echo "=== Running all tests ==="
  pytest tests/ -v --tb=short --junitxml=test-results/results.xml || EXIT_CODE=$?
fi

echo ""
if [[ $EXIT_CODE -eq 0 ]]; then
  echo "=== ALL TESTS PASSED ==="
else
  echo "=== TESTS FAILED (exit code: $EXIT_CODE) ==="
fi

# Propagate pytest's status so CI fails when the suite fails.
exit $EXIT_CODE
Reference in New Issue
Block a user