mirror of
https://github.com/azaion/loader.git
synced 2026-04-22 07:06:33 +00:00
[AZ-185][AZ-186] Batch 2
Made-with: Cursor
This commit is contained in:
@@ -0,0 +1,331 @@
|
||||
import gzip
|
||||
import importlib.util
|
||||
import io
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
import unittest
|
||||
from pathlib import Path
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
import yaml
|
||||
|
||||
from download_manager import decrypt_cbc_file
|
||||
|
||||
# Repository root, resolved relative to this test file (tests/ is one level down).
_ROOT = Path(__file__).resolve().parents[1]
# Publisher script under test; loaded dynamically by _load_publish().
_SCRIPT = _ROOT / "scripts" / "publish_artifact.py"
# CI pipeline definition checked by the woodpecker step-ordering test.
_WOODPECKER = _ROOT / ".woodpecker" / "build-arm.yml"
|
||||
|
||||
|
||||
def _load_publish():
    """Import ``scripts/publish_artifact.py`` as module ``publish_artifact``.

    The script lives outside the package path, so it is loaded from its
    file location rather than via a normal ``import`` statement.
    """
    module_spec = importlib.util.spec_from_file_location("publish_artifact", _SCRIPT)
    loaded = importlib.util.module_from_spec(module_spec)
    assert module_spec.loader is not None
    module_spec.loader.exec_module(loaded)
    return loaded
|
||||
|
||||
|
||||
def _s3_client_factory(storage):
|
||||
def client(service_name, **kwargs):
|
||||
if service_name != "s3":
|
||||
raise AssertionError(service_name)
|
||||
m = MagicMock()
|
||||
|
||||
def upload_fileobj(body, bucket, key):
|
||||
storage.setdefault(bucket, {})[key] = body.read()
|
||||
|
||||
m.upload_fileobj.side_effect = upload_fileobj
|
||||
|
||||
def get_object(Bucket=None, Key=None):
|
||||
return {"Body": io.BytesIO(storage[Bucket][Key])}
|
||||
|
||||
m.get_object.side_effect = get_object
|
||||
return m
|
||||
|
||||
return client
|
||||
|
||||
|
||||
class TestPublishArtifact(unittest.TestCase):
    """End-to-end tests for scripts/publish_artifact.py with mocked S3/HTTP."""

    def setUp(self):
        # Holds the active patch.dict on os.environ so tearDown can undo it.
        self._env_patch = None

    def tearDown(self):
        if self._env_patch:
            self._env_patch.stop()

    def _base_env(self):
        """Minimal environment the publisher script requires."""
        return {
            "S3_ENDPOINT": "https://s3.example.test",
            "S3_ACCESS_KEY": "ak",
            "S3_SECRET_KEY": "sk",
            "S3_BUCKET": "test-bucket",
            "ADMIN_API_URL": "https://admin.example.test",
            "ADMIN_API_TOKEN": "token",
        }

    def test_ac1_end_to_end_publish(self):
        """publish() uploads to S3, registers via POST, and reports a matching sha256."""
        # Arrange
        mod = _load_publish()
        env = self._base_env()
        self._env_patch = patch.dict(os.environ, env, clear=False)
        self._env_patch.start()
        captured = {}
        storage = {}

        def fake_post(url, headers=None, json=None, timeout=None):
            class R:
                status_code = 200

                def raise_for_status(self):
                    pass

            captured["url"] = url
            captured["body"] = json
            return R()

        fd, src = tempfile.mkstemp()
        os.close(fd)
        try:
            with open(src, "wb") as f:
                f.write(b"artifact-bytes")
            with patch.object(
                mod.boto3, "client", side_effect=_s3_client_factory(storage)
            ), patch.object(mod.requests, "post", side_effect=fake_post):
                # Act
                out = mod.publish(
                    src,
                    "loader",
                    "dev",
                    "arm64",
                    "v1",
                )
            # Assert
            self.assertEqual(
                out["object_key"],
                "dev/loader-arm64-v1.enc",
            )
            key = out["object_key"]
            body = storage["test-bucket"][key]
            # The stored (encrypted) object must hash to the reported sha256.
            h = __import__("hashlib").sha256(body).hexdigest().lower()
            self.assertEqual(h, out["sha256"])
            # Registration body must agree with publish()'s return value.
            self.assertEqual(captured["body"]["sha256"], out["sha256"])
            self.assertEqual(captured["body"]["size_bytes"], len(body))
            self.assertEqual(captured["body"]["encryption_key"], out["encryption_key_hex"])
            self.assertEqual(captured["body"]["cdn_url"], out["cdn_url"])
        finally:
            os.unlink(src)

    def test_ac2_woodpecker_publish_step_after_build(self):
        """CI pipeline must run publish-artifact after build-push, with expected commands."""
        # Arrange
        raw = _WOODPECKER.read_text(encoding="utf-8")
        # Act
        doc = yaml.safe_load(raw)
        names = [s["name"] for s in doc["steps"]]
        # Assert
        self.assertIn("build-push", names)
        self.assertIn("publish-artifact", names)
        # Ordering: build must precede publish.
        self.assertLess(names.index("build-push"), names.index("publish-artifact"))
        build_cmds = "\n".join(doc["steps"][names.index("build-push")]["commands"])
        self.assertIn("docker save", build_cmds)
        pub_cmds = "\n".join(doc["steps"][names.index("publish-artifact")]["commands"])
        self.assertIn("publish_artifact.py", pub_cmds)
        self.assertIn("loader-image.tar", pub_cmds)

    def test_ac3_unique_key_per_publish(self):
        """Each publish() call must generate a fresh 32-byte encryption key."""
        # Arrange
        mod = _load_publish()
        self._env_patch = patch.dict(os.environ, self._base_env(), clear=False)
        self._env_patch.start()
        keys = []
        storage = {}

        def capture_post(url, headers=None, json=None, timeout=None):
            keys.append(json["encryption_key"])

            class R:
                status_code = 200

                def raise_for_status(self):
                    pass

            return R()

        fd, src = tempfile.mkstemp()
        os.close(fd)
        try:
            with open(src, "wb") as f:
                f.write(b"x")
            with patch.object(
                mod.boto3, "client", side_effect=_s3_client_factory(storage)
            ), patch.object(mod.requests, "post", side_effect=capture_post):
                # Act
                mod.publish(src, "r", "dev", "arm64", "1")
                mod.publish(src, "r", "dev", "arm64", "2")
            # Assert
            self.assertEqual(len(keys), 2)
            self.assertNotEqual(keys[0], keys[1])
            # Keys are hex-encoded AES-256 keys: 32 raw bytes each.
            self.assertEqual(len(bytes.fromhex(keys[0])), 32)
            self.assertEqual(len(bytes.fromhex(keys[1])), 32)
        finally:
            os.unlink(src)

    def test_ac4_sha256_matches_s3_object_and_registration(self):
        """The sha256 POSTed to the admin API matches the bytes actually stored in S3."""
        # Arrange
        mod = _load_publish()
        self._env_patch = patch.dict(os.environ, self._base_env(), clear=False)
        self._env_patch.start()
        posted = {}
        storage = {}

        def fake_post(url, headers=None, json=None, timeout=None):
            posted.update(json)

            class R:
                status_code = 200

                def raise_for_status(self):
                    pass

            return R()

        fd, src = tempfile.mkstemp()
        os.close(fd)
        try:
            with open(src, "wb") as f:
                f.write(b"payload-for-hash")
            with patch.object(
                mod.boto3, "client", side_effect=_s3_client_factory(storage)
            ), patch.object(mod.requests, "post", side_effect=fake_post):
                # Act
                out = mod.publish(src, "m", "stage", "arm64", "9.9.9")
            key = out["object_key"]
            body = storage["test-bucket"][key]
            expect = __import__("hashlib").sha256(body).hexdigest().lower()
            # Assert
            self.assertEqual(posted["sha256"], expect)
            self.assertEqual(out["sha256"], expect)
        finally:
            os.unlink(src)

    def test_ac5_main_entry_matches_cli_invocation(self):
        """main(argv) with full CLI flags publishes under the expected object key."""
        # Arrange
        mod = _load_publish()
        self._env_patch = patch.dict(os.environ, self._base_env(), clear=False)
        self._env_patch.start()
        storage = {}

        def ok_post(url, headers=None, json=None, timeout=None):
            class R:
                status_code = 200

                def raise_for_status(self):
                    pass

            return R()

        fd, src = tempfile.mkstemp()
        os.close(fd)
        try:
            with open(src, "wb") as f:
                f.write(b"cli-data")
            with patch.object(
                mod.boto3, "client", side_effect=_s3_client_factory(storage)
            ), patch.object(mod.requests, "post", side_effect=ok_post):
                # Act
                code = mod.main(
                    [
                        "--file",
                        src,
                        "--resource-name",
                        "model",
                        "--dev-stage",
                        "dev",
                        "--architecture",
                        "arm64",
                        "--version",
                        "0.0.1",
                    ]
                )
            # Assert
            self.assertEqual(code, 0)
            self.assertGreater(
                len(storage["test-bucket"]["dev/model-arm64-0.0.1.enc"]), 0
            )
        finally:
            os.unlink(src)

    def test_ac5_cli_help_exits_zero(self):
        """`publish_artifact.py --help` exits 0 and documents --resource-name."""
        # Act
        r = subprocess.run(
            [sys.executable, str(_SCRIPT), "--help"],
            cwd=str(_ROOT),
            capture_output=True,
            text=True,
        )
        # Assert
        self.assertEqual(r.returncode, 0)
        self.assertIn("--resource-name", r.stdout)

    def test_ac5_subprocess_script_missing_env_exits_nonzero(self):
        """Running the script without the required S3/admin env vars must fail."""
        # Arrange
        fd, path = tempfile.mkstemp()
        os.close(fd)
        try:
            # Keep only process-essential vars so the script sees no S3_* config.
            minimal_env = {
                k: v
                for k, v in os.environ.items()
                if k in ("PATH", "HOME", "TMPDIR", "SYSTEMROOT")
            }
            # Act
            r = subprocess.run(
                [
                    sys.executable,
                    str(_SCRIPT),
                    "--file",
                    path,
                    "--resource-name",
                    "x",
                    "--dev-stage",
                    "d",
                    "--architecture",
                    "arm64",
                    "--version",
                    "1",
                ],
                cwd=str(_ROOT),
                env=minimal_env,
                capture_output=True,
                text=True,
            )
            # Assert
            self.assertNotEqual(r.returncode, 0)
        finally:
            os.unlink(path)

    def test_encryption_compatible_with_decrypt_cbc_file(self):
        """gzip + encrypt on the publisher side round-trips through decrypt_cbc_file."""
        # Arrange
        mod = _load_publish()
        aes_key = os.urandom(32)
        fd, plain = tempfile.mkstemp()
        os.close(fd)
        gz_path = tempfile.NamedTemporaryFile(delete=False, suffix=".gz").name
        enc_path = tempfile.NamedTemporaryFile(delete=False, suffix=".enc").name
        dec_path = tempfile.NamedTemporaryFile(delete=False, suffix=".bin").name
        try:
            with open(plain, "wb") as f:
                f.write(b"round-trip-plain")
            mod.gzip_file(plain, gz_path)
            mod.encrypt_aes256_cbc_file(gz_path, enc_path, aes_key)
            # Act
            decrypt_cbc_file(enc_path, aes_key, dec_path)
            with open(dec_path, "rb") as f:
                restored = gzip.decompress(f.read())
            # Assert
            self.assertEqual(restored, b"round-trip-plain")
        finally:
            for p in (plain, gz_path, enc_path, dec_path):
                try:
                    os.unlink(p)
                except OSError:
                    pass
|
||||
@@ -0,0 +1,351 @@
|
||||
import json
|
||||
import os
|
||||
import subprocess
|
||||
import tempfile
|
||||
import unittest
|
||||
from typing import List
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
from download_manager import ResumableDownloadManager
|
||||
from update_manager import UpdateManager, maybe_start_update_background
|
||||
from version_collector import VersionCollector
|
||||
|
||||
|
||||
class TestUpdateManager(unittest.TestCase):
    """Tests for UpdateManager's polling loop and update-application logic."""

    def _make_manager(
        self,
        tmp: str,
        *,
        post_get_update=None,
        subprocess_run=None,
        head_content_length=None,
        wait_fn=None,
        stop_event=None,
    ):
        """Build an UpdateManager wired with injectable test doubles.

        Returns (update_manager, download_manager, version_collector).
        """
        dm_dir = os.path.join(tmp, "dm")
        model_dir = os.path.join(tmp, "models")
        state_path = os.path.join(dm_dir, "update_orchestrator.json")
        os.makedirs(model_dir, exist_ok=True)
        dm = ResumableDownloadManager(dm_dir)
        vc = VersionCollector(model_dir, subprocess_run=subprocess_run or MagicMock())
        um = UpdateManager(
            "http://api.test",
            lambda: "tok",
            dm,
            vc,
            os.path.join(tmp, "compose.yml"),
            model_dir,
            state_path,
            interval_seconds=300.0,
            subprocess_run=subprocess_run,
            post_get_update=post_get_update,
            head_content_length=head_content_length,
            wait_fn=wait_fn,
            stop_event=stop_event,
        )
        return um, dm, vc

    def test_ac2_background_loop_polls_on_schedule(self):
        """run_forever() posts once per interval and passes the configured wait."""
        # Arrange
        tmp = tempfile.mkdtemp()
        posts: List[dict] = []

        def post(token, body):
            posts.append({"token": token, "body": body})
            return []

        waits: List[float] = []

        def wait_fn(interval):
            waits.append(interval)
            # Truthy return (after two waits) stops the loop.
            return len(waits) >= 2

        def fake_run(cmd, **kwargs):
            if cmd[:3] == ["docker", "images", "--format"]:
                return subprocess.CompletedProcess(cmd, 0, stdout="", stderr="")
            raise AssertionError(cmd)

        um, _, _ = self._make_manager(
            tmp,
            post_get_update=post,
            subprocess_run=fake_run,
            head_content_length=lambda url, token: 1,
        )
        # NOTE(review): wait_fn is passed positionally here via keyword below.
        # Act
        um.run_forever()
        # Assert
        self.assertEqual(len(posts), 2)
        self.assertEqual(waits, [300.0, 300.0])

    def test_ac2_default_interval_is_five_minutes(self):
        """Constructing without interval_seconds defaults to 300 seconds."""
        # Arrange / Act
        tmp = tempfile.mkdtemp()
        dm_dir = os.path.join(tmp, "dm")
        model_dir = os.path.join(tmp, "m")
        os.makedirs(model_dir, exist_ok=True)
        dm = ResumableDownloadManager(dm_dir)
        vc = VersionCollector(model_dir, subprocess_run=MagicMock())
        um = UpdateManager(
            "http://x",
            lambda: None,
            dm,
            vc,
            "c.yml",
            model_dir,
            os.path.join(dm_dir, "st.json"),
        )
        # Assert
        self.assertEqual(um._interval, 300.0)

    def test_ac3_ai_model_update_applied(self):
        """A detection_model update is fetched and written as azaion-<version>.trt."""
        # Arrange
        tmp = tempfile.mkdtemp()
        model_dir = os.path.join(tmp, "models")
        os.makedirs(model_dir, exist_ok=True)

        def fake_run(cmd, **kwargs):
            if cmd[:3] == ["docker", "images", "--format"]:
                return subprocess.CompletedProcess(cmd, 0, stdout="", stderr="")
            raise AssertionError(cmd)

        dm_mock = MagicMock()

        def post(token, body):
            return [
                {
                    "resourceName": "detection_model",
                    "version": "2026-04-20",
                    "cdnUrl": "http://cdn/x",
                    "sha256": "ab",
                    "encryptionKey": "k",
                }
            ]

        um, _, _ = self._make_manager(
            tmp,
            post_get_update=post,
            subprocess_run=fake_run,
            head_content_length=lambda url, token: 4,
        )
        # Replace the real download manager with a mock that writes a stub file.
        um._download_manager = dm_mock

        def capture_fetch(job_id, url, sha256, size, decryption_key, output_plaintext_path):
            with open(output_plaintext_path, "wb") as f:
                f.write(b"trt")

        dm_mock.fetch_decrypt_verify.side_effect = capture_fetch
        # Act
        um._tick_once()
        # Assert
        dm_mock.fetch_decrypt_verify.assert_called_once()
        args, kwargs = dm_mock.fetch_decrypt_verify.call_args
        self.assertTrue(args[5].endswith("azaion-2026-04-20.trt"))
        self.assertTrue(os.path.isfile(os.path.join(model_dir, "azaion-2026-04-20.trt")))

    def test_ac4_docker_image_update_applied(self):
        """An image update triggers exactly one docker load and one compose call."""
        # Arrange
        tmp = tempfile.mkdtemp()
        recorded: List[List[str]] = []

        def fake_run(cmd, **kwargs):
            recorded.append(list(cmd))
            if cmd[:3] == ["docker", "images", "--format"]:
                return subprocess.CompletedProcess(cmd, 0, stdout="", stderr="")
            if cmd[:3] == ["docker", "load", "-i"]:
                return subprocess.CompletedProcess(cmd, 0, stdout="", stderr="")
            if cmd[:2] == ["docker", "compose"]:
                return subprocess.CompletedProcess(cmd, 0, stdout="", stderr="")
            raise AssertionError(cmd)

        dm_mock = MagicMock()

        def post(token, body):
            return [
                {
                    "resourceName": "annotations",
                    "version": "2026-04-13",
                    "cdnUrl": "http://cdn/a",
                    "sha256": "cd",
                    "encryptionKey": "k",
                }
            ]

        um, _, _ = self._make_manager(
            tmp,
            post_get_update=post,
            subprocess_run=fake_run,
            head_content_length=lambda url, token: 8,
        )
        um._download_manager = dm_mock

        def capture_fetch(job_id, url, sha256, size, decryption_key, output_plaintext_path):
            with open(output_plaintext_path, "wb") as f:
                f.write(b"tarbytes")

        dm_mock.fetch_decrypt_verify.side_effect = capture_fetch
        # Act
        um._tick_once()
        # Assert
        loads = [c for c in recorded if c[:3] == ["docker", "load", "-i"]]
        composes = [c for c in recorded if c[:2] == ["docker", "compose"]]
        self.assertEqual(len(loads), 1)
        self.assertEqual(len(composes), 1)
        self.assertIn("annotations", composes[0])

    def test_ac5_self_update_applied_last(self):
        """The loader's own update is applied after all other resources."""
        # Arrange
        tmp = tempfile.mkdtemp()
        recorded: List[str] = []

        def fake_run(cmd, **kwargs):
            if cmd[:3] == ["docker", "images", "--format"]:
                return subprocess.CompletedProcess(cmd, 0, stdout="", stderr="")
            if cmd[:3] == ["docker", "load", "-i"]:
                return subprocess.CompletedProcess(cmd, 0, stdout="", stderr="")
            if cmd[:2] == ["docker", "compose"]:
                # Record the compose target (last arg) to check ordering.
                recorded.append(cmd[-1])
                return subprocess.CompletedProcess(cmd, 0, stdout="", stderr="")
            raise AssertionError(cmd)

        dm_mock = MagicMock()

        def post(token, body):
            # Server lists "loader" first; the manager must still apply it last.
            return [
                {
                    "resourceName": "loader",
                    "version": "v2",
                    "cdnUrl": "http://cdn/l",
                    "sha256": "00",
                    "encryptionKey": "k",
                },
                {
                    "resourceName": "annotations",
                    "version": "v1",
                    "cdnUrl": "http://cdn/a",
                    "sha256": "11",
                    "encryptionKey": "k",
                },
            ]

        um, _, _ = self._make_manager(
            tmp,
            post_get_update=post,
            subprocess_run=fake_run,
            head_content_length=lambda url, token: 1,
        )
        um._download_manager = dm_mock

        def capture_fetch(job_id, url, sha256, size, decryption_key, output_plaintext_path):
            with open(output_plaintext_path, "wb") as f:
                f.write(b"x")

        dm_mock.fetch_decrypt_verify.side_effect = capture_fetch
        # Act
        um._tick_once()
        # Assert
        self.assertEqual(recorded, ["annotations", "loader"])

    def test_ac6_invalidate_after_docker_apply(self):
        """Applying a docker update invalidates the VersionCollector cache."""
        # Arrange
        tmp = tempfile.mkdtemp()

        def fake_run(cmd, **kwargs):
            if cmd[:3] == ["docker", "images", "--format"]:
                return subprocess.CompletedProcess(cmd, 0, stdout="", stderr="")
            if cmd[:3] == ["docker", "load", "-i"]:
                return subprocess.CompletedProcess(cmd, 0, stdout="", stderr="")
            if cmd[:2] == ["docker", "compose"]:
                return subprocess.CompletedProcess(cmd, 0, stdout="", stderr="")
            raise AssertionError(cmd)

        dm_mock = MagicMock()

        def post(token, body):
            return [
                {
                    "resourceName": "annotations",
                    "version": "v9",
                    "cdnUrl": "http://cdn/a",
                    "sha256": "11",
                    "encryptionKey": "k",
                }
            ]

        um, _, vc = self._make_manager(
            tmp,
            post_get_update=post,
            subprocess_run=fake_run,
            head_content_length=lambda url, token: 1,
        )
        um._download_manager = dm_mock

        def capture_fetch(job_id, url, sha256, size, decryption_key, output_plaintext_path):
            with open(output_plaintext_path, "wb") as f:
                f.write(b"x")

        dm_mock.fetch_decrypt_verify.side_effect = capture_fetch
        # Prime the cache so the invalidation is observable.
        vc.collect()
        self.assertIsNotNone(vc._cache)
        # Act
        um._tick_once()
        # Assert
        self.assertIsNone(vc._cache)

    def test_maybe_start_skips_without_download_state_dir(self):
        """Without LOADER_DOWNLOAD_STATE_DIR set, startup is a no-op (no error)."""
        # Arrange
        old = os.environ.pop("LOADER_DOWNLOAD_STATE_DIR", None)
        try:

            def get_client():
                return MagicMock()

            # Act
            maybe_start_update_background(get_client, "http://x")
        finally:
            # Restore the variable if the outer environment had it.
            if old is not None:
                os.environ["LOADER_DOWNLOAD_STATE_DIR"] = old

    def test_pending_compose_drained_on_startup(self):
        """Pending compose targets persisted in state are replayed then cleared."""
        # Arrange
        tmp = tempfile.mkdtemp()
        dm_dir = os.path.join(tmp, "dm")
        os.makedirs(dm_dir, exist_ok=True)
        state_path = os.path.join(dm_dir, "update_orchestrator.json")
        with open(state_path, "w", encoding="utf-8") as f:
            json.dump({"pending_compose": ["annotations", "loader"]}, f)
        recorded: List[str] = []

        def fake_run(cmd, **kwargs):
            if cmd[:3] == ["docker", "images", "--format"]:
                return subprocess.CompletedProcess(cmd, 0, stdout="", stderr="")
            if cmd[:2] == ["docker", "compose"]:
                recorded.append(cmd[-1])
                return subprocess.CompletedProcess(cmd, 0, stdout="", stderr="")
            raise AssertionError(cmd)

        model_dir = os.path.join(tmp, "m")
        os.makedirs(model_dir, exist_ok=True)
        dm = ResumableDownloadManager(dm_dir)
        vc = VersionCollector(model_dir, subprocess_run=fake_run)
        um = UpdateManager(
            "http://api.test",
            lambda: None,
            dm,
            vc,
            os.path.join(tmp, "compose.yml"),
            model_dir,
            state_path,
            subprocess_run=fake_run,
        )
        # Act
        um._drain_pending_compose()
        # Assert
        self.assertEqual(recorded, ["annotations", "loader"])
        with open(state_path, encoding="utf-8") as f:
            data = json.load(f)
        self.assertEqual(data.get("pending_compose"), [])
|
||||
|
||||
|
||||
# Allow running this test module directly (python <file>.py) outside pytest.
if __name__ == "__main__":
    unittest.main()
|
||||
@@ -0,0 +1,65 @@
|
||||
import os
|
||||
import subprocess
|
||||
import tempfile
|
||||
import unittest
|
||||
from version_collector import VersionCollector
|
||||
|
||||
|
||||
class TestVersionCollector(unittest.TestCase):
    """Tests for VersionCollector: local-state discovery and cache invalidation."""

    def test_ac1_version_collector_reads_local_state(self):
        """Collector reports model version from .trt filename and image tag from docker."""
        # Arrange
        tmp = tempfile.mkdtemp()
        # A model file whose name encodes its version.
        open(os.path.join(tmp, "azaion-2026-03-10.trt"), "wb").close()

        def fake_run(cmd, **kwargs):
            if cmd[:3] == ["docker", "images", "--format"]:
                return subprocess.CompletedProcess(
                    cmd,
                    0,
                    stdout="azaion/annotations:arm64_2026-03-01\n",
                    stderr="",
                )
            raise AssertionError(f"unexpected cmd {cmd}")

        vc = VersionCollector(tmp, subprocess_run=fake_run)
        # Act
        got = vc.collect_as_dicts()
        # Assert
        self.assertEqual(
            got,
            [
                {"resource_name": "detection_model", "version": "2026-03-10"},
                {"resource_name": "annotations", "version": "arm64_2026-03-01"},
            ],
        )

    def test_ac6_cache_invalidates_after_changes(self):
        """Results are cached until invalidate(); then filesystem changes are seen."""
        # Arrange
        tmp = tempfile.mkdtemp()
        open(os.path.join(tmp, "azaion-2026-01-01.trt"), "wb").close()

        def fake_run(cmd, **kwargs):
            if cmd[:3] == ["docker", "images", "--format"]:
                return subprocess.CompletedProcess(cmd, 0, stdout="", stderr="")
            raise AssertionError(f"unexpected cmd {cmd}")

        vc = VersionCollector(tmp, subprocess_run=fake_run)
        first = vc.collect_as_dicts()
        # New model appears on disk, but the cached result should still be served.
        open(os.path.join(tmp, "azaion-2026-02-01.trt"), "wb").close()
        second_cached = vc.collect_as_dicts()
        vc.invalidate()
        third = vc.collect_as_dicts()
        # Assert
        self.assertEqual(
            first,
            [{"resource_name": "detection_model", "version": "2026-01-01"}],
        )
        self.assertEqual(second_cached, first)
        self.assertEqual(
            third,
            [{"resource_name": "detection_model", "version": "2026-02-01"}],
        )
|
||||
|
||||
|
||||
# Allow running this test module directly (python <file>.py) outside pytest.
if __name__ == "__main__":
    unittest.main()
|
||||
Reference in New Issue
Block a user