mirror of
https://github.com/azaion/annotations.git
synced 2026-04-22 12:56:30 +00:00
move python inference to Azaion.Inference folder
This commit is contained in:
@@ -0,0 +1,58 @@
|
||||
<h2>Azaion AI</h2>
|
||||
|
||||
<p>
|
||||
Azaion AI is a worker written in Cython (C-compilable Python) which listens to a socket and a RabbitMQ queue.
|
||||
It accepts commands in the format:
|
||||
|
||||
- CommandType: Inference / Load
|
||||
- Filename
|
||||
|
||||
And correspondingly does inference or just loads an encrypted file from the API.
|
||||
Results (a file or annotations) are put into the other queue, or sent back over the same socket, depending on the command source.
|
||||
</p>
|
||||
|
||||
<h2>Installation</h2>
|
||||
|
||||
<h3>Install libs</h3>
|
||||
https://www.python.org/downloads/
|
||||
|
||||
Windows
|
||||
|
||||
- [Install CUDA](https://developer.nvidia.com/cuda-12-1-0-download-archive)
|
||||
|
||||
Linux
|
||||
```
|
||||
sudo apt install nvidia-driver-535
|
||||
|
||||
wget https://developer.download.nvidia.com/compute/cudnn/9.2.0/local_installers/cudnn-local-repo-ubuntu2204-9.2.0_1.0-1_amd64.deb
|
||||
sudo dpkg -i cudnn-local-repo-ubuntu2204-9.2.0_1.0-1_amd64.deb
|
||||
|
||||
sudo cp /var/cudnn-local-repo-ubuntu2204-9.2.0/cudnn-*-keyring.gpg /usr/share/keyrings/
|
||||
sudo apt-get update
|
||||
sudo apt-get -y install cudnn nvidia-cuda-toolkit -y
|
||||
nvcc --version
|
||||
```
|
||||
|
||||
|
||||
<h3>Install dependencies</h3>
|
||||
1. Install Python 3.11 or lower; PyTorch currently supports 3.11 at most.
|
||||
|
||||
Make sure that your virtual env is installed with links to the global python packages and headers, like this:
|
||||
```
|
||||
python -m venv --system-site-packages venv
|
||||
```
|
||||
This is crucial for the build because build needs Python.h header and other files.
|
||||
|
||||
```
|
||||
python -m pip install --upgrade pip
|
||||
pip install opencv-python cython msgpack cryptography rstream pika zmq pyjwt pyinstaller tensorboard
|
||||
```
|
||||
In case of fbgemm.dll error (Windows specific):
|
||||
|
||||
- copypaste libomp140.x86_64.dll to C:\Windows\System32
|
||||
|
||||
|
||||
<h3>Build</h3>
|
||||
```
|
||||
python setup.py build_ext --inplace
|
||||
```
|
||||
@@ -0,0 +1,13 @@
|
||||
# Declarations for the recognition configuration that accompanies an
# inference command (implementation in ai_config.pyx).
cdef class AIRecognitionConfig:
    # Minimum seconds between two accepted annotations of the same video.
    cdef public double frame_recognition_seconds
    # Run detection only on every N-th decoded frame.
    cdef public int frame_period_recognition
    # Minimum class score for a detection to be kept.
    cdef public double probability_threshold

    # Distance threshold used when matching detections between frames.
    # NOTE(review): inference.pyx compares this against a *squared* pixel
    # distance -- confirm the configured value accounts for that.
    cdef public double tracking_distance_confidence
    # Confidence delta that makes a re-detected object worth re-reporting.
    cdef public double tracking_probability_increase
    # Intersection threshold for tracking (currently unused in inference.pyx).
    cdef public double tracking_intersection_threshold

    # Raw payload bytes carried with the command (may be empty).
    cdef public bytes file_data

    # Decode a config from its msgpack wire representation.
    @staticmethod
    cdef from_msgpack(bytes data)
|
||||
@@ -0,0 +1,42 @@
|
||||
from msgpack import unpackb
|
||||
|
||||
cdef class AIRecognitionConfig:
    """Recognition parameters decoded from a remote inference command."""

    def __init__(self,
                 frame_period_recognition,
                 frame_recognition_seconds,
                 probability_threshold,

                 tracking_distance_confidence,
                 tracking_probability_increase,
                 tracking_intersection_threshold,

                 file_data
                 ):
        # Frame sampling / detection settings.
        self.frame_period_recognition = frame_period_recognition
        self.frame_recognition_seconds = frame_recognition_seconds
        self.probability_threshold = probability_threshold

        # Inter-frame tracking settings.
        self.tracking_distance_confidence = tracking_distance_confidence
        self.tracking_probability_increase = tracking_probability_increase
        self.tracking_intersection_threshold = tracking_intersection_threshold

        # Raw payload carried with the command.
        self.file_data = file_data

    def __str__(self):
        fields = (
            f'frame_seconds : {self.frame_recognition_seconds}',
            f'distance_confidence : {self.tracking_distance_confidence}',
            f'probability_increase : {self.tracking_probability_increase}',
            f'intersection_threshold : {self.tracking_intersection_threshold}',
            f'frame_period_recognition : {self.frame_period_recognition}',
        )
        return ', '.join(fields)

    @staticmethod
    cdef from_msgpack(bytes data):
        """Decode a msgpack payload into an AIRecognitionConfig."""
        fields = unpackb(data, strict_map_key=False)
        return AIRecognitionConfig(
            fields.get("FramePeriodRecognition", 0),
            fields.get("FrameRecognitionSeconds", 0.0),
            fields.get("ProbabilityThreshold", 0.0),

            fields.get("TrackingDistanceConfidence", 0.0),
            fields.get("TrackingProbabilityIncrease", 0.0),
            fields.get("TrackingIntersectionThreshold", 0.0),

            fields.get("Data", b''))
|
||||
@@ -0,0 +1,10 @@
|
||||
cdef class Detection:
    # Bounding box (top-left x/y plus width/height, image pixel units per
    # inference.pyx postprocess) and the detection confidence.
    cdef public double x, y, w, h, confidence
    # Model class index of the detected object.
    cdef public int cls
|
||||
|
||||
cdef class Annotation:
    # JPEG-encoded frame bytes; set by the caller after construction.
    cdef bytes image
    # Timestamp -- milliseconds from video start (CAP_PROP_POS_MSEC in
    # inference.pyx), or 0 for still images.
    cdef long time
    # Detections found in this frame.
    cdef public list[Detection] detections
    # Pack image/time/detections into a msgpack payload.
    cdef bytes serialize(self)
|
||||
|
||||
@@ -0,0 +1,35 @@
|
||||
import msgpack
|
||||
|
||||
cdef class Detection:
    """A single detected object: bounding box, class index and confidence."""

    def __init__(self, double x, double y, double w, double h, int cls, double confidence):
        self.x, self.y = x, y
        self.w, self.h = w, h
        self.cls = cls
        self.confidence = confidence

    def __str__(self):
        box = f'{self.x:.2f} {self.y:.2f} {self.w:.2f} {self.h:.2f}'
        return f'{self.cls}: {box}, prob: {(self.confidence*100):.1f}%'
|
||||
|
||||
cdef class Annotation:
    """A timestamped frame with its detections, serializable to msgpack."""

    def __init__(self, long time, list[Detection] detections):
        self.time = time
        self.image = b''
        self.detections = [] if detections is None else detections

    cdef bytes serialize(self):
        """Pack image, timestamp and detections into a msgpack payload."""
        packed_detections = [
            {
                "x": det.x,
                "y": det.y,
                "w": det.w,
                "h": det.h,
                "c": det.cls,
                "p": det.confidence
            }
            for det in self.detections
        ]
        # Short keys keep the wire format compact:
        # "i" = image, "t" = time, "d" = detections.
        return msgpack.packb({
            "i": self.image,
            "t": self.time,
            "d": packed_detections
        })
|
||||
@@ -0,0 +1,15 @@
|
||||
from user cimport User
|
||||
|
||||
cdef class ApiClient:
    # Credentials, cached JWT, and resource folder.
    # NOTE(review): token_file and api_url are declared here but the
    # implementation reads constants.TOKEN_FILE / constants.API_URL instead
    # -- confirm whether these two fields are still needed.
    cdef str email, password, token, folder, token_file, api_url
    # Populated from the JWT claims on set_token().
    cdef User user

    # Derive the file-decryption key from credentials + hardware hash.
    cdef get_encryption_key(self, str hardware_hash)
    # Authenticate against the API and persist the token to disk.
    cdef login(self)
    # Store a JWT and populate self.user from its claims.
    cdef set_token(self, str token)
    # Return the current user, logging in first if needed.
    cdef get_user(self)

    # Download and decrypt a file from the API (retries once on 401/403).
    cdef load_bytes(self, str filename)
    cdef load_ai_model(self)
    cdef load_queue_config(self)
|
||||
|
||||
@@ -0,0 +1,114 @@
|
||||
import json
|
||||
import os
|
||||
from http import HTTPStatus
|
||||
from uuid import UUID
|
||||
import jwt
|
||||
import requests
|
||||
cimport constants
|
||||
from hardware_service cimport HardwareService, HardwareInfo
|
||||
from security cimport Security
|
||||
from io import BytesIO
|
||||
from user cimport User, RoleEnum
|
||||
|
||||
cdef class ApiClient:
    """Handles API authentication and downloading of the AI model.

    A JWT is cached on disk (constants.TOKEN_FILE) and reused across runs;
    401/403 responses trigger a re-login and one retry.
    """

    def __init__(self, str email, str password, str folder):
        self.email = email
        self.password = password
        self.folder = folder
        self.user = None

        # Reuse a previously stored token when one exists on disk.
        if os.path.exists(<str>constants.TOKEN_FILE):
            with open(<str>constants.TOKEN_FILE, "r") as file:
                self.set_token(<str>file.read().strip())
        else:
            self.token = None

    cdef get_encryption_key(self, str hardware_hash):
        """Derive the file-decryption key from credentials + hardware hash."""
        cdef str key = f'{self.email}-{self.password}-{hardware_hash}-#%@AzaionKey@%#---'
        return Security.calc_hash(key)

    cdef login(self):
        """Authenticate against the API and persist the received JWT."""
        response = requests.post(f"{constants.API_URL}/login",
                                 json={"email": self.email, "password": self.password})
        response.raise_for_status()
        token = response.json()["token"]
        self.set_token(token)
        with open(<str>constants.TOKEN_FILE, 'w') as file:
            file.write(token)

    def _auth_headers(self):
        """Request headers for authenticated JSON calls (current token)."""
        # FIX: this dict used to be duplicated inline in load_bytes, which
        # made the retry path easy to get out of sync.
        return {
            "Authorization": f"Bearer {self.token}",
            "Content-Type": "application/json"
        }

    cdef set_token(self, str token):
        """Store the JWT and populate self.user from its claims.

        Raises ValueError when the "nameid" claim is not a valid GUID.
        """
        self.token = token
        # Signature validation happens server-side; here we only read claims.
        claims = jwt.decode(token, options={"verify_signature": False})

        try:
            # FIX: renamed `id` -> `user_id` (shadowed the builtin).
            user_id = str(UUID(claims.get("nameid", "")))
        except ValueError:
            raise ValueError("Invalid GUID format in claims")

        email = claims.get("unique_name", "")

        # FIX: replaced the if/elif chain with a lookup table; unknown
        # roles still map to RoleEnum.NONE as before.
        role_str = claims.get("role", "")
        role_map = {
            "ApiAdmin": RoleEnum.ApiAdmin,
            "Admin": RoleEnum.Admin,
            "ResourceUploader": RoleEnum.ResourceUploader,
            "Validator": RoleEnum.Validator,
            "Operator": RoleEnum.Operator,
        }
        role = role_map.get(role_str, RoleEnum.NONE)
        self.user = User(user_id, email, role)

    cdef get_user(self):
        """Return the authenticated user, logging in first if needed."""
        if self.user is None:
            self.login()
        return self.user

    cdef load_bytes(self, str filename):
        """Download `filename` from the API and decrypt it for this machine.

        Retries once after a re-login on 401/403.
        """
        hardware_service = HardwareService()
        cdef HardwareInfo hardware = hardware_service.get_hardware_info()

        if self.token is None:
            self.login()

        url = f"{constants.API_URL}/resources/get/{self.folder}"

        payload = json.dumps(
            {
                "password": self.password,
                "hardware": hardware.to_json_object(),
                "fileName": filename
            }, indent=4)
        response = requests.post(url, data=payload, headers=self._auth_headers(), stream=True)

        # Token expired or rejected: log in again and retry once.
        if response.status_code == HTTPStatus.UNAUTHORIZED or response.status_code == HTTPStatus.FORBIDDEN:
            self.login()
            response = requests.post(url, data=payload, headers=self._auth_headers(), stream=True)

        if response.status_code == HTTPStatus.INTERNAL_SERVER_ERROR:
            # NOTE(review): a 500 is only logged and the body is still fed
            # to the decryptor below -- consider response.raise_for_status().
            print('500!')

        key = self.get_encryption_key(hardware.hash)

        stream = BytesIO(response.raw.read())
        data = Security.decrypt_to(stream, key)
        # NOTE(review): the "(unknown)" literal looks like a redacted
        # {filename} placeholder -- confirm against the original source.
        print(f'loaded file: (unknown), {len(data)} bytes')
        return data

    cdef load_ai_model(self):
        """Download and decrypt the ONNX model."""
        return self.load_bytes(constants.AI_MODEL_FILE)

    cdef load_queue_config(self):
        """Download and decrypt the queue configuration as UTF-8 text."""
        return self.load_bytes(constants.QUEUE_CONFIG_FILENAME).decode(encoding='utf-8')
|
||||
@@ -0,0 +1,22 @@
|
||||
REM Build a single-file Windows executable of the worker.
REM --collect-all pulls in whole packages that PyInstaller's static
REM analysis tends to miss; --hidden-import registers the Cython-compiled
REM extension modules, which are not discoverable from start.py's imports.
REM (No comments between the ^ continuation lines -- they would break the
REM command.)
pyinstaller --onefile ^
    --collect-all jwt ^
    --collect-all requests ^
    --collect-all psutil ^
    --collect-all cryptography ^
    --collect-all msgpack ^
    --collect-all expecttest ^
    --collect-all zmq ^
    --hidden-import user ^
    --hidden-import security ^
    --hidden-import secure_model ^
    --hidden-import api_client ^
    --hidden-import hardware_service ^
    --hidden-import constants ^
    --hidden-import annotation ^
    --hidden-import remote_command ^
    --hidden-import ai_config ^
    --hidden-import inference ^
    --hidden-import remote_command_handler ^
    --hidden-import cv2 ^
    --hidden-import onnxruntime ^
    start.py
|
||||
@@ -0,0 +1,12 @@
|
||||
# Shared constant declarations (values assigned in constants.pyx).
# NOTE(review): ZMQ_PORT carries an initializer here *and* in the .pyx --
# confirm the initializer belongs in the .pyx only.
cdef int ZMQ_PORT = 5127 # Port for the zmq

cdef int QUEUE_MAXSIZE # Maximum size of the command queue
cdef str COMMANDS_QUEUE # Name of the commands queue in rabbit
cdef str ANNOTATIONS_QUEUE # Name of the annotations queue in rabbit

cdef str API_URL # Base URL for the external API
cdef str TOKEN_FILE # Name of the token file where temporary token would be stored
cdef str QUEUE_CONFIG_FILENAME # queue config filename to load from api
cdef str AI_MODEL_FILE # AI Model file

cdef bytes DONE_SIGNAL # Reply bytes sent to a client when its command completes
|
||||
@@ -0,0 +1,12 @@
|
||||
# Runtime values for the constants declared in constants.pxd.
cdef int ZMQ_PORT = 5127 # Port for the zmq

cdef int QUEUE_MAXSIZE = 1000 # Maximum size of the command queue
cdef str COMMANDS_QUEUE = "azaion-commands" # RabbitMQ queue for incoming commands
cdef str ANNOTATIONS_QUEUE = "azaion-annotations" # RabbitMQ queue for results

cdef str API_URL = "https://api.azaion.com" # Base URL for the external API
cdef str TOKEN_FILE = "token" # Cached JWT location (working directory)
cdef str QUEUE_CONFIG_FILENAME = "secured-config.json" # Queue config to load from the API
cdef str AI_MODEL_FILE = "azaion.onnx" # Encrypted model filename on the API

cdef bytes DONE_SIGNAL = b"DONE" # Reply sent when a command finishes
|
||||
@@ -0,0 +1,8 @@
|
||||
cdef class HardwareInfo:
    # Human-readable hardware identifiers plus their combined hash.
    cdef str cpu, gpu, memory, mac_address, hash
    # Serialize to the JSON shape the API expects
    # (CPU / GPU / MacAddress / Memory / Hash).
    cdef to_json_object(self)
|
||||
|
||||
cdef class HardwareService:
    # True when the host OS is Windows (detected via the `ver` command).
    cdef bint is_windows
    # `interface=*` declares a default argument whose value lives in the .pyx.
    cdef get_mac_address(self, interface=*)
    # Query CPU/GPU/memory/MAC and hash them into a HardwareInfo.
    cdef HardwareInfo get_hardware_info(self)
|
||||
@@ -0,0 +1,74 @@
|
||||
import subprocess
|
||||
from security cimport Security
|
||||
import psutil
|
||||
|
||||
cdef class HardwareInfo:
    """Snapshot of the host's hardware identity used for licensing."""

    def __init__(self, str cpu, str gpu, str memory, str mac_address, str hw_hash):
        self.hash = hw_hash
        self.mac_address = mac_address
        self.cpu = cpu
        self.gpu = gpu
        self.memory = memory

    cdef to_json_object(self):
        """Return the JSON shape the API expects (key order preserved)."""
        return {
            "CPU": self.cpu,
            "GPU": self.gpu,
            "MacAddress": self.mac_address,
            "Memory": self.memory,
            "Hash": self.hash,
        }

    def __str__(self):
        parts = (
            f'CPU: {self.cpu}',
            f'GPU: {self.gpu}',
            f'Memory: {self.memory}',
            f'MAC Address: {self.mac_address}',
        )
        return '. '.join(parts)
|
||||
|
||||
cdef class HardwareService:
    """Handles hardware information retrieval and hash generation."""

    def __init__(self):
        # Detect the OS by running `ver` (a Windows shell builtin); on other
        # systems the command fails and we fall through to is_windows=False.
        try:
            res = subprocess.check_output("ver", shell=True).decode('utf-8')
            if "Microsoft Windows" in res:
                self.is_windows = True
            else:
                self.is_windows = False
        except Exception:
            print('Error during os type checking')
            self.is_windows = False

    cdef get_mac_address(self, interface="Ethernet"):
        # Return the MAC of the named interface with '-' separators removed,
        # or None when the interface is not present.
        # NOTE(review): get_hardware_info interpolates this into the hash
        # string, so a missing interface yields the literal 'None' -- confirm
        # that is acceptable for license hashing.
        addresses = psutil.net_if_addrs()
        for interface_name, interface_info in addresses.items():
            if interface_name == interface:
                for addr in interface_info:
                    if addr.family == psutil.AF_LINK:
                        return addr.address.replace('-', '')
        return None

    cdef HardwareInfo get_hardware_info(self):
        # Build an OS-specific shell pipeline that prints, in order:
        # CPU name, GPU name, total memory.
        if self.is_windows:
            # NOTE(review): wmic is deprecated on recent Windows builds --
            # confirm availability on target machines.
            os_command = (
                "wmic CPU get Name /Value && "
                "wmic path Win32_VideoController get Name /Value && "
                "wmic OS get TotalVisibleMemorySize /Value"
            )
        else:
            # NOTE(review): the trailing `&& \"` runs an empty command at the
            # end of the pipeline -- confirm this exits successfully on the
            # target shells (check_output raises on non-zero exit).
            os_command = (
                "/bin/bash -c \" lscpu | grep 'Model name:' | cut -d':' -f2 && "
                "lspci | grep VGA | cut -d':' -f3 && "
                "free -g | grep Mem: | awk '{print $2}' && \""
            )
        # in case of subprocess error do:
        # cdef bytes os_command_bytes = os_command.encode('utf-8')
        # and use os_command_bytes
        result = subprocess.check_output(os_command, shell=True).decode('utf-8')
        lines = [line.strip() for line in result.splitlines() if line.strip()]

        # Assumes exactly CPU / GPU / memory arrive as the first three
        # non-empty lines, in that order.
        # NOTE(review): the second .replace() argument looks identical to the
        # first -- possibly a mangled double-space collapse; verify upstream.
        cdef str cpu = lines[0].replace("Name=", "").replace(" ", " ")
        cdef str gpu = lines[1].replace("Name=", "").replace(" ", " ")
        cdef str memory = lines[2].replace("TotalVisibleMemorySize=", "").replace(" ", " ")
        cdef str mac_address = self.get_mac_address()
        cdef str full_hw_str = f'Azaion_{mac_address}_{cpu}_{gpu}'

        hw_hash = Security.calc_hash(full_hw_str)
        return HardwareInfo(cpu, gpu, memory, mac_address, hw_hash)
|
||||
@@ -0,0 +1,27 @@
|
||||
from remote_command cimport RemoteCommand
|
||||
from annotation cimport Annotation
|
||||
from ai_config cimport AIRecognitionConfig
|
||||
|
||||
cdef class Inference:
    cdef object session                   # onnxruntime InferenceSession
    cdef object on_annotation             # callback invoked per accepted frame
    cdef Annotation _previous_annotation  # last annotation actually reported
    cdef AIRecognitionConfig ai_config    # active recognition settings
    cdef bint stop_signal                 # set by stop() to abort processing
    # FIX: inference.pyx assigns self.start_video_time in _process_video;
    # attributes of a cdef class must be declared here, otherwise that
    # assignment raises AttributeError at runtime.
    cdef double start_video_time

    cdef str model_input                  # name of the model's input tensor
    cdef int model_width
    cdef int model_height

    cdef bint is_video(self, str filepath)
    cdef run_inference(self, RemoteCommand cmd, int batch_size=?)
    cdef _process_video(self, RemoteCommand cmd, int batch_size)
    cdef _process_image(self, RemoteCommand cmd)
    cdef stop(self)

    cdef preprocess(self, frame)
    cdef postprocess(self, output, int img_width, int img_height)

    cdef detect_frame(self, frame, long time)
    cdef bint is_valid_annotation(self, Annotation annotation)
|
||||
@@ -0,0 +1,188 @@
|
||||
import mimetypes
|
||||
import time
|
||||
|
||||
import cv2
|
||||
import numpy as np
|
||||
import onnxruntime as onnx
|
||||
|
||||
from remote_command cimport RemoteCommand
|
||||
from annotation cimport Detection, Annotation
|
||||
from ai_config cimport AIRecognitionConfig
|
||||
|
||||
cdef class Inference:
    """Runs ONNX object detection on images/videos and emits annotations.

    The on_annotation callback is invoked as on_annotation(cmd, annotation)
    for every accepted frame.
    """

    def __init__(self, model_bytes, on_annotation):
        self.stop_signal = False
        self.session = onnx.InferenceSession(
            model_bytes, providers=["CUDAExecutionProvider", "CPUExecutionProvider"]
        )
        self.on_annotation = on_annotation
        # Defaults used until a command supplies its own config.
        self.ai_config = AIRecognitionConfig(4, 2, 0.25, 0.15, 15, 0.8, b'')
        model_inputs = self.session.get_inputs()
        self.model_input = model_inputs[0].name
        input_shape = model_inputs[0].shape
        # NOTE(review): assumes an NCHW input where shape[2]/shape[3] are the
        # spatial dims -- for non-square models confirm which is width.
        self.model_width = input_shape[2]
        self.model_height = input_shape[3]
        print(f'AI detection model input: {self.model_input} ({self.model_width}, {self.model_height})')
        model_meta = self.session.get_modelmeta()
        print("Metadata:", model_meta.custom_metadata_map)

    cdef preprocess(self, frame):
        """BGR frame -> normalized NCHW float32 tensor sized for the model."""
        img = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
        img = cv2.resize(img, (self.model_width, self.model_height))
        image_data = np.array(img) / 255.0
        image_data = np.transpose(image_data, (2, 0, 1))  # Channel first
        image_data = np.expand_dims(image_data, axis=0).astype(np.float32)
        return image_data

    cdef postprocess(self, output, int img_width, int img_height):
        """Convert raw model output into NMS-filtered Detection objects."""
        outputs = np.transpose(np.squeeze(output[0]))
        rows = outputs.shape[0]

        boxes = []
        scores = []
        class_ids = []

        # Scale factors from model space back to the original image.
        x_factor = img_width / self.model_width
        y_factor = img_height / self.model_height

        for i in range(rows):
            classes_scores = outputs[i][4:]
            max_score = np.amax(classes_scores)

            if max_score >= self.ai_config.probability_threshold:
                class_id = np.argmax(classes_scores)
                x, y, w, h = outputs[i][0], outputs[i][1], outputs[i][2], outputs[i][3]

                # Center-format box -> top-left corner + size in pixels.
                left = int((x - w / 2) * x_factor)
                top = int((y - h / 2) * y_factor)
                width = int(w * x_factor)
                height = int(h * y_factor)

                class_ids.append(class_id)
                scores.append(max_score)
                boxes.append([left, top, width, height])
        # Non-maximum suppression with a fixed 0.45 IoU threshold.
        indices = cv2.dnn.NMSBoxes(boxes, scores, self.ai_config.probability_threshold, 0.45)
        detections = []
        for i in indices:
            x, y, w, h = boxes[i]
            detections.append(Detection(x, y, w, h, class_ids[i], scores[i]))
        return detections

    cdef bint is_video(self, str filepath):
        """True when the filename's MIME type is video/*."""
        mime_type, _ = mimetypes.guess_type(<str>filepath)
        # FIX: guess_type may return None; coerce explicitly instead of
        # returning None from a bint-returning function.
        return mime_type is not None and mime_type.startswith("video")

    cdef run_inference(self, RemoteCommand cmd, int batch_size=8):
        """Entry point: apply the command's config and process its file."""
        print('run inference..')
        self.ai_config = AIRecognitionConfig.from_msgpack(cmd.data)
        self.stop_signal = False
        if self.is_video(cmd.filename):
            self._process_video(cmd, batch_size)
        else:
            self._process_image(cmd)

    cdef _process_video(self, RemoteCommand cmd, int batch_size):
        """Decode the video and run detection on every N-th frame."""
        frame_count = 0
        self._previous_annotation = None
        self.start_video_time = time.time()

        v_input = cv2.VideoCapture(<str>cmd.filename)
        try:
            # FIX: honour stop_signal so STOP_INFERENCE actually aborts the
            # loop (stop() was previously a no-op for running videos).
            while v_input.isOpened() and not self.stop_signal:
                ret, frame = v_input.read()
                if not ret or frame is None:
                    break

                frame_count += 1
                if frame_count % self.ai_config.frame_period_recognition == 0:
                    ms = int(v_input.get(cv2.CAP_PROP_POS_MSEC))
                    annotation = self.detect_frame(frame, ms)
                    if annotation is not None:
                        self._previous_annotation = annotation
                        # FIX: the callback takes (cmd, annotation) -- see
                        # _process_image and CommandProcessor.on_annotation;
                        # the one-argument call here raised TypeError.
                        self.on_annotation(cmd, annotation)
        finally:
            # FIX: release the capture handle (it was leaked).
            v_input.release()

    cdef detect_frame(self, frame, long time):
        """Detect objects in one frame; return an Annotation or None."""
        # FIX: the `time` parameter shadows the `time` module, so the
        # original `time.time()` calls raised at runtime.  Bind the clock
        # function locally; the parameter name is kept so the signature
        # still matches the declaration in inference.pxd.
        from time import time as _now

        cdef Annotation annotation
        img_height, img_width = frame.shape[:2]

        start_time = _now()
        img_data = self.preprocess(frame)
        preprocess_time = _now()
        outputs = self.session.run(None, {self.model_input: img_data})
        inference_time = _now()
        detections = self.postprocess(outputs, img_width, img_height)
        postprocess_time = _now()
        # FIX: divide by 1000.0 so sub-second video positions are not
        # truncated by integer division.
        print(f'video time, ms: {time / 1000.0:.3f}. total time, s : {postprocess_time - self.start_video_time:.3f} '
              f'preprocess time: {preprocess_time - start_time:.3f}, inference time: {inference_time - preprocess_time:.3f},'
              f' postprocess time: {postprocess_time - inference_time:.3f}, total time: {postprocess_time - start_time:.3f}')
        if len(detections) > 0:
            # FIX: Annotation.__init__ takes (time, detections); the frame
            # was being passed as a spurious first argument (TypeError).
            annotation = Annotation(time, detections)
            if self.is_valid_annotation(annotation):
                _, image = cv2.imencode('.jpg', frame)
                annotation.image = image.tobytes()
                return annotation
        return None

    cdef _process_image(self, RemoteCommand cmd):
        """Run detection once on a still image; always report a result."""
        self._previous_annotation = None
        frame = cv2.imread(<str>cmd.filename)
        annotation = self.detect_frame(frame, 0)
        if annotation is None:
            # FIX: the original built Annotation(frame, time, []), passing
            # the frame as the timestamp and the time *module* as the
            # detection list.  A still image's timestamp is 0.
            _, image = cv2.imencode('.jpg', frame)
            annotation = Annotation(0, [])
            annotation.image = image.tobytes()
        self.on_annotation(cmd, annotation)

    cdef stop(self):
        # Polled by _process_video's loop.
        self.stop_signal = True

    cdef bint is_valid_annotation(self, Annotation annotation):
        """Decide whether a frame's detections are novel enough to report."""
        # No detections, invalid
        if not annotation.detections:
            return False

        # First valid annotation, always accept
        if self._previous_annotation is None:
            return True

        # Enough time has passed since last annotation
        if annotation.time >= self._previous_annotation.time + <long>(self.ai_config.frame_recognition_seconds * 1000):
            return True

        # More objects detected than before
        if len(annotation.detections) > len(self._previous_annotation.detections):
            return True

        cdef:
            Detection current_det, prev_det
            double dx, dy, distance_sq, min_distance_sq
            Detection closest_det

        # Check each detection against previous frame.
        # (_previous_annotation always has detections: it is only assigned
        # from annotations that passed the emptiness check above.)
        for current_det in annotation.detections:
            min_distance_sq = 1e18  # Initialize with large value
            closest_det = None

            # Find the closest detection in previous frame
            for prev_det in self._previous_annotation.detections:
                dx = current_det.x - prev_det.x
                dy = current_det.y - prev_det.y
                distance_sq = dx * dx + dy * dy

                if distance_sq < min_distance_sq:
                    min_distance_sq = distance_sq
                    closest_det = prev_det

            # Check if beyond tracking distance.
            # NOTE(review): min_distance_sq is a *squared* pixel distance but
            # tracking_distance_confidence defaults to 0.15 -- confirm units.
            if min_distance_sq > self.ai_config.tracking_distance_confidence:
                return True

            # Check probability increase
            if current_det.confidence >= closest_det.confidence + self.ai_config.tracking_probability_increase:
                return True

        return False
|
||||
Binary file not shown.
@@ -0,0 +1,73 @@
|
||||
import traceback
|
||||
from queue import Queue
|
||||
cimport constants
|
||||
import msgpack
|
||||
|
||||
from api_client cimport ApiClient
|
||||
from annotation cimport Annotation
|
||||
from inference cimport Inference
|
||||
from remote_command cimport RemoteCommand, CommandType
|
||||
from remote_command_handler cimport RemoteCommandHandler
|
||||
from user cimport User
|
||||
|
||||
cdef class ParsedArguments:
    """Value object holding the parsed command-line credentials."""

    cdef str email, password, folder

    def __init__(self, str email, str password, str folder):
        self.folder = folder
        self.password = password
        self.email = email
|
||||
|
||||
cdef class CommandProcessor:
    """Receives remote commands and dispatches them.

    INFERENCE commands are queued and processed sequentially by start();
    all other commands are handled inline on the receiver thread.
    """

    cdef ApiClient api_client
    cdef RemoteCommandHandler remote_handler
    cdef object command_queue
    cdef bint running
    cdef Inference inference

    def __init__(self, args: ParsedArguments):
        self.api_client = ApiClient(args.email, args.password, args.folder)
        self.remote_handler = RemoteCommandHandler(self.on_command)
        self.command_queue = Queue(maxsize=constants.QUEUE_MAXSIZE)
        self.remote_handler.start()
        self.running = True
        # Downloads and decrypts the model before serving any command.
        model = self.api_client.load_ai_model()
        self.inference = Inference(model, self.on_annotation)

    def start(self):
        """Blocking worker loop: run queued inference commands until stop()."""
        while self.running:
            try:
                # NOTE(review): a blocking get() means stop() only takes
                # effect once one more command arrives -- confirm intended.
                command = self.command_queue.get()
                self.inference.run_inference(command)
                # FIX: use the shared constant instead of re-encoding the
                # 'DONE' literal locally (same bytes, single source of truth).
                self.remote_handler.send(command.client_id, constants.DONE_SIGNAL)
            except Exception:
                traceback.print_exc()

    cdef on_command(self, RemoteCommand command):
        """Receiver-thread callback: dispatch one decoded RemoteCommand."""
        try:
            if command.command_type == CommandType.GET_USER:
                self.get_user(command, self.api_client.get_user())
            elif command.command_type == CommandType.LOAD:
                response = self.api_client.load_bytes(command.filename)
                self.remote_handler.send(command.client_id, response)
            elif command.command_type == CommandType.INFERENCE:
                # Heavy work goes through the queue to the start() loop.
                self.command_queue.put(command)
            elif command.command_type == CommandType.STOP_INFERENCE:
                self.inference.stop()
            elif command.command_type == CommandType.EXIT:
                self.stop()
            else:
                pass  # Unknown command types are ignored deliberately.
        except Exception as e:
            print(f"Error handling client: {e}")

    cdef get_user(self, RemoteCommand command, User user):
        # Reply to the requesting client with the serialized user record.
        self.remote_handler.send(command.client_id, user.serialize())

    cdef on_annotation(self, RemoteCommand cmd, Annotation annotation):
        # Inference callback: forward each annotation to the requester.
        data = annotation.serialize()
        self.remote_handler.send(cmd.client_id, data)

    def stop(self):
        self.remote_handler.stop()
        self.running = False
|
||||
@@ -0,0 +1,15 @@
|
||||
# Wire-protocol command codes; values must match the remote clients.
cdef enum CommandType:
    GET_USER = 10        # Request the authenticated user's details
    LOAD = 20            # Download a file through the API
    INFERENCE = 30       # Run detection on a file
    STOP_INFERENCE = 40  # Abort the inference in progress
    EXIT = 100           # Shut the worker down
|
||||
|
||||
cdef class RemoteCommand:
    # zmq ROUTER identity of the sending client (set by the handler).
    cdef public bytes client_id
    cdef CommandType command_type
    # Target file for LOAD / INFERENCE commands.
    cdef str filename
    # Command payload; for INFERENCE this is a msgpack AIRecognitionConfig
    # (decoded in inference.pyx).
    cdef bytes data

    # Decode a RemoteCommand from its msgpack wire format.
    @staticmethod
    cdef from_msgpack(bytes data)
|
||||
@@ -0,0 +1,23 @@
|
||||
import msgpack
|
||||
|
||||
cdef class RemoteCommand:
    """A command received over the wire (msgpack encoded)."""

    def __init__(self, CommandType command_type, str filename, bytes data):
        self.command_type = command_type
        self.filename = filename
        self.data = data

    def __str__(self):
        command_type_names = {
            10: "GET_USER",
            20: "LOAD",
            30: "INFERENCE",
            40: "STOP INFERENCE",
            100: "EXIT"
        }
        # FIX: use .get() so an unknown command type prints its numeric
        # value instead of raising KeyError from inside __str__.
        name = command_type_names.get(self.command_type, str(self.command_type))
        data_str = f'. Data: {len(self.data)} bytes' if self.data else ''
        return f'{name}: {self.filename}{data_str}'

    @staticmethod
    cdef from_msgpack(bytes data):
        """Decode a RemoteCommand from its msgpack wire format."""
        unpacked = msgpack.unpackb(data, strict_map_key=False)
        return RemoteCommand(unpacked.get("CommandType"), unpacked.get("Filename"), unpacked.get("Data"))
|
||||
@@ -0,0 +1,15 @@
|
||||
# Declarations for the zmq command transport (remote_command_handler.pyx).
cdef class RemoteCommandHandler:
    cdef object _context         # shared zmq Context
    cdef object _router          # ROUTER socket facing the clients (tcp)
    cdef object _dealer          # DEALER socket backing the proxy (inproc)
    cdef object _shutdown_event  # threading.Event that stops the workers
    cdef object _on_command      # callback(RemoteCommand)

    cdef object _proxy_thread    # thread running zmq.proxy
    cdef object _workers         # worker threads draining the backend

    cdef start(self)
    cdef _proxy_loop(self)
    cdef _worker_loop(self)
    cdef send(self, bytes client_id, bytes data)
    cdef stop(self)
|
||||
@@ -0,0 +1,69 @@
|
||||
import time
|
||||
import zmq
|
||||
from threading import Thread, Event
|
||||
from remote_command cimport RemoteCommand
|
||||
cimport constants
|
||||
|
||||
|
||||
cdef class RemoteCommandHandler:
    """zmq ROUTER/DEALER front end: receives commands, sends replies.

    Clients connect to the tcp ROUTER socket; zmq.proxy shuttles frames
    between it and an inproc DEALER that a small pool of worker threads
    drains.
    """

    def __init__(self, object on_command):
        self._on_command = on_command
        self._context = zmq.Context.instance()
        self._shutdown_event = Event()

        # Client-facing socket.
        self._router = self._context.socket(zmq.ROUTER)
        self._router.setsockopt(zmq.LINGER, 0)  # drop pending msgs on close
        self._router.bind(f'tcp://*:{constants.ZMQ_PORT}')

        # In-process backend the worker sockets connect to.
        self._dealer = self._context.socket(zmq.DEALER)
        self._dealer.setsockopt(zmq.LINGER, 0)
        self._dealer.bind("inproc://backend")

        self._proxy_thread = Thread(target=self._proxy_loop, daemon=True)

        self._workers = []
        for _ in range(4): # 4 worker threads
            worker = Thread(target=self._worker_loop, daemon=True)
            self._workers.append(worker)

    cdef start(self):
        # Launch the proxy first, then the workers that consume from it.
        self._proxy_thread.start()
        for worker in self._workers:
            worker.start()

    cdef _proxy_loop(self):
        # Blocks until the proxied sockets are closed / context terminated.
        zmq.proxy(self._router, self._dealer)

    cdef _worker_loop(self):
        """Per-thread receive loop: decode frames and invoke the callback."""
        worker_socket = self._context.socket(zmq.DEALER)
        worker_socket.setsockopt(zmq.LINGER, 0)
        worker_socket.connect("inproc://backend")
        # Poll with a timeout so the shutdown event is re-checked regularly.
        poller = zmq.Poller()
        poller.register(worker_socket, zmq.POLLIN)
        print('started receiver loop...')
        while not self._shutdown_event.is_set():
            try:
                socks = dict(poller.poll(500))
                if worker_socket in socks:
                    # First frame is the ROUTER-assigned client identity.
                    client_id, message = worker_socket.recv_multipart()
                    cmd = RemoteCommand.from_msgpack(<bytes> message)
                    cmd.client_id = client_id
                    print(f'Received [{cmd}] from the client {client_id}')
                    self._on_command(cmd)
            except Exception as e:
                print(f"Worker error: {e}")
                import traceback
                traceback.print_exc()

    cdef send(self, bytes client_id, bytes data):
        """Route `data` back to `client_id` through the proxy.

        NOTE(review): a fresh DEALER per send is simple but pays a socket
        connect per reply -- confirm throughput is acceptable.
        """
        with self._context.socket(zmq.DEALER) as socket:
            socket.connect("inproc://backend")
            socket.send_multipart([client_id, data])
            print(f'{len(data)} bytes was sent to client {client_id}')

    cdef stop(self):
        """Signal workers, then tear the sockets and context down."""
        self._shutdown_event.set()
        time.sleep(0.5)  # grace period for worker loops to notice the event
        self._router.close()
        self._dealer.close()
        self._context.term()
|
||||
@@ -0,0 +1,5 @@
|
||||
setuptools
|
||||
Cython
|
||||
opencv-python
|
||||
numpy
|
||||
onnxruntime-gpu
|
||||
@@ -0,0 +1,12 @@
|
||||
# Declarations for the RAM-disk-backed model loader (secure_model.pyx).
cdef class SecureModelLoader:
    cdef:
        bytes _model_bytes   # decrypted model bytes held in memory
        str _ramdisk_path    # mount point of the RAM disk
        str _temp_file_path  # path of the model file on the RAM disk
        int _disk_size_mb    # requested RAM disk size

    # Write model_bytes to a RAM disk and return the resulting file path.
    cpdef str load_model(self, bytes model_bytes)
    cdef str _get_ramdisk_path(self)
    cdef void _create_ramdisk(self)
    cdef void _store_model(self)
    cdef void _cleanup(self)
|
||||
@@ -0,0 +1,104 @@
|
||||
import os
|
||||
import platform
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
from libc.stdio cimport FILE, fopen, fclose, remove
|
||||
from libc.stdlib cimport free
|
||||
from libc.string cimport strdup
|
||||
|
||||
cdef class SecureModelLoader:
    """Places decrypted model bytes on a RAM-backed filesystem and returns a
    file path inference code can open, keeping the plaintext model off
    persistent storage where the OS allows it.
    """

    def __cinit__(self, int disk_size_mb=512):
        # disk_size_mb is only honoured by the Linux (tmpfs size=) and macOS
        # (hdiutil block count) branches; the Windows branch ignores it.
        self._disk_size_mb = disk_size_mb
        self._ramdisk_path = None
        self._temp_file_path = None

    cpdef str load_model(self, bytes model_bytes):
        """Public method to load YOLO model securely.

        Creates the OS RAM disk, writes *model_bytes* there and returns the
        resulting temporary file path.
        """
        self._model_bytes = model_bytes
        self._create_ramdisk()
        self._store_model()
        return self._temp_file_path

    cdef str _get_ramdisk_path(self):
        """Determine the RAM disk path based on the OS."""
        if platform.system() == "Windows":
            return "R:\\"
        elif platform.system() == "Linux":
            return "/mnt/ramdisk"
        elif platform.system() == "Darwin":
            return "/Volumes/RAMDisk"
        else:
            raise RuntimeError("Unsupported OS for RAM disk")

    cdef void _create_ramdisk(self):
        """Create a RAM disk based on the OS.

        Raises RuntimeError when the underlying OS command fails.
        """
        system = platform.system()

        if system == "Windows":
            # NOTE(review): `subst` only maps R: onto the user temp directory —
            # that is NOT RAM-backed, so the model still touches disk here.
            # Confirm whether a real RAM-disk driver should be used instead.
            command = f'powershell -Command "subst R: {tempfile.gettempdir()}"'
            if os.system(command) != 0:
                raise RuntimeError("Failed to create RAM disk on Windows")
            self._ramdisk_path = "R:\\"

        elif system == "Linux":
            # tmpfs mount; needs root (or CAP_SYS_ADMIN) to succeed.
            self._ramdisk_path = "/mnt/ramdisk"
            if not Path(self._ramdisk_path).exists():
                os.mkdir(self._ramdisk_path)
            if os.system(f"mount -t tmpfs -o size={self._disk_size_mb}M tmpfs {self._ramdisk_path}") != 0:
                raise RuntimeError("Failed to create RAM disk on Linux")

        elif system == "Darwin":
            # hdiutil sizes the device in 512-byte blocks: 2048 blocks == 1 MB.
            block_size = 2048  # 512-byte blocks * 2048 = 1MB
            num_blocks = self._disk_size_mb * block_size
            result = os.popen(f"hdiutil attach -nomount ram://{num_blocks}").read().strip()
            if result:
                self._ramdisk_path = "/Volumes/RAMDisk"
                os.system(f"diskutil eraseVolume HFS+ RAMDisk {result}")
            else:
                raise RuntimeError("Failed to create RAM disk on macOS")

    cdef void _store_model(self):
        """Write the model bytes to the RAM disk.

        delete=False keeps the temp file alive so the returned path stays
        valid for the caller; a C-level fopen then verifies readability.
        """
        cdef char* temp_path = NULL
        cdef FILE* cfile = NULL

        with tempfile.NamedTemporaryFile(
            dir=self._ramdisk_path, suffix='.pt', delete=False
        ) as tmp_file:
            tmp_file.write(self._model_bytes)
            self._temp_file_path = tmp_file.name

        encoded_path = self._temp_file_path.encode('utf-8')
        temp_path = strdup(encoded_path)
        if temp_path == NULL:
            raise MemoryError()
        try:
            # Only the C I/O runs without the GIL. The original raised a
            # Python IOError (with an f-string) inside `with nogil`, which
            # requires the GIL — the NULL check now happens after the GIL
            # is re-acquired.
            with nogil:
                cfile = fopen(temp_path, "rb")
            if cfile == NULL:
                raise IOError(f"Could not open {self._temp_file_path}")
            with nogil:
                fclose(cfile)
        finally:
            # The original leaked the strdup'd copy of the path.
            free(temp_path)

    cdef void _cleanup(self):
        """Remove the model file and unmount RAM disk securely."""
        cdef char* c_path
        if self._temp_file_path:
            c_path = strdup(os.fsencode(self._temp_file_path))
            if c_path != NULL:  # guard: strdup can fail under memory pressure
                with nogil:
                    remove(c_path)
                free(c_path)
            self._temp_file_path = None

        # Unmount RAM disk based on OS
        if self._ramdisk_path:
            if platform.system() == "Windows":
                os.system("subst R: /D")
            elif platform.system() == "Linux":
                os.system(f"umount {self._ramdisk_path}")
            elif platform.system() == "Darwin":
                os.system("hdiutil detach /Volumes/RAMDisk")
            self._ramdisk_path = None

    def __dealloc__(self):
        """Ensure cleanup when the object is deleted."""
        # NOTE(review): __dealloc__ may run during interpreter shutdown when
        # the modules _cleanup relies on (os/platform) are being torn down;
        # an explicit cleanup call at shutdown is safer — confirm callers do.
        self._cleanup()
|
||||
@@ -0,0 +1,9 @@
|
||||
# Declarations for security.pyx — AES stream encryption plus key hashing.
cdef class Security:
    # Encrypts input_stream with a key-derived AES key; returns iv + ciphertext.
    @staticmethod
    cdef encrypt_to(input_stream, key)

    # Inverse of encrypt_to: reads the IV prefix, returns the plaintext.
    @staticmethod
    cdef decrypt_to(input_stream, key)

    # Base64-encoded SHA-384 digest of the key string.
    @staticmethod
    cdef calc_hash(str key)
|
||||
@@ -0,0 +1,51 @@
|
||||
import base64
|
||||
import hashlib
|
||||
import os
|
||||
from hashlib import sha384
|
||||
|
||||
from cryptography.hazmat.backends import default_backend
|
||||
from cryptography.hazmat.primitives import padding
|
||||
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
|
||||
|
||||
BUFFER_SIZE = 64 * 1024 # 64 KB
|
||||
|
||||
cdef class Security:
    """AES-256 stream-encryption helpers plus a SHA-384 key hash.

    The caller's key string is stretched to a 256-bit AES key with SHA-256;
    a fresh 16-byte IV is generated per message and prepended to the
    ciphertext, so decrypt_to reads it back from the stream prefix.
    """

    @staticmethod
    cdef encrypt_to(input_stream, key):
        """Encrypt *input_stream* with AES-CFB; returns iv + ciphertext."""
        cdef bytes aes_key = hashlib.sha256(key.encode('utf-8')).digest()
        iv = os.urandom(16)

        cipher = Cipher(algorithms.AES(<bytes>aes_key), modes.CFB(iv), backend=default_backend())
        encryptor = cipher.encryptor()

        cdef bytearray res = bytearray()
        res.extend(iv)
        # Stream in BUFFER_SIZE chunks so large files never load whole.
        while chunk := input_stream.read(BUFFER_SIZE):
            encrypted_chunk = encryptor.update(chunk)
            res.extend(encrypted_chunk)
        res.extend(encryptor.finalize())
        return res

    @staticmethod
    cdef decrypt_to(input_stream, key):
        """Decrypt a stream produced by encrypt_to (16-byte IV prefix).

        Bug fix: the original decrypted with AES-CBC and then applied PKCS7
        unpadding, while encrypt_to produces unpadded AES-CFB output — a
        round trip could never succeed. CFB is a stream mode, so decryption
        mirrors encryption with no padding step.
        """
        cdef bytes aes_key = hashlib.sha256(key.encode('utf-8')).digest()
        cdef bytes iv = input_stream.read(16)

        cipher = Cipher(algorithms.AES(<bytes>aes_key), modes.CFB(<bytes>iv), backend=default_backend())
        decryptor = cipher.decryptor()

        cdef bytearray res = bytearray()
        while chunk := input_stream.read(BUFFER_SIZE):
            decrypted_chunk = decryptor.update(chunk)
            res.extend(decrypted_chunk)
        res.extend(decryptor.finalize())
        # bytes(...) keeps the immutable return type the original produced
        # via the unpadder.
        return bytes(res)

    @staticmethod
    cdef calc_hash(str key):
        """Return the base64-encoded SHA-384 digest of *key*."""
        str_bytes = key.encode('utf-8')
        hash_bytes = sha384(str_bytes).digest()
        cdef str h = base64.b64encode(hash_bytes).decode('utf-8')
        return h
|
||||
@@ -0,0 +1,37 @@
|
||||
from setuptools import setup, Extension
|
||||
from Cython.Build import cythonize
|
||||
import numpy as np
|
||||
|
||||
# (module name, extra Extension kwargs) for every compiled .pyx module.
# hardware_service keeps debug symbols for native-level debugging.
_EXTENSION_SPECS = [
    ('constants', {}),
    ('annotation', {}),
    ('security', {}),
    ('hardware_service', {'extra_compile_args': ["-g"], 'extra_link_args': ["-g"]}),
    ('remote_command', {}),
    ('remote_command_handler', {}),
    ('user', {}),
    ('api_client', {}),
    ('secure_model', {}),
    ('ai_config', {}),
    ('inference', {}),
    ('main', {}),
]

extensions = [
    Extension(name, [f'{name}.pyx'], **extra)
    for name, extra in _EXTENSION_SPECS
]

setup(
    name="azaion.ai",
    ext_modules=cythonize(
        extensions,
        compiler_directives={
            # Python 3 semantics for all modules.
            "language_level": 3,
            # Keep generated C free of source comments.
            "emit_code_comments": False,
            "binding": True,
            # Skip index checks for speed; code must index carefully.
            'boundscheck': False,
            'wraparound': False,
        },
    ),
    install_requires=[
        'ultralytics>=8.0.0',
        'pywin32; platform_system=="Windows"',
    ],
    zip_safe=False,
)
|
||||
@@ -0,0 +1,22 @@
|
||||
import argparse
|
||||
from main import ParsedArguments, CommandProcessor
|
||||
|
||||
|
||||
def parse_arguments():
    """Parse the worker's CLI options and bundle them into ParsedArguments."""
    parser = argparse.ArgumentParser(description="Command Processor")
    # (short flag, long flag, help text) — all options are optional strings.
    option_table = (
        ("-e", "--email", "Email"),
        ("-p", "--pw", "Password"),
        ("-f", "--folder", "Folder to API inner folder to download file from"),
    )
    for short_flag, long_flag, help_text in option_table:
        parser.add_argument(short_flag, long_flag, type=str, default="", help=help_text)
    parsed = parser.parse_args()

    return ParsedArguments(parsed.email, parsed.pw, parsed.folder)
|
||||
|
||||
def start(args: ParsedArguments):
    """Run the command processor until it returns or the user hits Ctrl-C."""
    cmd_processor = CommandProcessor(args)
    try:
        cmd_processor.start()
    except KeyboardInterrupt:
        # Ctrl-C is the expected shutdown path; stop tears the worker down.
        cmd_processor.stop()
|
||||
|
||||
# Script entry point: parse CLI options, then run the worker until stopped.
if __name__ == '__main__':
    start(parse_arguments())
|
||||
@@ -0,0 +1,57 @@
|
||||
# -*- mode: python ; coding: utf-8 -*-
# PyInstaller spec for the worker: bundles start.py plus the compiled
# Cython modules and their runtime dependencies into a single executable.
from PyInstaller.utils.hooks import collect_all

datas = []
binaries = []
hiddenimports = ['user', 'security', 'secure_model', 'api_client', 'hardware_service', 'constants', 'annotation', 'remote_command', 'ai_config', 'inference', 'remote_command_handler', 'cv2', 'onnxruntime']

# Packages whose data files, binaries and submodules must all be bundled.
# One loop replaces seven identical copy-pasted collect_all blocks; the
# package order (and thus accumulation order) is unchanged.
for _pkg in ('jwt', 'requests', 'psutil', 'cryptography', 'msgpack', 'expecttest', 'zmq'):
    _pkg_datas, _pkg_binaries, _pkg_hidden = collect_all(_pkg)
    datas += _pkg_datas
    binaries += _pkg_binaries
    hiddenimports += _pkg_hidden


a = Analysis(
    ['start.py'],
    pathex=[],
    binaries=binaries,
    datas=datas,
    hiddenimports=hiddenimports,
    hookspath=[],
    hooksconfig={},
    runtime_hooks=[],
    excludes=[],
    noarchive=False,
    optimize=0,
)
pyz = PYZ(a.pure)

exe = EXE(
    pyz,
    a.scripts,
    a.binaries,
    a.datas,
    [],
    name='start',
    debug=False,
    bootloader_ignore_signals=False,
    strip=False,
    upx=True,
    upx_exclude=[],
    runtime_tmpdir=None,
    console=True,
    disable_windowed_traceback=False,
    argv_emulation=False,
    target_arch=None,
    codesign_identity=None,
    entitlements_file=None,
)
|
||||
@@ -0,0 +1 @@
|
||||
eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJuYW1laWQiOiJkOTBhMzZjYS1lMjM3LTRmYmQtOWM3Yy0xMjcwNDBhYzg1NTYiLCJ1bmlxdWVfbmFtZSI6ImFkbWluQGF6YWlvbi5jb20iLCJyb2xlIjoiQXBpQWRtaW4iLCJuYmYiOjE3Mzg4Mjk0NTMsImV4cCI6MTczODg0Mzg1MywiaWF0IjoxNzM4ODI5NDUzLCJpc3MiOiJBemFpb25BcGkiLCJhdWQiOiJBbm5vdGF0b3JzL09yYW5nZVBpL0FkbWlucyJ9.t6ImX8KkH5IQ4zNNY5IbXESSI6uia4iuzyMhodvM7AA
|
||||
@@ -0,0 +1,15 @@
|
||||
# Role levels mirrored from the API; numeric gaps leave room for new roles.
cdef enum RoleEnum:
    NONE = 0
    Operator = 10
    Validator = 20
    CompanionPC = 30
    Admin = 40
    ResourceUploader = 50
    ApiAdmin = 1000

# Authenticated user identity; see user.pyx for the implementation.
cdef class User:
    cdef public str id        # user GUID issued by the API
    cdef public str email
    cdef public RoleEnum role

    # msgpack wire form of the user record.
    cdef bytes serialize(self)
|
||||
@@ -0,0 +1,15 @@
|
||||
import msgpack
|
||||
|
||||
cdef class User:
    """Authenticated user identity: GUID, email and role level."""

    def __init__(self, str id, str email, RoleEnum role):
        self.id = id
        self.email = email
        self.role = role

    cdef bytes serialize(self):
        """Pack the user into the compact single-letter-key msgpack form."""
        payload = {
            "i": self.id,
            "e": self.email,
            "r": self.role,
        }
        return msgpack.packb(payload)
|
||||
Reference in New Issue
Block a user