Update AI initialization

Rework AIAvailabilityStatus events to MediatR
This commit is contained in:
Oleksandr Bezdieniezhnykh
2025-09-01 20:12:13 +03:00
parent d1ce9d9365
commit 067f02cc63
23 changed files with 282 additions and 192 deletions
+1 -32
View File
@@ -14,6 +14,7 @@ using Azaion.Common.DTO.Config;
using Azaion.Common.Events; using Azaion.Common.Events;
using Azaion.Common.Extensions; using Azaion.Common.Extensions;
using Azaion.Common.Services; using Azaion.Common.Services;
using Azaion.Common.Services.Inference;
using LibVLCSharp.Shared; using LibVLCSharp.Shared;
using MediatR; using MediatR;
using Microsoft.WindowsAPICodePack.Dialogs; using Microsoft.WindowsAPICodePack.Dialogs;
@@ -106,38 +107,6 @@ public partial class Annotator
_logger.LogError(e, e.Message); _logger.LogError(e, e.Message);
} }
}; };
_inferenceClient.AIAvailabilityReceived += (_, command) =>
{
Dispatcher.Invoke(() =>
{
_logger.LogInformation(command.Message);
var aiEnabled = command.Message == "enabled";
AIDetectBtn.IsEnabled = aiEnabled;
var aiDisabledText = "Будь ласка, зачекайте, наразі розпізнавання AI недоступне";
var messagesDict = new Dictionary<string, string>
{
{ "disabled", aiDisabledText },
{ "downloading", "Будь ласка зачекайте, йде завантаження AI для Вашої відеокарти" },
{ "converting", "Будь ласка зачекайте, йде налаштування AI під Ваше залізо. (5-12 хвилин в залежності від моделі відеокарти, до 50 хв на старих GTX1650)" },
{ "uploading", "Будь ласка зачекайте, йде зберігання" },
{ "enabled", "AI готовий для розпізнавання" }
};
if (command.Message?.StartsWith("Error") ?? false)
{
_logger.LogError(command.Message);
StatusHelp.Text = command.Message;
}
else
StatusHelp.Text = messagesDict!.GetValueOrDefault(command.Message, aiDisabledText);
if (aiEnabled)
StatusHelp.Foreground = aiEnabled ? Brushes.White : Brushes.Red;
});
};
_inferenceClient.Send(RemoteCommand.Create(CommandType.AIAvailabilityCheck));
Editor.GetTimeFunc = () => TimeSpan.FromMilliseconds(_mediaPlayer.Time); Editor.GetTimeFunc = () => TimeSpan.FromMilliseconds(_mediaPlayer.Time);
MapMatcherComponent.Init(_appConfig, gpsMatcherService); MapMatcherComponent.Init(_appConfig, gpsMatcherService);
} }
+13 -1
View File
@@ -12,6 +12,7 @@ using Azaion.Common.DTO.Config;
using Azaion.Common.Events; using Azaion.Common.Events;
using Azaion.Common.Extensions; using Azaion.Common.Extensions;
using Azaion.Common.Services; using Azaion.Common.Services;
using Azaion.Common.Services.Inference;
using GMap.NET; using GMap.NET;
using GMap.NET.WindowsPresentation; using GMap.NET.WindowsPresentation;
using LibVLCSharp.Shared; using LibVLCSharp.Shared;
@@ -43,7 +44,8 @@ public class AnnotatorEventHandler(
INotificationHandler<AnnotationsDeletedEvent>, INotificationHandler<AnnotationsDeletedEvent>,
INotificationHandler<AnnotationAddedEvent>, INotificationHandler<AnnotationAddedEvent>,
INotificationHandler<SetStatusTextEvent>, INotificationHandler<SetStatusTextEvent>,
INotificationHandler<GPSMatcherResultProcessedEvent> INotificationHandler<GPSMatcherResultProcessedEvent>,
INotificationHandler<AIAvailabilityStatusEvent>
{ {
private const int STEP = 20; private const int STEP = 20;
private const int LARGE_STEP = 5000; private const int LARGE_STEP = 5000;
@@ -472,4 +474,14 @@ public class AnnotatorEventHandler(
map.SatelliteMap.Position = pointLatLon; map.SatelliteMap.Position = pointLatLon;
map.SatelliteMap.ZoomAndCenterMarkers(null); map.SatelliteMap.ZoomAndCenterMarkers(null);
} }
public async Task Handle(AIAvailabilityStatusEvent e, CancellationToken cancellationToken)
{
mainWindow.Dispatcher.Invoke(() =>
{
logger.LogInformation(e.ToString());
mainWindow.AIDetectBtn.IsEnabled = e.Status == AIAvailabilityEnum.Enabled;
mainWindow.StatusHelp.Text = e.ToString();
});
}
} }
+33
View File
@@ -0,0 +1,33 @@
using MediatR;
using MessagePack;
namespace Azaion.Common.DTO;
// Lifecycle states of the AI engine on the inference side.
// Values are spaced out (10/20/30) so intermediate states can be inserted
// later without renumbering; Enabled/Error mirror HTTP-style 200/500 codes.
// NOTE(review): must stay numerically in sync with the Cython AIAvailabilityEnum
// in ai_availability_status.pxd — confirm both sides on any change.
public enum AIAvailabilityEnum
{
    None = 0,          // initial state, status not yet reported
    Downloading = 10,  // fetching the model for the local GPU
    Converting = 20,   // building/optimizing the engine for this hardware
    Uploading = 30,    // storing the converted engine
    Enabled = 200,     // AI is ready for recognition
    Error = 500        // setup failed; details in ErrorMessage
}
/// <summary>
/// MediatR notification describing the current availability of the AI engine.
/// Deserialized with MessagePack from the inference process; the short keys
/// "s"/"m" must match the keys the Python side packs in its serialize().
/// </summary>
[MessagePackObject]
public class AIAvailabilityStatusEvent : INotification
{
    /// <summary>Current lifecycle state of the AI engine.</summary>
    [Key("s")] public AIAvailabilityEnum Status { get; set; }

    /// <summary>Error details; null unless Status is Error.</summary>
    [Key("m")] public string? ErrorMessage { get; set; }

    /// <summary>
    /// Human-readable (Ukrainian) status for the UI. Previously the message was
    /// always interpolated with ErrorMessage, leaving a trailing space when it
    /// was null (the common case); now the suffix is appended only when present.
    /// </summary>
    public override string ToString()
    {
        // Unknown statuses (including None) intentionally fall back to the generic error text.
        var message = StatusMessageDict.GetValueOrDefault(Status, "Помилка");
        return string.IsNullOrEmpty(ErrorMessage) ? message : $"{message} {ErrorMessage}";
    }

    // UI texts keyed by status; runtime strings, do not translate.
    private static readonly Dictionary<AIAvailabilityEnum, string> StatusMessageDict = new()
    {
        { AIAvailabilityEnum.Downloading, "Йде завантаження AI для Вашої відеокарти" },
        { AIAvailabilityEnum.Converting, "Йде налаштування AI під Ваше залізо. (5-12 хвилин в залежності від моделі відеокарти, до 50 хв на старих GTX1650)" },
        { AIAvailabilityEnum.Uploading, "Йде зберігання AI" },
        { AIAvailabilityEnum.Enabled, "AI готовий для розпізнавання" },
        { AIAvailabilityEnum.Error, "Помилка під час налаштування AI" }
    };
}
@@ -21,6 +21,7 @@ using RabbitMQ.Stream.Client.Reliable;
namespace Azaion.Common.Services; namespace Azaion.Common.Services;
// SHOULD BE ONLY ONE INSTANCE OF AnnotationService. Do not add ANY NotificationHandler to it! // SHOULD BE ONLY ONE INSTANCE OF AnnotationService. Do not add ANY NotificationHandler to it!
// Queue consumer should be created only once.
public class AnnotationService : IAnnotationService public class AnnotationService : IAnnotationService
{ {
private readonly IDbFactory _dbFactory; private readonly IDbFactory _dbFactory;
@@ -1,18 +1,17 @@
using System.Diagnostics; using System.Diagnostics;
using System.Text; using System.Text;
using Azaion.Common.DTO; using Azaion.Common.DTO;
using MediatR;
using MessagePack; using MessagePack;
using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options; using Microsoft.Extensions.Options;
using NetMQ; using NetMQ;
using NetMQ.Sockets; using NetMQ.Sockets;
namespace Azaion.Common.Services; namespace Azaion.Common.Services.Inference;
public interface IInferenceClient : IDisposable public interface IInferenceClient : IDisposable
{ {
event EventHandler<RemoteCommand>? InferenceDataReceived;
event EventHandler<RemoteCommand>? AIAvailabilityReceived;
void Send(RemoteCommand create); void Send(RemoteCommand create);
void Stop(); void Stop();
} }
@@ -20,21 +19,22 @@ public interface IInferenceClient : IDisposable
public class InferenceClient : IInferenceClient public class InferenceClient : IInferenceClient
{ {
private readonly ILogger<InferenceClient> _logger; private readonly ILogger<InferenceClient> _logger;
public event EventHandler<RemoteCommand>? BytesReceived;
public event EventHandler<RemoteCommand>? InferenceDataReceived;
public event EventHandler<RemoteCommand>? AIAvailabilityReceived;
private readonly DealerSocket _dealer = new(); private readonly DealerSocket _dealer = new();
private readonly NetMQPoller _poller = new(); private readonly NetMQPoller _poller = new();
private readonly Guid _clientId = Guid.NewGuid(); private readonly Guid _clientId = Guid.NewGuid();
private readonly InferenceClientConfig _inferenceClientConfig; private readonly InferenceClientConfig _inferenceClientConfig;
private readonly LoaderClientConfig _loaderClientConfig; private readonly LoaderClientConfig _loaderClientConfig;
private readonly IMediator _mediator;
public InferenceClient(ILogger<InferenceClient> logger, IOptions<InferenceClientConfig> inferenceConfig, IOptions<LoaderClientConfig> loaderConfig) public InferenceClient(ILogger<InferenceClient> logger, IOptions<InferenceClientConfig> inferenceConfig,
IMediator mediator,
IOptions<LoaderClientConfig> loaderConfig)
{ {
_logger = logger; _logger = logger;
_inferenceClientConfig = inferenceConfig.Value; _inferenceClientConfig = inferenceConfig.Value;
_loaderClientConfig = loaderConfig.Value; _loaderClientConfig = loaderConfig.Value;
_mediator = mediator;
Start(); Start();
} }
@@ -59,32 +59,31 @@ public class InferenceClient : IInferenceClient
_dealer.Options.Identity = Encoding.UTF8.GetBytes(_clientId.ToString("N")); _dealer.Options.Identity = Encoding.UTF8.GetBytes(_clientId.ToString("N"));
_dealer.Connect($"tcp://{_inferenceClientConfig.ZeroMqHost}:{_inferenceClientConfig.ZeroMqPort}"); _dealer.Connect($"tcp://{_inferenceClientConfig.ZeroMqHost}:{_inferenceClientConfig.ZeroMqPort}");
_dealer.ReceiveReady += (_, e) => ProcessClientCommand(e.Socket); _dealer.ReceiveReady += async (_, e) => await ProcessClientCommand(e.Socket);
_poller.Add(_dealer); _poller.Add(_dealer);
_ = Task.Run(() => _poller.RunAsync()); _ = Task.Run(() => _poller.RunAsync());
} }
private void ProcessClientCommand(NetMQSocket socket, CancellationToken ct = default) private async Task ProcessClientCommand(NetMQSocket socket, CancellationToken ct = default)
{ {
while (socket.TryReceiveFrameBytes(TimeSpan.Zero, out var bytes)) while (socket.TryReceiveFrameBytes(TimeSpan.Zero, out var bytes))
{ {
if (bytes?.Length == 0) if (bytes.Length == 0)
continue; continue;
var remoteCommand = MessagePackSerializer.Deserialize<RemoteCommand>(bytes, cancellationToken: ct); var remoteCommand = MessagePackSerializer.Deserialize<RemoteCommand>(bytes, cancellationToken: ct);
switch (remoteCommand.CommandType) switch (remoteCommand.CommandType)
{ {
case CommandType.DataBytes:
BytesReceived?.Invoke(this, remoteCommand);
break;
case CommandType.InferenceData: case CommandType.InferenceData:
InferenceDataReceived?.Invoke(this, remoteCommand); await _mediator.Publish(new InferenceDataEvent(remoteCommand), ct);
break; break;
case CommandType.AIAvailabilityResult: case CommandType.AIAvailabilityResult:
AIAvailabilityReceived?.Invoke(this, remoteCommand); var aiAvailabilityStatus = MessagePackSerializer.Deserialize<AIAvailabilityStatusEvent>(remoteCommand.Data, cancellationToken: ct);
await _mediator.Publish(aiAvailabilityStatus, ct);
break; break;
default:
throw new ArgumentOutOfRangeException();
} }
} }
} }
@@ -0,0 +1,56 @@
using Azaion.Common.DTO;
using Azaion.Common.DTO.Config;
using Azaion.Common.Extensions;
using Microsoft.Extensions.Options;
namespace Azaion.Common.Services.Inference;
/// <summary>
/// Runs AI inference over local media files via the out-of-process inference client.
/// </summary>
public interface IInferenceService
{
    /// <summary>
    /// Starts inference over the given media paths and completes when cancelled
    /// (externally via <paramref name="ct"/> or internally via InferenceCancelTokenSource).
    /// </summary>
    Task RunInference(List<string> mediaPaths, CancellationToken ct = default);
    // NOTE(review): a publicly settable token source lets any caller replace it
    // mid-run — confirm the setter is intentional.
    CancellationTokenSource InferenceCancelTokenSource { get; set; }
    /// <summary>Stops the underlying inference client.</summary>
    void StopInference();
}
// SHOULD BE ONLY ONE INSTANCE OF InferenceService. Do not add ANY NotificationHandler to it!
// _inferenceCancelTokenSource should be created only once.
public class InferenceService : IInferenceService
{
    // Interval between AIAvailabilityCheck polls sent to the inference process.
    private const int AIAvailabilityPollMs = 10000;

    private readonly IInferenceClient _client;
    private readonly IAzaionApi _azaionApi;
    private readonly IOptions<AIRecognitionConfig> _aiConfigOptions;

    /// <summary>Cancelling this ends a running inference session; recreated on each RunInference call.</summary>
    public CancellationTokenSource InferenceCancelTokenSource { get; set; } = new();

    /// <summary>Cancelling this stops the availability polling loop; recreated on each CheckAIAvailabilityStatus call.</summary>
    public CancellationTokenSource CheckAIAvailabilityTokenSource { get; set; } = new();

    public InferenceService(IInferenceClient client, IAzaionApi azaionApi, IOptions<AIRecognitionConfig> aiConfigOptions)
    {
        _client = client;
        _azaionApi = azaionApi;
        _aiConfigOptions = aiConfigOptions;
    }

    /// <summary>
    /// Periodically asks the inference process for its availability status until
    /// CheckAIAvailabilityTokenSource is cancelled. Results arrive asynchronously
    /// as AIAvailabilityStatusEvent notifications.
    /// </summary>
    public async Task CheckAIAvailabilityStatus()
    {
        CheckAIAvailabilityTokenSource = new CancellationTokenSource();
        try
        {
            while (!CheckAIAvailabilityTokenSource.IsCancellationRequested)
            {
                _client.Send(RemoteCommand.Create(CommandType.AIAvailabilityCheck));
                await Task.Delay(AIAvailabilityPollMs, CheckAIAvailabilityTokenSource.Token);
            }
        }
        catch (OperationCanceledException)
        {
            // BUG FIX: Task.Delay throws TaskCanceledException when the token is
            // cancelled; previously that escaped the loop as an unobserved fault.
            // Cancellation is the normal shutdown path for this poller.
        }
    }

    /// <summary>
    /// Logs in, sends the inference command for the given media files, then waits
    /// until either the caller's token or InferenceCancelTokenSource is cancelled
    /// (the handler cancels the latter when a "DONE" message arrives).
    /// </summary>
    public async Task RunInference(List<string> mediaPaths, CancellationToken ct = default)
    {
        InferenceCancelTokenSource = new CancellationTokenSource();
        _client.Send(RemoteCommand.Create(CommandType.Login, _azaionApi.Credentials));
        var aiConfig = _aiConfigOptions.Value;
        aiConfig.Paths = mediaPaths;
        _client.Send(RemoteCommand.Create(CommandType.Inference, aiConfig));
        using var combinedTokenSource = CancellationTokenSource.CreateLinkedTokenSource(ct, InferenceCancelTokenSource.Token);
        await combinedTokenSource.Token.AsTask();
    }

    /// <summary>Stops the underlying inference client.</summary>
    public void StopInference() => _client.Stop();
}
@@ -0,0 +1,43 @@
using Azaion.Common.Database;
using Azaion.Common.DTO;
using Azaion.Common.Events;
using MediatR;
using MessagePack;
using Microsoft.Extensions.Logging;
namespace Azaion.Common.Services.Inference;
/// <summary>
/// MediatR handlers for events published by the inference client.
/// </summary>
public class InferenceServiceEventHandler(IInferenceService inferenceService,
    IAnnotationService annotationService,
    IMediator mediator,
    ILogger<InferenceServiceEventHandler> logger) :
    INotificationHandler<InferenceDataEvent>,
    INotificationHandler<AIAvailabilityStatusEvent>
{
    /// <summary>
    /// Persists each incoming detection as an annotation and republishes it as
    /// AnnotationAddedEvent; a "DONE" message ends the run by cancelling the
    /// inference token source.
    /// </summary>
    public async Task Handle(InferenceDataEvent e, CancellationToken ct)
    {
        try
        {
            if (e.Command.Message == "DONE")
            {
                await inferenceService.InferenceCancelTokenSource.CancelAsync();
                return;
            }

            var annImage = MessagePackSerializer.Deserialize<AnnotationImage>(e.Command.Data, cancellationToken: ct);
            var annotation = await annotationService.SaveAnnotation(annImage, ct);
            await mediator.Publish(new AnnotationAddedEvent(annotation), ct);
        }
        catch (Exception ex)
        {
            // Best-effort: one bad frame must not kill the whole inference stream.
            logger.LogError(ex, ex.Message);
        }
    }

    /// <summary>
    /// BUG FIX: the previous body did <c>e.Status = AIAvailabilityEnum.Enabled</c>,
    /// mutating the notification instance MediatR shares with all handlers — any
    /// handler running after this one (e.g. the UI status handler) would always
    /// see "Enabled" regardless of the real status. It was also <c>async</c> with
    /// no await. Now a deliberate no-op; react to specific statuses here if needed.
    /// </summary>
    public Task Handle(AIAvailabilityStatusEvent e, CancellationToken ct) => Task.CompletedTask;
}
@@ -0,0 +1,9 @@
using Azaion.Common.DTO;
using MediatR;
namespace Azaion.Common.Services.Inference;
/// <summary>
/// MediatR notification wrapping a raw InferenceData RemoteCommand received
/// from the inference process.
/// </summary>
public class InferenceDataEvent(RemoteCommand command) : INotification
{
    // Carries the full command: Message signals control flow ("DONE" ends the
    // run), Data holds a MessagePack-serialized AnnotationImage.
    public RemoteCommand Command { get; set; } = command;
}
@@ -1,82 +0,0 @@
using Azaion.Common.Database;
using Azaion.Common.DTO;
using Azaion.Common.DTO.Config;
using Azaion.Common.Events;
using Azaion.Common.Extensions;
using MediatR;
using MessagePack;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
namespace Azaion.Common.Services;
public interface IInferenceService
{
Task RunInference(List<string> mediaPaths, CancellationToken ct = default);
void StopInference();
}
public class InferenceService : IInferenceService
{
private readonly IInferenceClient _client;
private readonly IAzaionApi _azaionApi;
private readonly IOptions<AIRecognitionConfig> _aiConfigOptions;
private readonly IAnnotationService _annotationService;
private readonly IMediator _mediator;
private CancellationTokenSource _inferenceCancelTokenSource = new();
public InferenceService(
ILogger<InferenceService> logger,
IInferenceClient client,
IAzaionApi azaionApi,
IOptions<AIRecognitionConfig> aiConfigOptions,
IAnnotationService annotationService,
IMediator mediator)
{
_client = client;
_azaionApi = azaionApi;
_aiConfigOptions = aiConfigOptions;
_annotationService = annotationService;
_mediator = mediator;
client.InferenceDataReceived += async (sender, command) =>
{
try
{
if (command.Message == "DONE")
{
_inferenceCancelTokenSource?.Cancel();
return;
}
var annImage = MessagePackSerializer.Deserialize<AnnotationImage>(command.Data);
await ProcessDetection(annImage);
}
catch (Exception e)
{
logger.LogError(e, e.Message);
}
};
}
private async Task ProcessDetection(AnnotationImage annotationImage, CancellationToken ct = default)
{
var annotation = await _annotationService.SaveAnnotation(annotationImage, ct);
await _mediator.Publish(new AnnotationAddedEvent(annotation), ct);
}
public async Task RunInference(List<string> mediaPaths, CancellationToken ct = default)
{
_inferenceCancelTokenSource = new CancellationTokenSource();
_client.Send(RemoteCommand.Create(CommandType.Login, _azaionApi.Credentials));
var aiConfig = _aiConfigOptions.Value;
aiConfig.Paths = mediaPaths;
_client.Send(RemoteCommand.Create(CommandType.Inference, aiConfig));
using var combinedTokenSource = CancellationTokenSource.CreateLinkedTokenSource(ct, _inferenceCancelTokenSource.Token);
await combinedTokenSource.Token.AsTask();
}
public void StopInference() => _client.Stop();
}
@@ -0,0 +1,14 @@
# Lifecycle states of the AI engine.
# NOTE(review): values must stay numerically in sync with the C# AIAvailabilityEnum
# (None=0 ... Error=500) — confirm both sides on any change.
cdef enum AIAvailabilityEnum:
    NONE = 0
    DOWNLOADING = 10
    CONVERTING = 20
    UPLOADING = 30
    ENABLED = 200
    ERROR = 500

# Holds the current availability status plus an optional error message and can
# serialize itself (MessagePack) for transmission to the client.
cdef class AIAvailabilityStatus:
    cdef AIAvailabilityEnum status
    cdef str error_message
    cdef bytes serialize(self)
    # `=*` marks error_message as having a default value in the .pyx implementation.
    cdef set_status(self, AIAvailabilityEnum status, str error_message=*)
@@ -0,0 +1,36 @@
cimport constants_inf
import msgpack
# English labels used by AIAvailabilityStatus.__str__ for logging.
AIStatus2Text = {
    AIAvailabilityEnum.NONE: "None",
    AIAvailabilityEnum.DOWNLOADING: "Downloading",
    AIAvailabilityEnum.CONVERTING: "Converting",
    AIAvailabilityEnum.UPLOADING: "Uploading",
    AIAvailabilityEnum.ENABLED: "Enabled",
    AIAvailabilityEnum.ERROR: "Error",
}
# Current availability of the AI engine, updated by the inference pipeline and
# serialized on demand for AI_AVAILABILITY_CHECK requests from the client.
cdef class AIAvailabilityStatus:
    def __init__(self):
        # Starts as NONE until the engine reports a real state.
        self.status = AIAvailabilityEnum.NONE
        self.error_message = None

    def __str__(self):
        # Human-readable form for logs, e.g. "Downloading " or "Error <details>".
        status_text = AIStatus2Text.get(self.status, "Unknown")
        error_text = self.error_message if self.error_message else ""
        return f"{status_text} {error_text}"

    cdef bytes serialize(self):
        # Short keys "s"/"m" must match the [Key("s")]/[Key("m")] MessagePack
        # attributes on the C# AIAvailabilityStatusEvent.
        return msgpack.packb({
            "s": self.status,
            "m": self.error_message
        })

    cdef set_status(self, AIAvailabilityEnum status, str error_message=None):
        # Updates the state and logs it: errors via logerror, normal transitions via log.
        self.status = status
        self.error_message = error_message
        if error_message is not None:
            constants_inf.logerror(<str>error_message)
        else:
            constants_inf.log(<str>str(self))
+3 -1
View File
@@ -35,6 +35,7 @@ venv\Scripts\pyinstaller --name=azaion-inference ^
--collect-all jwt ^ --collect-all jwt ^
--collect-all loguru ^ --collect-all loguru ^
--hidden-import constants_inf ^ --hidden-import constants_inf ^
--hidden-import ai_availability_status ^
--hidden-import file_data ^ --hidden-import file_data ^
--hidden-import remote_command_inf ^ --hidden-import remote_command_inf ^
--hidden-import remote_command_handler_inf ^ --hidden-import remote_command_handler_inf ^
@@ -49,8 +50,9 @@ start.py
robocopy "dist\azaion-inference\_internal" "..\dist-azaion\_internal" "ai_config.cp312-win_amd64.pyd" "annotation.cp312-win_amd64.pyd" robocopy "dist\azaion-inference\_internal" "..\dist-azaion\_internal" "ai_config.cp312-win_amd64.pyd" "annotation.cp312-win_amd64.pyd"
robocopy "dist\azaion-inference\_internal" "..\dist-azaion\_internal" "constants_inf.cp312-win_amd64.pyd" "file_data.cp312-win_amd64.pyd" robocopy "dist\azaion-inference\_internal" "..\dist-azaion\_internal" "constants_inf.cp312-win_amd64.pyd" "file_data.cp312-win_amd64.pyd"
robocopy "dist\azaion-inference\_internal" "..\dist-azaion\_internal" "ai_availability_status.pyd"
robocopy "dist\azaion-inference\_internal" "..\dist-azaion\_internal" "remote_command_inf.cp312-win_amd64.pyd" "remote_command_handler_inf.cp312-win_amd64.pyd" robocopy "dist\azaion-inference\_internal" "..\dist-azaion\_internal" "remote_command_inf.cp312-win_amd64.pyd" "remote_command_handler_inf.cp312-win_amd64.pyd"
robocopy "dist\azaion-inference\_internal" "..\dist-azaion\_internal" "inference.cp312-win_amd64.pyd" "inference_engine.cp312-win_amd64.pyd" robocopy "dist\azaion-inference\_internal" "..\dist-azaion\_internal" "inference.cp312-win_amd64.pyd" "inference_engine.cp312-win_amd64.pyd"
robocopy "dist\azaion-inference\_internal" "..\dist-azaion\_internal" "loader_client.cp312-win_amd64.pyd" "tensorrt_engine.cp312-win_amd64.pyd" robocopy "dist\azaion-inference\_internal" "..\dist-azaion\_internal" "loader_client.cp312-win_amd64.pyd" "tensorrt_engine.cp312-win_amd64.pyd"
robocopy "dist\azaion-inference\_internal" "..\dist-azaion\_internal" "onnx_engine.cp312-win_amd64.pyd" "main_inference.cp312-win_amd64.pyd" robocopy "dist\azaion-inference\_internal" "..\dist-azaion\_internal" "onnx_engine.cp312-win_amd64.pyd" "main_inference.cp312-win_amd64.pyd"
+3 -1
View File
@@ -1,3 +1,4 @@
from ai_availability_status cimport AIAvailabilityStatus
from remote_command_inf cimport RemoteCommand from remote_command_inf cimport RemoteCommand
from annotation cimport Annotation, Detection from annotation cimport Annotation, Detection
from ai_config cimport AIRecognitionConfig from ai_config cimport AIRecognitionConfig
@@ -12,6 +13,7 @@ cdef class Inference:
cdef dict[str, list(Detection)] _tile_detections cdef dict[str, list(Detection)] _tile_detections
cdef AIRecognitionConfig ai_config cdef AIRecognitionConfig ai_config
cdef bint stop_signal cdef bint stop_signal
cdef AIAvailabilityStatus ai_availability_status
cdef str model_input cdef str model_input
cdef int model_width cdef int model_width
@@ -19,7 +21,7 @@ cdef class Inference:
cdef int tile_width cdef int tile_width
cdef int tile_height cdef int tile_height
cdef build_tensor_engine(self, object updater_callback) cdef bytes get_onnx_engine_bytes(self)
cdef init_ai(self) cdef init_ai(self)
cdef bint is_building_engine cdef bint is_building_engine
cdef bint is_video(self, str filepath) cdef bint is_video(self, str filepath)
+33 -39
View File
@@ -5,6 +5,8 @@ from pathlib import Path
import cv2 import cv2
import numpy as np import numpy as np
cimport constants_inf cimport constants_inf
from ai_availability_status cimport AIAvailabilityEnum, AIAvailabilityStatus
from remote_command_inf cimport RemoteCommand from remote_command_inf cimport RemoteCommand
from annotation cimport Detection, Annotation from annotation cimport Detection, Annotation
from ai_config cimport AIRecognitionConfig from ai_config cimport AIRecognitionConfig
@@ -60,67 +62,59 @@ cdef class Inference:
self.tile_height = 0 self.tile_height = 0
self.engine = None self.engine = None
self.is_building_engine = False self.is_building_engine = False
self.ai_availability_status = AIAvailabilityStatus()
self.init_ai()
cdef build_tensor_engine(self, object updater_callback): cdef bytes get_onnx_engine_bytes(self):
if tensor_gpu_index == -1:
return
try:
engine_filename = TensorRTEngine.get_engine_filename(0)
models_dir = constants_inf.MODELS_FOLDER models_dir = constants_inf.MODELS_FOLDER
self.ai_availability_status.set_status(AIAvailabilityEnum.DOWNLOADING)
self.is_building_engine = True
updater_callback('downloading')
res = self.loader_client.load_big_small_resource(engine_filename, models_dir)
if res.err is None:
constants_inf.log('tensor rt engine is here, no need to build')
self.is_building_engine = False
updater_callback('enabled')
return
constants_inf.logerror(res.err)
# time.sleep(8) # prevent simultaneously loading dll and models
updater_callback('converting')
constants_inf.log('try to load onnx')
res = self.loader_client.load_big_small_resource(constants_inf.AI_ONNX_MODEL_FILE, models_dir) res = self.loader_client.load_big_small_resource(constants_inf.AI_ONNX_MODEL_FILE, models_dir)
if res.err is not None: if res.err is not None:
updater_callback(f'Error. {res.err}') raise Exception(res.err)
model_bytes = TensorRTEngine.convert_from_onnx(res.data) return res.data
updater_callback('uploading')
res = self.loader_client.upload_big_small_resource(model_bytes, <str> engine_filename, models_dir)
if res.err is not None:
updater_callback(f'Error. {res.err}')
constants_inf.log(f'uploaded {engine_filename} to CDN and API')
self.is_building_engine = False
updater_callback('enabled')
except Exception as e:
updater_callback(f'Error. {str(e)}')
cdef init_ai(self): cdef init_ai(self):
constants_inf.log(<str> 'init AI...')
try:
while self.is_building_engine:
time.sleep(1)
if self.engine is not None: if self.engine is not None:
return return
self.is_building_engine = True
models_dir = constants_inf.MODELS_FOLDER models_dir = constants_inf.MODELS_FOLDER
if tensor_gpu_index > -1: if tensor_gpu_index > -1:
while self.is_building_engine: try:
time.sleep(1)
engine_filename = TensorRTEngine.get_engine_filename(0) engine_filename = TensorRTEngine.get_engine_filename(0)
self.ai_availability_status.set_status(AIAvailabilityEnum.DOWNLOADING)
res = self.loader_client.load_big_small_resource(engine_filename, models_dir) res = self.loader_client.load_big_small_resource(engine_filename, models_dir)
if res.err is not None: if res.err is not None:
raise Exception(res.err) raise Exception(res.err)
self.engine = TensorRTEngine(res.data) self.engine = TensorRTEngine(res.data)
else: self.ai_availability_status.set_status(AIAvailabilityEnum.ENABLED)
res = self.loader_client.load_big_small_resource(constants_inf.AI_ONNX_MODEL_FILE, models_dir) except Exception as e:
self.ai_availability_status.set_status(AIAvailabilityEnum.ERROR, <str>str(e))
onnx_engine_bytes = self.get_onnx_engine_bytes()
self.ai_availability_status.set_status(AIAvailabilityEnum.CONVERTING)
model_bytes = TensorRTEngine.convert_from_onnx(res.data)
self.ai_availability_status.set_status(AIAvailabilityEnum.UPLOADING)
res = self.loader_client.upload_big_small_resource(model_bytes, <str> engine_filename, models_dir)
if res.err is not None: if res.err is not None:
raise Exception(res.err) self.ai_availability_status.set_status(AIAvailabilityEnum.ERROR, res.err)
self.engine = OnnxEngine(res.data) self.ai_availability_status.set_status(AIAvailabilityEnum.ENABLED)
else:
self.engine = OnnxEngine(<bytes>self.get_onnx_engine_bytes())
self.is_building_engine = False
self.model_height, self.model_width = self.engine.get_input_shape() self.model_height, self.model_width = self.engine.get_input_shape()
#todo: temporarily, send it from the client #todo: temporarily, send it from the client
self.tile_width = 550 self.tile_width = 550
self.tile_height = 550 self.tile_height = 550
except Exception as e:
self.ai_availability_status.set_status(AIAvailabilityEnum.ERROR, <str>str(e))
self.is_building_engine = False
cdef preprocess(self, frames): cdef preprocess(self, frames):
blobs = [cv2.dnn.blobFromImage(frame, blobs = [cv2.dnn.blobFromImage(frame,
+2 -2
View File
@@ -44,8 +44,8 @@ cdef class CommandProcessor:
if command.command_type == CommandType.INFERENCE: if command.command_type == CommandType.INFERENCE:
self.inference_queue.put(command) self.inference_queue.put(command)
elif command.command_type == CommandType.AI_AVAILABILITY_CHECK: elif command.command_type == CommandType.AI_AVAILABILITY_CHECK:
self.inference.build_tensor_engine(lambda status: self.remote_handler.send(command.client_id, status = self.inference.ai_availability_status.serialize()
RemoteCommand(CommandType.AI_AVAILABILITY_RESULT, None, status).serialize())) self.remote_handler.send(command.client_id, RemoteCommand(CommandType.AI_AVAILABILITY_RESULT, status).serialize())
elif command.command_type == CommandType.STOP_INFERENCE: elif command.command_type == CommandType.STOP_INFERENCE:
self.inference.stop() self.inference.stop()
elif command.command_type == CommandType.EXIT: elif command.command_type == CommandType.EXIT:
+1
View File
@@ -14,6 +14,7 @@ trace_line = False
extensions = [ extensions = [
Extension('constants_inf', ['constants_inf.pyx'], **debug_args), Extension('constants_inf', ['constants_inf.pyx'], **debug_args),
Extension('ai_availability_status', ['ai_availability_status.pyx'], **debug_args),
Extension('file_data', ['file_data.pyx'], **debug_args), Extension('file_data', ['file_data.pyx'], **debug_args),
Extension('remote_command_inf', ['remote_command_inf.pyx'], **debug_args), Extension('remote_command_inf', ['remote_command_inf.pyx'], **debug_args),
Extension('remote_command_handler_inf', ['remote_command_handler_inf.pyx'], **debug_args), Extension('remote_command_handler_inf', ['remote_command_handler_inf.pyx'], **debug_args),
+1
View File
@@ -11,6 +11,7 @@ using Azaion.Common.DTO.Config;
using Azaion.Common.Events; using Azaion.Common.Events;
using Azaion.Common.Extensions; using Azaion.Common.Extensions;
using Azaion.Common.Services; using Azaion.Common.Services;
using Azaion.Common.Services.Inference;
using Azaion.Dataset; using Azaion.Dataset;
using CommandLine; using CommandLine;
using LibVLCSharp.Shared; using LibVLCSharp.Shared;
+1 -1
View File
@@ -6,8 +6,8 @@ using System.Windows.Media;
using Azaion.Common.Database; using Azaion.Common.Database;
using Azaion.Common.DTO; using Azaion.Common.DTO;
using Azaion.Common.DTO.Config; using Azaion.Common.DTO.Config;
using Azaion.Common.Extensions;
using Azaion.Common.Services; using Azaion.Common.Services;
using Azaion.Common.Services.Inference;
using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Options; using Microsoft.Extensions.Options;
using SharpVectors.Converters; using SharpVectors.Converters;
+2 -2
View File
@@ -1,12 +1,12 @@
{ {
"LoaderClientConfig": { "LoaderClientConfig": {
"ZeroMqHost": "127.0.0.1", "ZeroMqHost": "127.0.0.1",
"ZeroMqPort": 5024, "ZeroMqPort": 5025,
"ApiUrl": "https://api.azaion.com" "ApiUrl": "https://api.azaion.com"
}, },
"InferenceClientConfig": { "InferenceClientConfig": {
"ZeroMqHost": "127.0.0.1", "ZeroMqHost": "127.0.0.1",
"ZeroMqPort": 5126, "ZeroMqPort": 5127,
"ApiUrl": "https://api.azaion.com" "ApiUrl": "https://api.azaion.com"
}, },
"GpsDeniedClientConfig": { "GpsDeniedClientConfig": {