Add Warning AI availability status; fix AI availability handling (1)

This commit is contained in:
Oleksandr Bezdieniezhnykh
2025-09-03 16:36:26 +03:00
parent a7a99c49c0
commit b3665630ed
6 changed files with 31 additions and 12 deletions
@@ -10,6 +10,7 @@ public enum AIAvailabilityEnum
Converting = 20,
Uploading = 30,
Enabled = 200,
Warning = 300,
Error = 500
}
@@ -27,6 +28,7 @@ public class AIAvailabilityStatusEvent : INotification
{ AIAvailabilityEnum.Converting, "Йде налаштування AI під Ваше залізо. (5-12 хвилин в залежності від моделі відеокарти, до 50 хв на старих GTX1650)" },
{ AIAvailabilityEnum.Uploading, "Йде зберігання AI" },
{ AIAvailabilityEnum.Enabled, "AI готовий для розпізнавання" },
{ AIAvailabilityEnum.Warning, "Неможливо запустити AI наразі, йде налаштування під Ваше залізо" },
{ AIAvailabilityEnum.Error, "Помилка під час налаштування AI" }
};
@@ -9,25 +9,37 @@ public interface IInferenceService
{
Task RunInference(List<string> mediaPaths, int tileSize, CancellationToken ct = default);
CancellationTokenSource InferenceCancelTokenSource { get; set; }
CancellationTokenSource CheckAIAvailabilityTokenSource { get; set; }
void StopInference();
}
// SHOULD BE ONLY ONE INSTANCE OF InferenceService. Do not add ANY NotificationHandler to it!
// _inferenceCancelTokenSource should be created only once.
public class InferenceService(
IInferenceClient client,
IAzaionApi azaionApi,
IOptions<AIRecognitionConfig> aiConfigOptions) : IInferenceService
public class InferenceService : IInferenceService
{
private readonly IInferenceClient _client;
private readonly IAzaionApi _azaionApi;
private readonly IOptions<AIRecognitionConfig> _aiConfigOptions;
public InferenceService(IInferenceClient client,
IAzaionApi azaionApi,
IOptions<AIRecognitionConfig> aiConfigOptions)
{
_client = client;
_azaionApi = azaionApi;
_aiConfigOptions = aiConfigOptions;
_ = Task.Run(async () => await CheckAIAvailabilityStatus());
}
public CancellationTokenSource InferenceCancelTokenSource { get; set; } = new();
public CancellationTokenSource CheckAIAvailabilityTokenSource { get; set; } = new();
public async Task CheckAIAvailabilityStatus()
private async Task CheckAIAvailabilityStatus()
{
CheckAIAvailabilityTokenSource = new CancellationTokenSource();
while (!CheckAIAvailabilityTokenSource.IsCancellationRequested)
{
client.Send(RemoteCommand.Create(CommandType.AIAvailabilityCheck));
_client.Send(RemoteCommand.Create(CommandType.AIAvailabilityCheck));
await Task.Delay(10000, CheckAIAvailabilityTokenSource.Token);
}
}
@@ -35,16 +47,16 @@ public class InferenceService(
public async Task RunInference(List<string> mediaPaths, int tileSize, CancellationToken ct = default)
{
InferenceCancelTokenSource = new CancellationTokenSource();
client.Send(RemoteCommand.Create(CommandType.Login, azaionApi.Credentials));
_client.Send(RemoteCommand.Create(CommandType.Login, _azaionApi.Credentials));
var aiConfig = aiConfigOptions.Value;
var aiConfig = _aiConfigOptions.Value;
aiConfig.Paths = mediaPaths;
aiConfig.TileSize = tileSize;
client.Send(RemoteCommand.Create(CommandType.Inference, aiConfig));
_client.Send(RemoteCommand.Create(CommandType.Inference, aiConfig));
using var combinedTokenSource = CancellationTokenSource.CreateLinkedTokenSource(ct, InferenceCancelTokenSource.Token);
await combinedTokenSource.Token.AsTask();
}
public void StopInference() => client.Stop();
public void StopInference() => _client.Stop();
}