Errors sending to UI

notifying client of AI model conversion
This commit is contained in:
dzaitsev
2025-05-07 17:32:29 +03:00
committed by Alex Bezdieniezhnykh
42 changed files with 630 additions and 363 deletions
+58 -109
View File
@@ -14,6 +14,7 @@ using Azaion.Common.DTO.Config;
using Azaion.Common.Events;
using Azaion.Common.Extensions;
using Azaion.Common.Services;
using Azaion.CommonSecurity.DTO.Commands;
using LibVLCSharp.Shared;
using MediatR;
using Microsoft.WindowsAPICodePack.Dialogs;
@@ -37,9 +38,9 @@ public partial class Annotator
private readonly IConfigUpdater _configUpdater;
private readonly HelpWindow _helpWindow;
private readonly ILogger<Annotator> _logger;
private readonly AnnotationService _annotationService;
private readonly IDbFactory _dbFactory;
private readonly IInferenceService _inferenceService;
private readonly IInferenceClient _inferenceClient;
private ObservableCollection<DetectionClass> AnnotationClasses { get; set; } = new();
private bool _suspendLayout;
@@ -47,7 +48,6 @@ public partial class Annotator
public readonly CancellationTokenSource MainCancellationSource = new();
public CancellationTokenSource DetectionCancellationSource = new();
public bool FollowAI = false;
public bool IsInferenceNow = false;
private readonly TimeSpan _thresholdBefore = TimeSpan.FromMilliseconds(50);
@@ -57,6 +57,7 @@ public partial class Annotator
public ObservableCollection<MediaFileInfo> AllMediaFiles { get; set; } = new();
public ObservableCollection<MediaFileInfo> FilteredMediaFiles { get; set; } = new();
public Dictionary<string, MediaFileInfo> MediaFilesDict = new();
public IntervalTree<TimeSpan, Annotation> TimedAnnotations { get; set; } = new();
@@ -69,9 +70,9 @@ public partial class Annotator
FormState formState,
HelpWindow helpWindow,
ILogger<Annotator> logger,
AnnotationService annotationService,
IDbFactory dbFactory,
IInferenceService inferenceService,
IInferenceClient inferenceClient,
IGpsMatcherService gpsMatcherService)
{
InitializeComponent();
@@ -84,9 +85,9 @@ public partial class Annotator
_formState = formState;
_helpWindow = helpWindow;
_logger = logger;
_annotationService = annotationService;
_dbFactory = dbFactory;
_inferenceService = inferenceService;
_inferenceClient = inferenceClient;
_gpsMatcherService = gpsMatcherService;
Loaded += OnLoaded;
@@ -107,6 +108,28 @@ public partial class Annotator
_logger.LogError(e, e.Message);
}
};
_inferenceClient.AIAvailabilityReceived += (_, command) =>
{
Dispatcher.Invoke(() =>
{
_logger.LogInformation(command.Message);
var aiEnabled = command.Message == "enabled";
AIDetectBtn.IsEnabled = aiEnabled;
var aiDisabledText = "Будь ласка, зачекайте, наразі розпізнавання AI недоступне";
var messagesDict = new Dictionary<string, string>
{
{ "disabled", aiDisabledText },
{ "downloading", "Будь ласка зачекайте, йде завантаження AI для Вашої відеокарти" },
{ "converting", "Будь ласка зачекайте, йде налаштування AI під Ваше залізо. (5-12 хвилин в залежності від моделі відеокарти, до 50 хв на старих GTX1650)" },
{ "uploading", "Будь ласка зачекайте, йде зберігання" },
{ "enabled", "AI готовий для розпізнавання" }
};
StatusHelp.Text = messagesDict!.GetValueOrDefault(command.Message, aiDisabledText);
if (aiEnabled)
StatusHelp.Foreground = aiEnabled ? Brushes.White : Brushes.Red;
});
};
_inferenceClient.Send(RemoteCommand.Create(CommandType.AIAvailabilityCheck));
Editor.GetTimeFunc = () => TimeSpan.FromMilliseconds(_mediaPlayer.Time);
MapMatcherComponent.Init(_appConfig, _gpsMatcherService);
@@ -126,9 +149,6 @@ public partial class Annotator
TbFolder.Text = _appConfig.DirectoriesConfig.VideosDirectory;
LvClasses.Init(_appConfig.AnnotationConfig.DetectionClasses);
if (LvFiles.Items.IsEmpty)
BlinkHelp(HelpTexts.HelpTextsDict[HelpTextEnum.Initial]);
}
public void BlinkHelp(string helpText, int times = 2)
@@ -175,8 +195,6 @@ public partial class Annotator
LvFiles.MouseDoubleClick += async (_, _) =>
{
if (IsInferenceNow)
FollowAI = false;
await _mediator.Publish(new AnnotatorControlEvent(PlaybackControlEnum.Play));
};
@@ -238,8 +256,6 @@ public partial class Annotator
public void OpenAnnotationResult(AnnotationResult res)
{
if (IsInferenceNow)
FollowAI = false;
_mediaPlayer.SetPause(true);
Editor.RemoveAllAnns();
_mediaPlayer.Time = (long)res.Annotation.Time.TotalMilliseconds;
@@ -325,6 +341,10 @@ public partial class Annotator
//Add manually
public void AddAnnotation(Annotation annotation)
{
var mediaInfo = (MediaFileInfo)LvFiles.SelectedItem;
if ((mediaInfo?.FName ?? "") != annotation.OriginalMediaName)
return;
var time = annotation.Time;
var previousAnnotations = TimedAnnotations.Query(time);
TimedAnnotations.Remove(previousAnnotations);
@@ -342,10 +362,8 @@ public partial class Annotator
_logger.LogError(e, e.Message);
throw;
}
}
var dict = _formState.AnnotationResults
.Select((x, i) => new { x.Annotation.Time, Index = i })
.ToDictionary(x => x.Time, x => x.Index);
@@ -399,11 +417,8 @@ public partial class Annotator
mediaFile.HasAnnotations = labelsDict.ContainsKey(mediaFile.FName);
AllMediaFiles = new ObservableCollection<MediaFileInfo>(allFiles);
MediaFilesDict = AllMediaFiles.ToDictionary(x => x.FName);
LvFiles.ItemsSource = AllMediaFiles;
BlinkHelp(AllMediaFiles.Count == 0
? HelpTexts.HelpTextsDict[HelpTextEnum.Initial]
: HelpTexts.HelpTextsDict[HelpTextEnum.PlayVideo]);
DataContext = this;
}
@@ -463,14 +478,13 @@ public partial class Annotator
// Live-filters the media file list as the user types: rebuilds FilteredMediaFiles
// with a case-insensitive substring match on each file's Name, refreshes the
// FName -> MediaFileInfo lookup to track the filtered set, and rebinds the ListView.
private void TbFilter_OnTextChanged(object sender, TextChangedEventArgs e)
{
FilteredMediaFiles = new ObservableCollection<MediaFileInfo>(AllMediaFiles.Where(x => x.Name.ToLower().Contains(TbFilter.Text.ToLower())).ToList());
MediaFilesDict = FilteredMediaFiles.ToDictionary(x => x.FName);
LvFiles.ItemsSource = FilteredMediaFiles;
// NOTE(review): ItemsSource is assigned twice on consecutive identical lines —
// this looks like a duplicated/merge-artifact line; confirm one can be removed.
LvFiles.ItemsSource = FilteredMediaFiles;
}
// Play/Pause button handler: publishes Pause when the player is currently able
// to pause (i.e. is playing), otherwise publishes Play.
private void PlayClick(object sender, RoutedEventArgs e)
{
// If AI inference is running, stop auto-following the AI's playback position
// before the user takes manual control.
if (IsInferenceNow)
FollowAI = false;
_mediator.Publish(new AnnotatorControlEvent(_mediaPlayer.CanPause ? PlaybackControlEnum.Pause : PlaybackControlEnum.Play));
}
@@ -501,13 +515,22 @@ public partial class Annotator
LvFilesContextMenu.DataContext = listItem!.DataContext;
}
public void AutoDetect(object sender, RoutedEventArgs e)
// Click handler for the AI Detect button. `async void` is acceptable here only
// because this is a top-level WPF event handler; any exception thrown by
// AutoDetect is caught and logged so it cannot escape the handler and crash
// the dispatcher loop.
private async void AIDetectBtn_OnClick(object sender, RoutedEventArgs e)
{
try
{
await AutoDetect();
}
catch (Exception ex)
{
_logger.LogError(ex, ex.Message);
}
}
public async Task AutoDetect()
{
if (IsInferenceNow)
{
FollowAI = true;
return;
}
if (LvFiles.Items.IsEmpty)
return;
@@ -517,96 +540,22 @@ public partial class Annotator
Dispatcher.Invoke(() => Editor.ResetBackground());
IsInferenceNow = true;
FollowAI = true;
AIDetectBtn.IsEnabled = false;
DetectionCancellationSource = new CancellationTokenSource();
var detectToken = DetectionCancellationSource.Token;
_ = Task.Run(async () =>
{
while (!detectToken.IsCancellationRequested)
{
var files = new List<string>();
await Dispatcher.Invoke(async () =>
{
//Take all medias
files = (LvFiles.ItemsSource as IEnumerable<MediaFileInfo>)?.Skip(LvFiles.SelectedIndex)
//.Where(x => !x.HasAnnotations)
.Take(Constants.DETECTION_BATCH_SIZE)
.Select(x => x.Path)
.ToList() ?? [];
if (files.Count != 0)
{
await _mediator.Publish(new AnnotatorControlEvent(PlaybackControlEnum.Play), detectToken);
await ReloadAnnotations();
}
});
if (files.Count == 0)
break;
await _inferenceService.RunInference(files, async annotationImage => await ProcessDetection(annotationImage, detectToken), detectToken);
var files = (FilteredMediaFiles.Count == 0 ? AllMediaFiles : FilteredMediaFiles)
.Skip(LvFiles.SelectedIndex)
.Select(x => x.Path)
.ToList();
if (files.Count == 0)
return;
Dispatcher.Invoke(() =>
{
if (LvFiles.SelectedIndex + files.Count >= LvFiles.Items.Count)
DetectionCancellationSource.Cancel();
LvFiles.SelectedIndex += files.Count;
});
}
Dispatcher.Invoke(() =>
{
LvFiles.Items.Refresh();
IsInferenceNow = false;
FollowAI = false;
});
});
}
await _inferenceService.RunInference(files, DetectionCancellationSource.Token);
private async Task ProcessDetection(AnnotationImage annotationImage, CancellationToken ct = default)
{
await Dispatcher.Invoke(async () =>
{
try
{
var annotation = await _annotationService.SaveAnnotation(annotationImage, ct);
if (annotation.OriginalMediaName != _formState.CurrentMedia?.FName)
{
var nextFile = (LvFiles.ItemsSource as IEnumerable<MediaFileInfo>)?
.Select((info, i) => new
{
MediaInfo = info,
Index = i
})
.FirstOrDefault(x => x.MediaInfo.FName == annotation.OriginalMediaName);
if (nextFile != null)
{
LvFiles.SelectedIndex = nextFile.Index;
await _mediator.Publish(new AnnotatorControlEvent(PlaybackControlEnum.Play), ct);
}
}
AddAnnotation(annotation);
if (FollowAI)
SeekTo(annotationImage.Milliseconds, false);
var log = string.Join(Environment.NewLine, annotation.Detections.Select(det =>
$"{_appConfig.AnnotationConfig.DetectionClassesDict[det.ClassNumber].Name}: " +
$"xy=({det.CenterX:F2},{det.CenterY:F2}), " +
$"size=({det.Width:F2}, {det.Height:F2}), " +
$"conf: {det.Confidence*100:F0}%"));
Dispatcher.Invoke(() =>
{
if (_formState.CurrentMedia != null)
_formState.CurrentMedia.HasAnnotations = true;
LvFiles.Items.Refresh();
StatusHelp.Text = log;
});
}
catch (Exception e)
{
_logger.LogError(e, e.Message);
}
});
LvFiles.Items.Refresh();
IsInferenceNow = false;
AIDetectBtn.IsEnabled = true;
}
private void SwitchGpsPanel(object sender, RoutedEventArgs e)