Merge remote-tracking branch 'origin/stage' into stage

This commit is contained in:
Denys Zaitsev
2025-09-02 12:29:39 +03:00
80 changed files with 2141 additions and 822 deletions
+1
View File
@@ -23,3 +23,4 @@ azaion\.*\.big
_internal _internal
*.spec *.spec
dist dist
*.jpg
+72 -118
View File
@@ -14,6 +14,7 @@ using Azaion.Common.DTO.Config;
using Azaion.Common.Events; using Azaion.Common.Events;
using Azaion.Common.Extensions; using Azaion.Common.Extensions;
using Azaion.Common.Services; using Azaion.Common.Services;
using Azaion.Common.Services.Inference;
using LibVLCSharp.Shared; using LibVLCSharp.Shared;
using MediatR; using MediatR;
using Microsoft.WindowsAPICodePack.Dialogs; using Microsoft.WindowsAPICodePack.Dialogs;
@@ -29,7 +30,7 @@ namespace Azaion.Annotator;
public partial class Annotator public partial class Annotator
{ {
private readonly AppConfig _appConfig; private readonly AppConfig _appConfig;
private readonly LibVLC _libVLC; private readonly LibVLC _libVlc;
private readonly MediaPlayer _mediaPlayer; private readonly MediaPlayer _mediaPlayer;
private readonly IMediator _mediator; private readonly IMediator _mediator;
private readonly FormState _formState; private readonly FormState _formState;
@@ -42,25 +43,26 @@ public partial class Annotator
private readonly IInferenceClient _inferenceClient; private readonly IInferenceClient _inferenceClient;
private bool _suspendLayout; private bool _suspendLayout;
private bool _gpsPanelVisible = false; private bool _gpsPanelVisible;
public readonly CancellationTokenSource MainCancellationSource = new(); private readonly CancellationTokenSource _mainCancellationSource = new();
public CancellationTokenSource DetectionCancellationSource = new(); public CancellationTokenSource DetectionCancellationSource = new();
public bool IsInferenceNow = false; private bool _isInferenceNow;
private readonly TimeSpan _thresholdBefore = TimeSpan.FromMilliseconds(50); private readonly TimeSpan _thresholdBefore = TimeSpan.FromMilliseconds(50);
private readonly TimeSpan _thresholdAfter = TimeSpan.FromMilliseconds(150); private readonly TimeSpan _thresholdAfter = TimeSpan.FromMilliseconds(150);
public ObservableCollection<MediaFileInfo> AllMediaFiles { get; set; } = new(); public ObservableCollection<MediaFileInfo> AllMediaFiles { get; set; } = new();
public ObservableCollection<MediaFileInfo> FilteredMediaFiles { get; set; } = new(); private ObservableCollection<MediaFileInfo> FilteredMediaFiles { get; set; } = new();
public Dictionary<string, MediaFileInfo> MediaFilesDict = new(); public Dictionary<string, MediaFileInfo> MediaFilesDict = new();
public IntervalTree<TimeSpan, Annotation> TimedAnnotations { get; set; } = new(); public IntervalTree<TimeSpan, Annotation> TimedAnnotations { get; set; } = new();
public string MainTitle { get; set; }
public Annotator( public Annotator(
IConfigUpdater configUpdater, IConfigUpdater configUpdater,
IOptions<AppConfig> appConfig, IOptions<AppConfig> appConfig,
LibVLC libVLC, LibVLC libVlc,
MediaPlayer mediaPlayer, MediaPlayer mediaPlayer,
IMediator mediator, IMediator mediator,
FormState formState, FormState formState,
@@ -73,9 +75,11 @@ public partial class Annotator
{ {
InitializeComponent(); InitializeComponent();
MainTitle = $"Azaion Annotator {Constants.GetLocalVersion()}";
Title = MainTitle;
_appConfig = appConfig.Value; _appConfig = appConfig.Value;
_configUpdater = configUpdater; _configUpdater = configUpdater;
_libVLC = libVLC; _libVlc = libVlc;
_mediaPlayer = mediaPlayer; _mediaPlayer = mediaPlayer;
_mediator = mediator; _mediator = mediator;
_formState = formState; _formState = formState;
@@ -88,7 +92,7 @@ public partial class Annotator
Loaded += OnLoaded; Loaded += OnLoaded;
Closed += OnFormClosed; Closed += OnFormClosed;
Activated += (_, _) => _formState.ActiveWindow = WindowEnum.Annotator; Activated += (_, _) => _formState.ActiveWindow = WindowEnum.Annotator;
TbFolder.TextChanged += async (sender, args) => TbFolder.TextChanged += async (_, _) =>
{ {
if (!Path.Exists(TbFolder.Text)) if (!Path.Exists(TbFolder.Text))
return; return;
@@ -103,38 +107,6 @@ public partial class Annotator
_logger.LogError(e, e.Message); _logger.LogError(e, e.Message);
} }
}; };
_inferenceClient.AIAvailabilityReceived += (_, command) =>
{
Dispatcher.Invoke(() =>
{
_logger.LogInformation(command.Message);
var aiEnabled = command.Message == "enabled";
AIDetectBtn.IsEnabled = aiEnabled;
var aiDisabledText = "Будь ласка, зачекайте, наразі розпізнавання AI недоступне";
var messagesDict = new Dictionary<string, string>
{
{ "disabled", aiDisabledText },
{ "downloading", "Будь ласка зачекайте, йде завантаження AI для Вашої відеокарти" },
{ "converting", "Будь ласка зачекайте, йде налаштування AI під Ваше залізо. (5-12 хвилин в залежності від моделі відеокарти, до 50 хв на старих GTX1650)" },
{ "uploading", "Будь ласка зачекайте, йде зберігання" },
{ "enabled", "AI готовий для розпізнавання" }
};
if (command.Message?.StartsWith("Error") ?? false)
{
_logger.LogError(command.Message);
StatusHelp.Text = command.Message;
}
else
StatusHelp.Text = messagesDict!.GetValueOrDefault(command.Message, aiDisabledText);
if (aiEnabled)
StatusHelp.Foreground = aiEnabled ? Brushes.White : Brushes.Red;
});
};
_inferenceClient.Send(RemoteCommand.Create(CommandType.AIAvailabilityCheck));
Editor.GetTimeFunc = () => TimeSpan.FromMilliseconds(_mediaPlayer.Time); Editor.GetTimeFunc = () => TimeSpan.FromMilliseconds(_mediaPlayer.Time);
MapMatcherComponent.Init(_appConfig, gpsMatcherService); MapMatcherComponent.Init(_appConfig, gpsMatcherService);
} }
@@ -176,25 +148,11 @@ public partial class Annotator
VideoView.MediaPlayer = _mediaPlayer; VideoView.MediaPlayer = _mediaPlayer;
//On start playing media //On start playing media
_mediaPlayer.Playing += async (sender, args) => _mediaPlayer.Playing += (_, _) =>
{ {
if (_formState.CurrentMrl == _mediaPlayer.Media?.Mrl)
return; //already loaded all the info
await Dispatcher.Invoke(async () => await ReloadAnnotations());
//show image
if (_formState.CurrentMedia?.MediaType == MediaTypes.Image)
{
await Task.Delay(100); //wait to load the frame and set on pause
ShowTimeAnnotations(TimeSpan.FromMilliseconds(_mediaPlayer.Time), showImage: true);
return;
}
_formState.CurrentMrl = _mediaPlayer.Media?.Mrl ?? "";
uint vw = 0, vh = 0; uint vw = 0, vh = 0;
_mediaPlayer.Size(0, ref vw, ref vh); _mediaPlayer.Size(0, ref vw, ref vh);
_formState.CurrentVideoSize = new Size(vw, vh); _formState.CurrentMediaSize = new Size(vw, vh);
_formState.CurrentVideoLength = TimeSpan.FromMilliseconds(_mediaPlayer.Length); _formState.CurrentVideoLength = TimeSpan.FromMilliseconds(_mediaPlayer.Length);
}; };
@@ -210,10 +168,10 @@ public partial class Annotator
_mediator.Publish(new AnnClassSelectedEvent(selectedClass)); _mediator.Publish(new AnnClassSelectedEvent(selectedClass));
}; };
_mediaPlayer.PositionChanged += (o, args) => _mediaPlayer.PositionChanged += (_, _) =>
ShowTimeAnnotations(TimeSpan.FromMilliseconds(_mediaPlayer.Time)); ShowTimeAnnotations(TimeSpan.FromMilliseconds(_mediaPlayer.Time));
VideoSlider.ValueChanged += (value, newValue) => VideoSlider.ValueChanged += (_, newValue) =>
_mediaPlayer.Position = (float)(newValue / VideoSlider.Maximum); _mediaPlayer.Position = (float)(newValue / VideoSlider.Maximum);
VideoSlider.KeyDown += (sender, args) => VideoSlider.KeyDown += (sender, args) =>
@@ -224,51 +182,49 @@ public partial class Annotator
DgAnnotations.MouseDoubleClick += (sender, args) => DgAnnotations.MouseDoubleClick += (sender, args) =>
{ {
var dgRow = ItemsControl.ContainerFromElement((DataGrid)sender, (args.OriginalSource as DependencyObject)!) as DataGridRow; if (ItemsControl.ContainerFromElement((DataGrid)sender, (args.OriginalSource as DependencyObject)!) is DataGridRow dgRow)
if (dgRow != null) OpenAnnotationResult((Annotation)dgRow.Item);
OpenAnnotationResult((AnnotationResult)dgRow!.Item);
}; };
DgAnnotations.KeyUp += async (sender, args) => DgAnnotations.KeyUp += async (_, args) =>
{ {
switch (args.Key) switch (args.Key)
{ {
case Key.Up:
case Key.Down: //cursor is already moved by system behaviour case Key.Down: //cursor is already moved by system behaviour
OpenAnnotationResult((AnnotationResult)DgAnnotations.SelectedItem); OpenAnnotationResult((Annotation)DgAnnotations.SelectedItem);
break; break;
case Key.Delete: case Key.Delete:
var result = MessageBox.Show("Чи дійсно видалити аннотації?","Підтвердження видалення", MessageBoxButton.OKCancel, MessageBoxImage.Question); var result = MessageBox.Show("Чи дійсно видалити аннотації?","Підтвердження видалення", MessageBoxButton.OKCancel, MessageBoxImage.Question);
if (result != MessageBoxResult.OK) if (result != MessageBoxResult.OK)
return; return;
var res = DgAnnotations.SelectedItems.Cast<AnnotationResult>().ToList(); var res = DgAnnotations.SelectedItems.Cast<Annotation>().ToList();
var annotationNames = res.Select(x => x.Annotation.Name).ToList(); var annotationNames = res.Select(x => x.Name).ToList();
await _mediator.Publish(new AnnotationsDeletedEvent(annotationNames)); await _mediator.Publish(new AnnotationsDeletedEvent(annotationNames));
break; break;
} }
}; };
Editor.Mediator = _mediator;
DgAnnotations.ItemsSource = _formState.AnnotationResults; DgAnnotations.ItemsSource = _formState.AnnotationResults;
} }
public void OpenAnnotationResult(AnnotationResult res) private void OpenAnnotationResult(Annotation ann)
{ {
_mediaPlayer.SetPause(true); _mediaPlayer.SetPause(true);
if (!ann.IsSplit)
Editor.RemoveAllAnns(); Editor.RemoveAllAnns();
_mediaPlayer.Time = (long)res.Annotation.Time.TotalMilliseconds;
_mediaPlayer.Time = (long)ann.Time.TotalMilliseconds;
Dispatcher.Invoke(() => Dispatcher.Invoke(() =>
{ {
VideoSlider.Value = _mediaPlayer.Position * VideoSlider.Maximum; VideoSlider.Value = _mediaPlayer.Position * VideoSlider.Maximum;
StatusClock.Text = $"{TimeSpan.FromMilliseconds(_mediaPlayer.Time):mm\\:ss} / {_formState.CurrentVideoLength:mm\\:ss}"; StatusClock.Text = $"{TimeSpan.FromMilliseconds(_mediaPlayer.Time):mm\\:ss} / {_formState.CurrentVideoLength:mm\\:ss}";
Editor.ClearExpiredAnnotations(res.Annotation.Time); Editor.ClearExpiredAnnotations(ann.Time);
}); });
ShowAnnotation(res.Annotation, showImage: true); ShowAnnotation(ann, showImage: true, openResult: true);
} }
private void SaveUserSettings() private void SaveUserSettings()
{ {
@@ -281,7 +237,7 @@ public partial class Annotator
_configUpdater.Save(_appConfig); _configUpdater.Save(_appConfig);
} }
private void ShowTimeAnnotations(TimeSpan time, bool showImage = false) public void ShowTimeAnnotations(TimeSpan time, bool showImage = false)
{ {
Dispatcher.Invoke(() => Dispatcher.Invoke(() =>
{ {
@@ -289,31 +245,37 @@ public partial class Annotator
StatusClock.Text = $"{TimeSpan.FromMilliseconds(_mediaPlayer.Time):mm\\:ss} / {_formState.CurrentVideoLength:mm\\:ss}"; StatusClock.Text = $"{TimeSpan.FromMilliseconds(_mediaPlayer.Time):mm\\:ss} / {_formState.CurrentVideoLength:mm\\:ss}";
Editor.ClearExpiredAnnotations(time); Editor.ClearExpiredAnnotations(time);
}); });
var annotations = TimedAnnotations.Query(time).ToList();
ShowAnnotation(TimedAnnotations.Query(time).FirstOrDefault(), showImage); if (!annotations.Any())
return;
foreach (var ann in annotations)
ShowAnnotation(ann, showImage);
} }
private void ShowAnnotation(Annotation? annotation, bool showImage = false) private void ShowAnnotation(Annotation annotation, bool showImage = false, bool openResult = false)
{ {
if (annotation == null)
return;
Dispatcher.Invoke(async () => Dispatcher.Invoke(async () =>
{ {
var videoSize = _formState.CurrentVideoSize; if (showImage && !annotation.IsSplit && File.Exists(annotation.ImagePath))
if (showImage)
{ {
if (File.Exists(annotation.ImagePath)) Editor.SetBackground(await annotation.ImagePath.OpenImage());
{
Editor.SetImageSource(await annotation.ImagePath.OpenImage());
_formState.BackgroundTime = annotation.Time; _formState.BackgroundTime = annotation.Time;
videoSize = Editor.RenderSize;
} }
if (annotation.SplitTile != null && openResult)
{
var canvasTileLocation = new CanvasLabel(new YoloLabel(annotation.SplitTile, _formState.CurrentMediaSize),
RenderSize);
Editor.ZoomTo(new Point(canvasTileLocation.CenterX, canvasTileLocation.CenterY));
} }
Editor.CreateDetections(annotation.Time, annotation.Detections, _appConfig.AnnotationConfig.DetectionClasses, videoSize); else
Editor.CreateDetections(annotation, _appConfig.AnnotationConfig.DetectionClasses, _formState.CurrentMediaSize);
}); });
} }
private async Task ReloadAnnotations() public async Task ReloadAnnotations()
{
await Dispatcher.InvokeAsync(async () =>
{ {
_formState.AnnotationResults.Clear(); _formState.AnnotationResults.Clear();
TimedAnnotations.Clear(); TimedAnnotations.Clear();
@@ -321,32 +283,30 @@ public partial class Annotator
var annotations = await _dbFactory.Run(async db => var annotations = await _dbFactory.Run(async db =>
await db.Annotations.LoadWith(x => x.Detections) await db.Annotations.LoadWith(x => x.Detections)
.Where(x => x.OriginalMediaName == _formState.VideoName) .Where(x => x.OriginalMediaName == _formState.MediaName)
.OrderBy(x => x.Time) .OrderBy(x => x.Time)
.ToListAsync(token: MainCancellationSource.Token)); .ToListAsync(token: _mainCancellationSource.Token));
TimedAnnotations.Clear(); TimedAnnotations.Clear();
_formState.AnnotationResults.Clear(); _formState.AnnotationResults.Clear();
foreach (var ann in annotations) foreach (var ann in annotations)
{ {
// Duplicate for speed
TimedAnnotations.Add(ann.Time.Subtract(_thresholdBefore), ann.Time.Add(_thresholdAfter), ann); TimedAnnotations.Add(ann.Time.Subtract(_thresholdBefore), ann.Time.Add(_thresholdAfter), ann);
_formState.AnnotationResults.Add(new AnnotationResult(_appConfig.AnnotationConfig.DetectionClassesDict, ann)); _formState.AnnotationResults.Add(ann);
} }
});
} }
//Add manually //Add manually
public void AddAnnotation(Annotation annotation) public void AddAnnotation(Annotation annotation)
{ {
var mediaInfo = (MediaFileInfo)LvFiles.SelectedItem;
if ((mediaInfo?.FName ?? "") != annotation.OriginalMediaName)
return;
var time = annotation.Time; var time = annotation.Time;
var previousAnnotations = TimedAnnotations.Query(time); var previousAnnotations = TimedAnnotations.Query(time);
TimedAnnotations.Remove(previousAnnotations); TimedAnnotations.Remove(previousAnnotations);
TimedAnnotations.Add(time.Subtract(_thresholdBefore), time.Add(_thresholdAfter), annotation); TimedAnnotations.Add(time.Subtract(_thresholdBefore), time.Add(_thresholdAfter), annotation);
var existingResult = _formState.AnnotationResults.FirstOrDefault(x => x.Annotation.Time == time); var existingResult = _formState.AnnotationResults.FirstOrDefault(x => x.Time == time);
if (existingResult != null) if (existingResult != null)
{ {
try try
@@ -361,16 +321,14 @@ public partial class Annotator
} }
var dict = _formState.AnnotationResults var dict = _formState.AnnotationResults
.Select((x, i) => new { x.Annotation.Time, Index = i }) .Select((x, i) => new { x.Time, Index = i })
.ToDictionary(x => x.Time, x => x.Index); .ToDictionary(x => x.Time, x => x.Index);
var index = dict.Where(x => x.Key < time) var index = dict.Where(x => x.Key < time)
.OrderBy(x => time - x.Key) .OrderBy(x => time - x.Key)
.Select(x => x.Value + 1) .Select(x => x.Value + 1)
.FirstOrDefault(); .FirstOrDefault();
_formState.AnnotationResults.Insert(index, annotation);
var annRes = new AnnotationResult(_appConfig.AnnotationConfig.DetectionClassesDict, annotation);
_formState.AnnotationResults.Insert(index, annRes);
} }
private async Task ReloadFiles() private async Task ReloadFiles()
@@ -381,7 +339,7 @@ public partial class Annotator
var videoFiles = dir.GetFiles(_appConfig.AnnotationConfig.VideoFormats.ToArray()).Select(x => var videoFiles = dir.GetFiles(_appConfig.AnnotationConfig.VideoFormats.ToArray()).Select(x =>
{ {
using var media = new Media(_libVLC, x.FullName); var media = new Media(_libVlc, x.FullName);
media.Parse(); media.Parse();
var fInfo = new MediaFileInfo var fInfo = new MediaFileInfo
{ {
@@ -404,10 +362,12 @@ public partial class Annotator
var allFileNames = allFiles.Select(x => x.FName).ToList(); var allFileNames = allFiles.Select(x => x.FName).ToList();
var labelsDict = await _dbFactory.Run(async db => await db.Annotations var labelsDict = await _dbFactory.Run(async db =>
.GroupBy(x => x.Name.Substring(0, x.Name.Length - 7)) await db.Annotations
.GroupBy(x => x.OriginalMediaName)
.Where(x => allFileNames.Contains(x.Key)) .Where(x => allFileNames.Contains(x.Key))
.ToDictionaryAsync(x => x.Key, x => x.Key)); .Select(x => x.Key)
.ToDictionaryAsync(x => x, x => x));
foreach (var mediaFile in allFiles) foreach (var mediaFile in allFiles)
mediaFile.HasAnnotations = labelsDict.ContainsKey(mediaFile.FName); mediaFile.HasAnnotations = labelsDict.ContainsKey(mediaFile.FName);
@@ -421,13 +381,13 @@ public partial class Annotator
private void OnFormClosed(object? sender, EventArgs e) private void OnFormClosed(object? sender, EventArgs e)
{ {
MainCancellationSource.Cancel(); _mainCancellationSource.Cancel();
_inferenceService.StopInference(); _inferenceService.StopInference();
DetectionCancellationSource.Cancel(); DetectionCancellationSource.Cancel();
_mediaPlayer.Stop(); _mediaPlayer.Stop();
_mediaPlayer.Dispose(); _mediaPlayer.Dispose();
_libVLC.Dispose(); _libVlc.Dispose();
} }
private void OpenContainingFolder(object sender, RoutedEventArgs e) private void OpenContainingFolder(object sender, RoutedEventArgs e)
@@ -448,13 +408,10 @@ public partial class Annotator
StatusClock.Text = $"{TimeSpan.FromMilliseconds(_mediaPlayer.Time):mm\\:ss} / {_formState.CurrentVideoLength:mm\\:ss}"; StatusClock.Text = $"{TimeSpan.FromMilliseconds(_mediaPlayer.Time):mm\\:ss} / {_formState.CurrentVideoLength:mm\\:ss}";
} }
private void SeekTo(TimeSpan time) => private void OpenFolderItemClick(object sender, RoutedEventArgs e) => OpenFolder();
SeekTo((long)time.TotalMilliseconds); private void OpenFolderButtonClick(object sender, RoutedEventArgs e) => OpenFolder();
private async void OpenFolderItemClick(object sender, RoutedEventArgs e) => await OpenFolder(); private void OpenFolder()
private async void OpenFolderButtonClick(object sender, RoutedEventArgs e) => await OpenFolder();
private async Task OpenFolder()
{ {
var dlg = new CommonOpenFileDialog var dlg = new CommonOpenFileDialog
{ {
@@ -469,7 +426,6 @@ public partial class Annotator
_appConfig.DirectoriesConfig.VideosDirectory = dlg.FileName; _appConfig.DirectoriesConfig.VideosDirectory = dlg.FileName;
TbFolder.Text = dlg.FileName; TbFolder.Text = dlg.FileName;
await Task.CompletedTask;
} }
private void TbFilter_OnTextChanged(object sender, TextChangedEventArgs e) private void TbFilter_OnTextChanged(object sender, TextChangedEventArgs e)
@@ -526,7 +482,7 @@ public partial class Annotator
public async Task AutoDetect() public async Task AutoDetect()
{ {
if (IsInferenceNow) if (_isInferenceNow)
return; return;
if (LvFiles.Items.IsEmpty) if (LvFiles.Items.IsEmpty)
@@ -534,9 +490,9 @@ public partial class Annotator
if (LvFiles.SelectedIndex == -1) if (LvFiles.SelectedIndex == -1)
LvFiles.SelectedIndex = 0; LvFiles.SelectedIndex = 0;
Dispatcher.Invoke(() => Editor.ResetBackground()); Dispatcher.Invoke(() => Editor.SetBackground(null));
IsInferenceNow = true; _isInferenceNow = true;
AIDetectBtn.IsEnabled = false; AIDetectBtn.IsEnabled = false;
DetectionCancellationSource = new CancellationTokenSource(); DetectionCancellationSource = new CancellationTokenSource();
@@ -551,7 +507,7 @@ public partial class Annotator
await _inferenceService.RunInference(files, DetectionCancellationSource.Token); await _inferenceService.RunInference(files, DetectionCancellationSource.Token);
LvFiles.Items.Refresh(); LvFiles.Items.Refresh();
IsInferenceNow = false; _isInferenceNow = false;
StatusHelp.Text = "Розпізнавання зваершено"; StatusHelp.Text = "Розпізнавання зваершено";
AIDetectBtn.IsEnabled = true; AIDetectBtn.IsEnabled = true;
} }
@@ -583,13 +539,11 @@ public partial class Annotator
private void SoundDetections(object sender, RoutedEventArgs e) private void SoundDetections(object sender, RoutedEventArgs e)
{ {
MessageBox.Show("Функція Аудіоаналіз знаходиться в стадії розробки","Система", MessageBoxButton.OK, MessageBoxImage.Information); MessageBox.Show("Функція Аудіоаналіз знаходиться в стадії розробки","Система", MessageBoxButton.OK, MessageBoxImage.Information);
_logger.LogInformation("Denys wishes #1. To be implemented");
} }
private void RunDroneMaintenance(object sender, RoutedEventArgs e) private void RunDroneMaintenance(object sender, RoutedEventArgs e)
{ {
MessageBox.Show("Функція Аналіз стану БПЛА знаходиться в стадії розробки","Система", MessageBoxButton.OK, MessageBoxImage.Information); MessageBox.Show("Функція Аналіз стану БПЛА знаходиться в стадії розробки","Система", MessageBoxButton.OK, MessageBoxImage.Information);
_logger.LogInformation("Denys wishes #2. To be implemented");
} }
#endregion #endregion
@@ -599,7 +553,7 @@ public class GradientStyleSelector : StyleSelector
{ {
public override Style? SelectStyle(object item, DependencyObject container) public override Style? SelectStyle(object item, DependencyObject container)
{ {
if (container is not DataGridRow row || row.DataContext is not AnnotationResult result) if (container is not DataGridRow row || row.DataContext is not Annotation result)
return null; return null;
var style = new Style(typeof(DataGridRow)); var style = new Style(typeof(DataGridRow));
+126 -45
View File
@@ -2,6 +2,7 @@
using System.Windows; using System.Windows;
using System.Windows.Input; using System.Windows.Input;
using System.Windows.Media; using System.Windows.Media;
using System.Windows.Media.Imaging;
using Azaion.Annotator.Controls; using Azaion.Annotator.Controls;
using Azaion.Annotator.DTO; using Azaion.Annotator.DTO;
using Azaion.Common; using Azaion.Common;
@@ -11,6 +12,7 @@ using Azaion.Common.DTO.Config;
using Azaion.Common.Events; using Azaion.Common.Events;
using Azaion.Common.Extensions; using Azaion.Common.Extensions;
using Azaion.Common.Services; using Azaion.Common.Services;
using Azaion.Common.Services.Inference;
using GMap.NET; using GMap.NET;
using GMap.NET.WindowsPresentation; using GMap.NET.WindowsPresentation;
using LibVLCSharp.Shared; using LibVLCSharp.Shared;
@@ -22,7 +24,7 @@ using MediaPlayer = LibVLCSharp.Shared.MediaPlayer;
namespace Azaion.Annotator; namespace Azaion.Annotator;
public class AnnotatorEventHandler( public class AnnotatorEventHandler(
LibVLC libVLC, LibVLC libVlc,
MediaPlayer mediaPlayer, MediaPlayer mediaPlayer,
Annotator mainWindow, Annotator mainWindow,
FormState formState, FormState formState,
@@ -42,11 +44,12 @@ public class AnnotatorEventHandler(
INotificationHandler<AnnotationsDeletedEvent>, INotificationHandler<AnnotationsDeletedEvent>,
INotificationHandler<AnnotationAddedEvent>, INotificationHandler<AnnotationAddedEvent>,
INotificationHandler<SetStatusTextEvent>, INotificationHandler<SetStatusTextEvent>,
INotificationHandler<GPSMatcherResultProcessedEvent> INotificationHandler<GPSMatcherResultProcessedEvent>,
INotificationHandler<AIAvailabilityStatusEvent>
{ {
private const int STEP = 20; private const int STEP = 20;
private const int LARGE_STEP = 5000; private const int LARGE_STEP = 5000;
private const int RESULT_WIDTH = 1280; private readonly string _tempImgPath = Path.Combine(dirConfig.Value.ImagesDirectory, "___temp___.jpg");
private readonly Dictionary<Key, PlaybackControlEnum> _keysControlEnumDict = new() private readonly Dictionary<Key, PlaybackControlEnum> _keysControlEnumDict = new()
{ {
@@ -139,13 +142,22 @@ public class AnnotatorEventHandler(
await Play(cancellationToken); await Play(cancellationToken);
break; break;
case PlaybackControlEnum.Pause: case PlaybackControlEnum.Pause:
if (mediaPlayer.IsPlaying)
{
mediaPlayer.Pause(); mediaPlayer.Pause();
mediaPlayer.TakeSnapshot(0, _tempImgPath, 0, 0);
mainWindow.Editor.SetBackground(await _tempImgPath.OpenImage());
formState.BackgroundTime = TimeSpan.FromMilliseconds(mediaPlayer.Time);
}
else
{
mediaPlayer.Play();
if (formState.BackgroundTime.HasValue) if (formState.BackgroundTime.HasValue)
{ {
mainWindow.Editor.ResetBackground(); mainWindow.Editor.SetBackground(null);
formState.BackgroundTime = null; formState.BackgroundTime = null;
} }
}
break; break;
case PlaybackControlEnum.Stop: case PlaybackControlEnum.Stop:
inferenceService.StopInference(); inferenceService.StopInference();
@@ -159,7 +171,7 @@ public class AnnotatorEventHandler(
mainWindow.SeekTo(mediaPlayer.Time + step); mainWindow.SeekTo(mediaPlayer.Time + step);
break; break;
case PlaybackControlEnum.SaveAnnotations: case PlaybackControlEnum.SaveAnnotations:
await SaveAnnotations(cancellationToken); await SaveAnnotation(cancellationToken);
break; break;
case PlaybackControlEnum.RemoveSelectedAnns: case PlaybackControlEnum.RemoveSelectedAnns:
@@ -226,63 +238,120 @@ public class AnnotatorEventHandler(
if (mainWindow.LvFiles.SelectedItem == null) if (mainWindow.LvFiles.SelectedItem == null)
return; return;
var mediaInfo = (MediaFileInfo)mainWindow.LvFiles.SelectedItem; var mediaInfo = (MediaFileInfo)mainWindow.LvFiles.SelectedItem;
mainWindow.Editor.ResetBackground();
if (formState.CurrentMedia == mediaInfo)
return; //already loaded
formState.CurrentMedia = mediaInfo; formState.CurrentMedia = mediaInfo;
mainWindow.Title = $"Azaion Annotator - {mediaInfo.Name}"; mainWindow.Title = $"{mainWindow.MainTitle} - {mediaInfo.Name}";
await mainWindow.ReloadAnnotations();
if (mediaInfo.MediaType == MediaTypes.Video)
{
mainWindow.Editor.SetBackground(null);
//need to wait a bit for correct VLC playback event handling //need to wait a bit for correct VLC playback event handling
await Task.Delay(100, ct); await Task.Delay(100, ct);
mediaPlayer.Stop(); mediaPlayer.Stop();
mediaPlayer.Play(new Media(libVLC, mediaInfo.Path)); mediaPlayer.Play(new Media(libVlc, mediaInfo.Path));
}
else
{
formState.BackgroundTime = TimeSpan.Zero;
var image = await mediaInfo.Path.OpenImage();
formState.CurrentMediaSize = new Size(image.PixelWidth, image.PixelHeight);
mainWindow.Editor.SetBackground(image);
mediaPlayer.Stop();
mainWindow.ShowTimeAnnotations(TimeSpan.Zero, showImage: true);
}
} }
//SAVE: MANUAL //SAVE: MANUAL
private async Task SaveAnnotations(CancellationToken cancellationToken = default) private async Task SaveAnnotation(CancellationToken cancellationToken = default)
{ {
if (formState.CurrentMedia == null) if (formState.CurrentMedia == null)
return; return;
var time = formState.BackgroundTime ?? TimeSpan.FromMilliseconds(mediaPlayer.Time); var time = formState.BackgroundTime ?? TimeSpan.FromMilliseconds(mediaPlayer.Time);
var originalMediaName = formState.VideoName; var timeName = formState.MediaName.ToTimeName(time);
var fName = originalMediaName.ToTimeName(time);
var currentDetections = mainWindow.Editor.CurrentDetections
.Select(x => new Detection(fName, x.GetLabel(mainWindow.Editor.RenderSize, formState.BackgroundTime.HasValue ? mainWindow.Editor.RenderSize : formState.CurrentVideoSize)))
.ToList();
formState.CurrentMedia.HasAnnotations = currentDetections.Count != 0;
mainWindow.LvFiles.Items.Refresh();
mainWindow.Editor.RemoveAllAnns();
var isVideo = formState.CurrentMedia.MediaType == MediaTypes.Video; var isVideo = formState.CurrentMedia.MediaType == MediaTypes.Video;
var imgPath = Path.Combine(dirConfig.Value.ImagesDirectory, $"{fName}{Constants.JPG_EXT}"); var imgPath = Path.Combine(dirConfig.Value.ImagesDirectory, $"{timeName}{Constants.JPG_EXT}");
if (formState.BackgroundTime.HasValue) formState.CurrentMedia.HasAnnotations = mainWindow.Editor.CurrentDetections.Count != 0;
var annotations = await SaveAnnotationInner(imgPath, cancellationToken);
if (isVideo)
{ {
//no need to save image, it's already there, just remove background foreach (var annotation in annotations)
mainWindow.Editor.ResetBackground(); mainWindow.AddAnnotation(annotation);
formState.BackgroundTime = null; mediaPlayer.Play();
//next item // next item. Probably not needed
var annGrid = mainWindow.DgAnnotations; // var annGrid = mainWindow.DgAnnotations;
annGrid.SelectedIndex = Math.Min(annGrid.Items.Count, annGrid.SelectedIndex + 1); // annGrid.SelectedIndex = Math.Min(annGrid.Items.Count, annGrid.SelectedIndex + 1);
mainWindow.OpenAnnotationResult((AnnotationResult)annGrid.SelectedItem); // mainWindow.OpenAnnotationResult((AnnotationResult)annGrid.SelectedItem);
mainWindow.Editor.SetBackground(null);
formState.BackgroundTime = null;
} }
else else
{ {
var resultHeight = (uint)Math.Round(RESULT_WIDTH / formState.CurrentVideoSize.Width * formState.CurrentVideoSize.Height);
mediaPlayer.TakeSnapshot(0, imgPath, RESULT_WIDTH, resultHeight);
if (isVideo)
mediaPlayer.Play();
else
await NextMedia(ct: cancellationToken); await NextMedia(ct: cancellationToken);
} }
var annotation = await annotationService.SaveAnnotation(originalMediaName, time, currentDetections, token: cancellationToken); mainWindow.LvFiles.Items.Refresh();
if (isVideo) mainWindow.Editor.RemoveAllAnns();
mainWindow.AddAnnotation(annotation); }
private async Task<List<Annotation>> SaveAnnotationInner(string imgPath, CancellationToken cancellationToken = default)
{
var canvasDetections = mainWindow.Editor.CurrentDetections.Select(x => x.ToCanvasLabel()).ToList();
var annotationsResult = new List<Annotation>();
if (!File.Exists(imgPath))
{
var source = (mainWindow.Editor.BackgroundImage.Source as BitmapSource)!;
if (new Size(source.PixelWidth, source.PixelHeight).FitSizeForAI())
await source.SaveImage(imgPath, cancellationToken);
else
{
//Tiling
//1. Convert from RenderSize to CurrentMediaSize
var detectionCoords = canvasDetections.Select(x => new CanvasLabel(
new YoloLabel(x, mainWindow.Editor.RenderSize, formState.CurrentMediaSize), formState.CurrentMediaSize, null, x.Confidence))
.ToList();
//2. Split to frames
var results = TileProcessor.Split(formState.CurrentMediaSize, detectionCoords, cancellationToken);
//3. Save each frame as a separate annotation
foreach (var res in results)
{
var time = TimeSpan.Zero;
var annotationName = $"{formState.MediaName}{Constants.SPLIT_SUFFIX}{res.Tile.Left:0000}_{res.Tile.Top:0000}!".ToTimeName(time);
var tileImgPath = Path.Combine(dirConfig.Value.ImagesDirectory, $"{annotationName}{Constants.JPG_EXT}");
var bitmap = new CroppedBitmap(source, new Int32Rect((int)res.Tile.Left, (int)res.Tile.Top, (int)res.Tile.Width, (int)res.Tile.Height));
await bitmap.SaveImage(tileImgPath, cancellationToken);
var frameSize = new Size(res.Tile.Width, res.Tile.Height);
var detections = res.Detections
.Select(det => det.ReframeToSmall(res.Tile))
.Select(x => new Detection(annotationName, new YoloLabel(x, frameSize)))
.ToList();
annotationsResult.Add(await annotationService.SaveAnnotation(formState.MediaName, annotationName, time, detections, token: cancellationToken));
}
return annotationsResult;
}
}
var timeImg = formState.BackgroundTime ?? TimeSpan.FromMilliseconds(mediaPlayer.Time);
var annName = formState.MediaName.ToTimeName(timeImg);
var currentDetections = canvasDetections.Select(x =>
new Detection(annName, new YoloLabel(x, mainWindow.Editor.RenderSize)))
.ToList();
var annotation = await annotationService.SaveAnnotation(formState.MediaName, annName, timeImg, currentDetections, token: cancellationToken);
return [annotation];
} }
public async Task Handle(AnnotationsDeletedEvent notification, CancellationToken ct) public async Task Handle(AnnotationsDeletedEvent notification, CancellationToken ct)
@@ -294,15 +363,15 @@ public class AnnotatorEventHandler(
var namesSet = notification.AnnotationNames.ToHashSet(); var namesSet = notification.AnnotationNames.ToHashSet();
var remainAnnotations = formState.AnnotationResults var remainAnnotations = formState.AnnotationResults
.Where(x => !namesSet.Contains(x.Annotation?.Name ?? "")).ToList(); .Where(x => !namesSet.Contains(x.Name)).ToList();
formState.AnnotationResults.Clear(); formState.AnnotationResults.Clear();
foreach (var ann in remainAnnotations) foreach (var ann in remainAnnotations)
formState.AnnotationResults.Add(ann); formState.AnnotationResults.Add(ann);
var timedAnnsToRemove = mainWindow.TimedAnnotations var timedAnnotationsToRemove = mainWindow.TimedAnnotations
.Where(x => namesSet.Contains(x.Value.Name)) .Where(x => namesSet.Contains(x.Value.Name))
.Select(x => x.Value).ToList(); .Select(x => x.Value).ToList();
mainWindow.TimedAnnotations.Remove(timedAnnsToRemove); mainWindow.TimedAnnotations.Remove(timedAnnotationsToRemove);
if (formState.AnnotationResults.Count == 0) if (formState.AnnotationResults.Count == 0)
{ {
@@ -317,20 +386,19 @@ public class AnnotatorEventHandler(
await dbFactory.DeleteAnnotations(notification.AnnotationNames, ct); await dbFactory.DeleteAnnotations(notification.AnnotationNames, ct);
try
{
foreach (var name in notification.AnnotationNames) foreach (var name in notification.AnnotationNames)
{
try
{ {
File.Delete(Path.Combine(dirConfig.Value.ImagesDirectory, $"{name}{Constants.JPG_EXT}")); File.Delete(Path.Combine(dirConfig.Value.ImagesDirectory, $"{name}{Constants.JPG_EXT}"));
File.Delete(Path.Combine(dirConfig.Value.LabelsDirectory, $"{name}{Constants.TXT_EXT}")); File.Delete(Path.Combine(dirConfig.Value.LabelsDirectory, $"{name}{Constants.TXT_EXT}"));
File.Delete(Path.Combine(dirConfig.Value.ThumbnailsDirectory, $"{name}{Constants.THUMBNAIL_PREFIX}{Constants.JPG_EXT}")); File.Delete(Path.Combine(dirConfig.Value.ThumbnailsDirectory, $"{name}{Constants.THUMBNAIL_PREFIX}{Constants.JPG_EXT}"));
File.Delete(Path.Combine(dirConfig.Value.ResultsDirectory, $"{name}{Constants.RESULT_PREFIX}{Constants.JPG_EXT}")); File.Delete(Path.Combine(dirConfig.Value.ResultsDirectory, $"{name}{Constants.RESULT_PREFIX}{Constants.JPG_EXT}"));
} }
}
catch (Exception e) catch (Exception e)
{ {
logger.LogError(e, e.Message); logger.LogError(e, e.Message);
throw; }
} }
//Only validators can send Delete to the queue //Only validators can send Delete to the queue
@@ -348,6 +416,9 @@ public class AnnotatorEventHandler(
{ {
mainWindow.Dispatcher.Invoke(() => mainWindow.Dispatcher.Invoke(() =>
{ {
var mediaInfo = (MediaFileInfo)mainWindow.LvFiles.SelectedItem;
if ((mediaInfo?.FName ?? "") == e.Annotation.OriginalMediaName)
mainWindow.AddAnnotation(e.Annotation); mainWindow.AddAnnotation(e.Annotation);
var log = string.Join(Environment.NewLine, e.Annotation.Detections.Select(det => var log = string.Join(Environment.NewLine, e.Annotation.Detections.Select(det =>
@@ -403,4 +474,14 @@ public class AnnotatorEventHandler(
map.SatelliteMap.Position = pointLatLon; map.SatelliteMap.Position = pointLatLon;
map.SatelliteMap.ZoomAndCenterMarkers(null); map.SatelliteMap.ZoomAndCenterMarkers(null);
} }
public async Task Handle(AIAvailabilityStatusEvent e, CancellationToken cancellationToken)
{
mainWindow.Dispatcher.Invoke(() =>
{
logger.LogInformation(e.ToString());
mainWindow.AIDetectBtn.IsEnabled = e.Status == AIAvailabilityEnum.Enabled;
mainWindow.StatusHelp.Text = e.ToString();
});
}
} }
+3 -3
View File
@@ -137,12 +137,12 @@
HorizontalAlignment="Stretch" HorizontalAlignment="Stretch"
VerticalAlignment="Stretch"/> VerticalAlignment="Stretch"/>
<Border Grid.Column="2" ClipToBounds="True">
<controls:CanvasEditor <controls:CanvasEditor
Grid.Column="2"
x:Name="GpsImageEditor" x:Name="GpsImageEditor"
VerticalAlignment="Stretch" VerticalAlignment="Stretch"
HorizontalAlignment="Stretch" > HorizontalAlignment="Stretch" />
</controls:CanvasEditor> </Border>
<GridSplitter <GridSplitter
Background="DarkGray" Background="DarkGray"
+1
View File
@@ -4,6 +4,7 @@
<ImplicitUsings>enable</ImplicitUsings> <ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable> <Nullable>enable</Nullable>
<UseWPF>true</UseWPF> <UseWPF>true</UseWPF>
<LangVersion>12</LangVersion>
</PropertyGroup> </PropertyGroup>
<ItemGroup> <ItemGroup>
@@ -1,4 +1,5 @@
using System.IO; using System.Diagnostics;
using System.IO;
using Azaion.Common.DTO; using Azaion.Common.DTO;
using Azaion.Common.DTO.Config; using Azaion.Common.DTO.Config;
using Azaion.Common.Extensions; using Azaion.Common.Extensions;
@@ -8,11 +9,14 @@ using System.Windows;
namespace Azaion.Common; namespace Azaion.Common;
public class Constants public static class Constants
{ {
public const string CONFIG_PATH = "config.json"; public const string CONFIG_PATH = "config.json";
public const string LOADER_CONFIG_PATH = "loaderconfig.json";
public const string DEFAULT_API_URL = "https://api.azaion.com";
public const string AZAION_SUITE_EXE = "Azaion.Suite.exe";
private const string DEFAULT_API_URL = "https://api.azaion.com"; public const int AI_TILE_SIZE = 1280;
#region ExternalClientsConfig #region ExternalClientsConfig
@@ -25,11 +29,11 @@ public class Constants
public static readonly string ExternalGpsDeniedPath = Path.Combine(EXTERNAL_GPS_DENIED_FOLDER, "image-matcher.exe"); public static readonly string ExternalGpsDeniedPath = Path.Combine(EXTERNAL_GPS_DENIED_FOLDER, "image-matcher.exe");
public const string DEFAULT_ZMQ_INFERENCE_HOST = "127.0.0.1"; public const string DEFAULT_ZMQ_INFERENCE_HOST = "127.0.0.1";
public const int DEFAULT_ZMQ_INFERENCE_PORT = 5227; private const int DEFAULT_ZMQ_INFERENCE_PORT = 5227;
public const string DEFAULT_ZMQ_GPS_DENIED_HOST = "127.0.0.1"; private const string DEFAULT_ZMQ_GPS_DENIED_HOST = "127.0.0.1";
public const int DEFAULT_ZMQ_GPS_DENIED_PORT = 5255; private const int DEFAULT_ZMQ_GPS_DENIED_PORT = 5255;
public const int DEFAULT_ZMQ_GPS_DENIED_PUBLISH_PORT = 5256; private const int DEFAULT_ZMQ_GPS_DENIED_PUBLISH_PORT = 5256;
#endregion ExternalClientsConfig #endregion ExternalClientsConfig
@@ -44,37 +48,29 @@ public class Constants
public const string TXT_EXT = ".txt"; public const string TXT_EXT = ".txt";
#region DirectoriesConfig #region DirectoriesConfig
public const string DEFAULT_VIDEO_DIR = "video"; private const string DEFAULT_VIDEO_DIR = "video";
public const string DEFAULT_LABELS_DIR = "labels"; private const string DEFAULT_LABELS_DIR = "labels";
public const string DEFAULT_IMAGES_DIR = "images"; private const string DEFAULT_IMAGES_DIR = "images";
public const string DEFAULT_RESULTS_DIR = "results"; private const string DEFAULT_RESULTS_DIR = "results";
public const string DEFAULT_THUMBNAILS_DIR = "thumbnails"; private const string DEFAULT_THUMBNAILS_DIR = "thumbnails";
public const string DEFAULT_GPS_SAT_DIRECTORY = "satellitesDir"; private const string DEFAULT_GPS_SAT_DIRECTORY = "satellitesDir";
public const string DEFAULT_GPS_ROUTE_DIRECTORY = "routeDir"; private const string DEFAULT_GPS_ROUTE_DIRECTORY = "routeDir";
#endregion #endregion
#region AnnotatorConfig #region AnnotatorConfig
public static readonly AnnotationConfig DefaultAnnotationConfig = new()
{
DetectionClasses = DefaultAnnotationClasses!,
VideoFormats = DefaultVideoFormats!,
ImageFormats = DefaultImageFormats!,
AnnotationsDbFile = DEFAULT_ANNOTATIONS_DB_FILE
};
private static readonly List<DetectionClass> DefaultAnnotationClasses = private static readonly List<DetectionClass> DefaultAnnotationClasses =
[ [
new() { Id = 0, Name = "ArmorVehicle", ShortName = "Броня", Color = "#FF0000".ToColor() }, new() { Id = 0, Name = "ArmorVehicle", ShortName = "Броня", Color = "#FF0000".ToColor() },
new() { Id = 1, Name = "Truck", ShortName = "Вантаж.", Color = "#00FF00".ToColor() }, new() { Id = 1, Name = "Truck", ShortName = "Вантаж.", Color = "#00FF00".ToColor() },
new() { Id = 2, Name = "Vehicle", ShortName = "Машина", Color = "#0000FF".ToColor() }, new() { Id = 2, Name = "Vehicle", ShortName = "Машина", Color = "#0000FF".ToColor() },
new() { Id = 3, Name = "Atillery", ShortName = "Арта", Color = "#FFFF00".ToColor() }, new() { Id = 3, Name = "Artillery", ShortName = "Арта", Color = "#FFFF00".ToColor() },
new() { Id = 4, Name = "Shadow", ShortName = "Тінь", Color = "#FF00FF".ToColor() }, new() { Id = 4, Name = "Shadow", ShortName = "Тінь", Color = "#FF00FF".ToColor() },
new() { Id = 5, Name = "Trenches", ShortName = "Окопи", Color = "#00FFFF".ToColor() }, new() { Id = 5, Name = "Trenches", ShortName = "Окопи", Color = "#00FFFF".ToColor() },
new() { Id = 6, Name = "MilitaryMan", ShortName = "Військов", Color = "#188021".ToColor() }, new() { Id = 6, Name = "MilitaryMan", ShortName = "Військов", Color = "#188021".ToColor() },
new() { Id = 7, Name = "TyreTracks", ShortName = "Накати", Color = "#800000".ToColor() }, new() { Id = 7, Name = "TyreTracks", ShortName = "Накати", Color = "#800000".ToColor() },
new() { Id = 8, Name = "AdditArmoredTank", ShortName = "Танк.захист", Color = "#008000".ToColor() }, new() { Id = 8, Name = "AdditionArmoredTank",ShortName = "Танк.захист", Color = "#008000".ToColor() },
new() { Id = 9, Name = "Smoke", ShortName = "Дим", Color = "#000080".ToColor() }, new() { Id = 9, Name = "Smoke", ShortName = "Дим", Color = "#000080".ToColor() },
new() { Id = 10, Name = "Plane", ShortName = "Літак", Color = "#000080".ToColor() }, new() { Id = 10, Name = "Plane", ShortName = "Літак", Color = "#000080".ToColor() },
new() { Id = 11, Name = "Moto", ShortName = "Мото", Color = "#808000".ToColor() }, new() { Id = 11, Name = "Moto", ShortName = "Мото", Color = "#808000".ToColor() },
@@ -85,55 +81,65 @@ public class Constants
new() { Id = 16, Name = "Caponier", ShortName = "Капонір", Color = "#ffb6c1".ToColor() }, new() { Id = 16, Name = "Caponier", ShortName = "Капонір", Color = "#ffb6c1".ToColor() },
]; ];
public static readonly List<string> DefaultVideoFormats = ["mp4", "mov", "avi"]; private static readonly List<string> DefaultVideoFormats = ["mp4", "mov", "avi"];
public static readonly List<string> DefaultImageFormats = ["jpg", "jpeg", "png", "bmp"]; private static readonly List<string> DefaultImageFormats = ["jpg", "jpeg", "png", "bmp"];
public static int DEFAULT_LEFT_PANEL_WIDTH = 250; private static readonly AnnotationConfig DefaultAnnotationConfig = new()
public static int DEFAULT_RIGHT_PANEL_WIDTH = 250; {
DetectionClasses = DefaultAnnotationClasses,
VideoFormats = DefaultVideoFormats,
ImageFormats = DefaultImageFormats,
AnnotationsDbFile = DEFAULT_ANNOTATIONS_DB_FILE
};
public const string DEFAULT_ANNOTATIONS_DB_FILE = "annotations.db"; private const int DEFAULT_LEFT_PANEL_WIDTH = 250;
private const int DEFAULT_RIGHT_PANEL_WIDTH = 250;
private const string DEFAULT_ANNOTATIONS_DB_FILE = "annotations.db";
# endregion AnnotatorConfig # endregion AnnotatorConfig
# region AIRecognitionConfig # region AIRecognitionConfig
public static readonly AIRecognitionConfig DefaultAIRecognitionConfig = new() private static readonly AIRecognitionConfig DefaultAIRecognitionConfig = new()
{ {
FrameRecognitionSeconds = DEFAULT_FRAME_RECOGNITION_SECONDS, FrameRecognitionSeconds = DEFAULT_FRAME_RECOGNITION_SECONDS,
TrackingDistanceConfidence = TRACKING_DISTANCE_CONFIDENCE, TrackingDistanceConfidence = TRACKING_DISTANCE_CONFIDENCE,
TrackingProbabilityIncrease = TRACKING_PROBABILITY_INCREASE, TrackingProbabilityIncrease = TRACKING_PROBABILITY_INCREASE,
TrackingIntersectionThreshold = TRACKING_INTERSECTION_THRESHOLD, TrackingIntersectionThreshold = TRACKING_INTERSECTION_THRESHOLD,
BigImageTileOverlapPercent = DEFAULT_BIG_IMAGE_TILE_OVERLAP_PERCENT,
FramePeriodRecognition = DEFAULT_FRAME_PERIOD_RECOGNITION FramePeriodRecognition = DEFAULT_FRAME_PERIOD_RECOGNITION
}; };
public const double DEFAULT_FRAME_RECOGNITION_SECONDS = 2; private const double DEFAULT_FRAME_RECOGNITION_SECONDS = 2;
public const double TRACKING_DISTANCE_CONFIDENCE = 0.15; private const double TRACKING_DISTANCE_CONFIDENCE = 0.15;
public const double TRACKING_PROBABILITY_INCREASE = 15; private const double TRACKING_PROBABILITY_INCREASE = 15;
public const double TRACKING_INTERSECTION_THRESHOLD = 0.8; private const double TRACKING_INTERSECTION_THRESHOLD = 0.8;
public const int DEFAULT_FRAME_PERIOD_RECOGNITION = 4; private const int DEFAULT_BIG_IMAGE_TILE_OVERLAP_PERCENT = 20;
private const int DEFAULT_FRAME_PERIOD_RECOGNITION = 4;
# endregion AIRecognitionConfig # endregion AIRecognitionConfig
# region GpsDeniedConfig # region GpsDeniedConfig
public static readonly GpsDeniedConfig DefaultGpsDeniedConfig = new() private static readonly GpsDeniedConfig DefaultGpsDeniedConfig = new()
{ {
MinKeyPoints = 15 MinKeyPoints = 11
}; };
# endregion # endregion
#region Thumbnails #region Thumbnails
public static readonly ThumbnailConfig DefaultThumbnailConfig = new() private static readonly Size DefaultThumbnailSize = new(240, 135);
private static readonly ThumbnailConfig DefaultThumbnailConfig = new()
{ {
Size = DefaultThumbnailSize, Size = DefaultThumbnailSize,
Border = DEFAULT_THUMBNAIL_BORDER Border = DEFAULT_THUMBNAIL_BORDER
}; };
public static readonly Size DefaultThumbnailSize = new(240, 135); private const int DEFAULT_THUMBNAIL_BORDER = 10;
public const int DEFAULT_THUMBNAIL_BORDER = 10;
public const string THUMBNAIL_PREFIX = "_thumb"; public const string THUMBNAIL_PREFIX = "_thumb";
public const string RESULT_PREFIX = "_result"; public const string RESULT_PREFIX = "_result";
@@ -159,10 +165,10 @@ public class Constants
#endregion #endregion
public const string CSV_PATH = "matches.csv"; public const string SPLIT_SUFFIX = "!split!";
public static readonly InitConfig DefaultInitConfig = new() private static readonly InitConfig DefaultInitConfig = new()
{ {
LoaderClientConfig = new LoaderClientConfig LoaderClientConfig = new LoaderClientConfig
{ {
@@ -251,4 +257,12 @@ public class Constants
return DefaultInitConfig; return DefaultInitConfig;
} }
} }
public static Version GetLocalVersion()
{
var localFileInfo = FileVersionInfo.GetVersionInfo(AZAION_SUITE_EXE);
if (string.IsNullOrWhiteSpace(localFileInfo.ProductVersion))
throw new Exception($"Can't find {AZAION_SUITE_EXE} and its version");
return new Version(localFileInfo.FileVersion!);
}
} }
+155 -61
View File
@@ -5,7 +5,9 @@ using System.Windows.Input;
using System.Windows.Media; using System.Windows.Media;
using System.Windows.Media.Imaging; using System.Windows.Media.Imaging;
using System.Windows.Shapes; using System.Windows.Shapes;
using Azaion.Common.Database;
using Azaion.Common.DTO; using Azaion.Common.DTO;
using Azaion.Common.Extensions;
using MediatR; using MediatR;
using Color = System.Windows.Media.Color; using Color = System.Windows.Media.Color;
using Image = System.Windows.Controls.Image; using Image = System.Windows.Controls.Image;
@@ -34,11 +36,11 @@ public class CanvasEditor : Canvas
private Point _panStartPoint; private Point _panStartPoint;
private bool _isZoomedIn; private bool _isZoomedIn;
private const int MIN_SIZE = 20; private const int MIN_SIZE = 12;
private readonly TimeSpan _viewThreshold = TimeSpan.FromMilliseconds(400); private readonly TimeSpan _viewThreshold = TimeSpan.FromMilliseconds(400);
private Image _backgroundImage { get; set; } = new() { Stretch = Stretch.Fill }; public Image BackgroundImage { get; set; } = new() { Stretch = Stretch.Uniform };
public IMediator Mediator { get; set; } = null!; private RectangleF? _clampedRect;
public static readonly DependencyProperty GetTimeFuncProp = public static readonly DependencyProperty GetTimeFuncProp =
DependencyProperty.Register( DependencyProperty.Register(
@@ -113,7 +115,7 @@ public class CanvasEditor : Canvas
MouseUp += CanvasMouseUp; MouseUp += CanvasMouseUp;
SizeChanged += CanvasResized; SizeChanged += CanvasResized;
Cursor = Cursors.Cross; Cursor = Cursors.Cross;
Children.Insert(0, _backgroundImage); Children.Insert(0, BackgroundImage);
Children.Add(_newAnnotationRect); Children.Add(_newAnnotationRect);
Children.Add(_horizontalLine); Children.Add(_horizontalLine);
Children.Add(_verticalLine); Children.Add(_verticalLine);
@@ -124,9 +126,27 @@ public class CanvasEditor : Canvas
MouseWheel += CanvasWheel; MouseWheel += CanvasWheel;
} }
public void SetImageSource(ImageSource? source) public void SetBackground(ImageSource? source)
{ {
_backgroundImage.Source = source; SetZoom();
BackgroundImage.Source = source;
UpdateClampedRect();
}
private void SetZoom(Matrix? matrix = null)
{
if (matrix == null)
{
_matrixTransform.Matrix = Matrix.Identity;
_isZoomedIn = false;
}
else
{
_matrixTransform.Matrix = matrix.Value;
_isZoomedIn = true;
}
// foreach (var detection in CurrentDetections)
// detection.UpdateAdornerScale(scale: _matrixTransform.Matrix.M11);
} }
private void CanvasWheel(object sender, MouseWheelEventArgs e) private void CanvasWheel(object sender, MouseWheelEventArgs e)
@@ -139,19 +159,12 @@ public class CanvasEditor : Canvas
var matrix = _matrixTransform.Matrix; var matrix = _matrixTransform.Matrix;
if (scale < 1 && matrix.M11 * scale < 1.0) if (scale < 1 && matrix.M11 * scale < 1.0)
{ SetZoom();
_matrixTransform.Matrix = Matrix.Identity;
_isZoomedIn = false;
}
else else
{ {
matrix.ScaleAt(scale, scale, mousePos.X, mousePos.Y); matrix.ScaleAt(scale, scale, mousePos.X, mousePos.Y);
_matrixTransform.Matrix = matrix; SetZoom(matrix);
_isZoomedIn = true;
} }
foreach (var detection in CurrentDetections)
detection.UpdateAdornerScale(scale: _matrixTransform.Matrix.M11);
} }
private void Init(object sender, RoutedEventArgs e) private void Init(object sender, RoutedEventArgs e)
@@ -165,6 +178,8 @@ public class CanvasEditor : Canvas
private void CanvasMouseDown(object sender, MouseButtonEventArgs e) private void CanvasMouseDown(object sender, MouseButtonEventArgs e)
{ {
ClearSelections(); ClearSelections();
if (e.LeftButton != MouseButtonState.Pressed)
return;
if (Keyboard.Modifiers == ModifierKeys.Control && _isZoomedIn) if (Keyboard.Modifiers == ModifierKeys.Control && _isZoomedIn)
{ {
_panStartPoint = e.GetPosition(this); _panStartPoint = e.GetPosition(this);
@@ -172,11 +187,12 @@ public class CanvasEditor : Canvas
} }
else else
NewAnnotationStart(sender, e); NewAnnotationStart(sender, e);
(sender as UIElement)?.CaptureMouse();
} }
private void CanvasMouseMove(object sender, MouseEventArgs e) private void CanvasMouseMove(object sender, MouseEventArgs e)
{ {
var pos = e.GetPosition(this); var pos = GetClampedPosition(e);
_horizontalLine.Y1 = _horizontalLine.Y2 = pos.Y; _horizontalLine.Y1 = _horizontalLine.Y2 = pos.Y;
_verticalLine.X1 = _verticalLine.X2 = pos.X; _verticalLine.X1 = _verticalLine.X2 = pos.X;
SetLeft(_classNameHint, pos.X + 10); SetLeft(_classNameHint, pos.X + 10);
@@ -185,54 +201,68 @@ public class CanvasEditor : Canvas
switch (SelectionState) switch (SelectionState)
{ {
case SelectionState.NewAnnCreating: case SelectionState.NewAnnCreating:
NewAnnotationCreatingMove(sender, e); NewAnnotationCreatingMove(pos);
break; break;
case SelectionState.AnnResizing: case SelectionState.AnnResizing:
AnnotationResizeMove(sender, e); AnnotationResizeMove(pos);
break; break;
case SelectionState.AnnMoving: case SelectionState.AnnMoving:
AnnotationPositionMove(sender, e); AnnotationPositionMove(pos);
e.Handled = true;
break; break;
case SelectionState.PanZoomMoving: case SelectionState.PanZoomMoving:
PanZoomMove(sender, e); PanZoomMove(pos);
break; break;
} }
} }
private void PanZoomMove(object sender, MouseEventArgs e) private Point GetClampedPosition(MouseEventArgs e)
{ {
var currentPoint = e.GetPosition(this); var pos = e.GetPosition(this);
var delta = currentPoint - _panStartPoint; return !_clampedRect.HasValue
? pos
: new Point
(
Math.Clamp(pos.X, _clampedRect.Value.X, _clampedRect.Value.Right),
Math.Clamp(pos.Y, _clampedRect.Value.Y, _clampedRect.Value.Bottom)
);
}
private void PanZoomMove(Point point)
{
var delta = point - _panStartPoint;
var matrix = _matrixTransform.Matrix; var matrix = _matrixTransform.Matrix;
matrix.Translate(delta.X, delta.Y); matrix.Translate(delta.X, delta.Y);
_matrixTransform.Matrix = matrix; _matrixTransform.Matrix = matrix;
} }
private void CanvasMouseUp(object sender, MouseButtonEventArgs e) private void CanvasMouseUp(object sender, MouseButtonEventArgs e)
{ {
(sender as UIElement)?.ReleaseMouseCapture();
if (SelectionState == SelectionState.NewAnnCreating) if (SelectionState == SelectionState.NewAnnCreating)
{ {
var endPos = e.GetPosition(this); var endPos = GetClampedPosition(e);
_newAnnotationRect.Width = 0; _newAnnotationRect.Width = 0;
_newAnnotationRect.Height = 0; _newAnnotationRect.Height = 0;
var width = Math.Abs(endPos.X - _newAnnotationStartPos.X); var width = Math.Abs(endPos.X - _newAnnotationStartPos.X);
var height = Math.Abs(endPos.Y - _newAnnotationStartPos.Y); var height = Math.Abs(endPos.Y - _newAnnotationStartPos.Y);
if (width < MIN_SIZE || height < MIN_SIZE) if (width >= MIN_SIZE && height >= MIN_SIZE)
return; {
var time = GetTimeFunc(); var time = GetTimeFunc();
var control = CreateDetectionControl(CurrentAnnClass, time, new CanvasLabel var control = CreateDetectionControl(CurrentAnnClass, time, new CanvasLabel
{ {
Width = width, Width = width,
Height = height, Height = height,
X = Math.Min(endPos.X, _newAnnotationStartPos.X), Left = Math.Min(endPos.X, _newAnnotationStartPos.X),
Y = Math.Min(endPos.Y, _newAnnotationStartPos.Y), Top = Math.Min(endPos.Y, _newAnnotationStartPos.Y),
Confidence = 1 Confidence = 1
}); });
control.UpdateLayout(); control.UpdateLayout();
CheckLabelBoundaries(control); CheckLabelBoundaries(control);
} }
}
else if (SelectionState != SelectionState.PanZoomMoving) else if (SelectionState != SelectionState.PanZoomMoving)
CheckLabelBoundaries(_curAnn); CheckLabelBoundaries(_curAnn);
@@ -246,14 +276,14 @@ public class CanvasEditor : Canvas
var origin = lb.TranslatePoint(new Point(0, 0), this); var origin = lb.TranslatePoint(new Point(0, 0), this);
lb.Children[0].Measure(new Size(double.PositiveInfinity, double.PositiveInfinity)); lb.Children[0].Measure(new Size(double.PositiveInfinity, double.PositiveInfinity));
var size = lb.Children[0].DesiredSize; var size = lb.Children[0].DesiredSize;
var lbRect = new RectangleF((float)origin.X, (float)origin.Y, (float)size.Width, (float)size.Height); var controlLabel = new RectangleF((float)origin.X, (float)origin.Y, (float)size.Width, (float)size.Height);
foreach (var c in CurrentDetections) foreach (var c in CurrentDetections)
{ {
if (c == detectionControl) if (c == detectionControl)
continue; continue;
var detRect = new RectangleF((float)GetLeft(c), (float)GetTop(c), (float)c.Width, (float)c.Height); var detRect = new RectangleF((float)GetLeft(c), (float)GetTop(c), (float)c.Width, (float)c.Height);
detRect.Intersect(lbRect); detRect.Intersect(controlLabel);
// var intersect = detections[i].ToRectangle(); // var intersect = detections[i].ToRectangle();
@@ -269,8 +299,44 @@ public class CanvasEditor : Canvas
{ {
_horizontalLine.X2 = e.NewSize.Width; _horizontalLine.X2 = e.NewSize.Width;
_verticalLine.Y2 = e.NewSize.Height; _verticalLine.Y2 = e.NewSize.Height;
_backgroundImage.Width = e.NewSize.Width; BackgroundImage.Width = e.NewSize.Width;
_backgroundImage.Height = e.NewSize.Height; BackgroundImage.Height = e.NewSize.Height;
UpdateClampedRect();
}
private void UpdateClampedRect()
{
if (BackgroundImage.Source is not BitmapSource imageSource)
{
_clampedRect = null;
return;
}
var imgWidth = imageSource.PixelWidth;
var imgHeight = imageSource.PixelHeight;
var canvasWidth = ActualWidth;
var canvasHeight = ActualHeight;
var imgRatio = imgWidth / (double)imgHeight;
var canvasRatio = canvasWidth / canvasHeight;
double renderedWidth;
double renderedHeight;
if (imgRatio > canvasRatio)
{
renderedWidth = canvasWidth;
renderedHeight = canvasWidth / imgRatio;
}
else
{
renderedHeight = canvasHeight;
renderedWidth = canvasHeight * imgRatio;
}
var xOffset = (canvasWidth - renderedWidth) / 2;
var yOffset = (canvasHeight - renderedHeight) / 2;
_clampedRect = new RectangleF((float)xOffset, (float)yOffset, (float)renderedWidth, (float)renderedHeight);
} }
#region Annotation Resizing & Moving #region Annotation Resizing & Moving
@@ -281,20 +347,19 @@ public class CanvasEditor : Canvas
_lastPos = e.GetPosition(this); _lastPos = e.GetPosition(this);
_curRec = (Rectangle)sender; _curRec = (Rectangle)sender;
_curAnn = (DetectionControl)((Grid)_curRec.Parent).Parent; _curAnn = (DetectionControl)((Grid)_curRec.Parent).Parent;
(sender as UIElement)?.CaptureMouse();
e.Handled = true; e.Handled = true;
} }
private void AnnotationResizeMove(object sender, MouseEventArgs e) private void AnnotationResizeMove(Point point)
{ {
if (SelectionState != SelectionState.AnnResizing) if (SelectionState != SelectionState.AnnResizing)
return; return;
var currentPos = e.GetPosition(this);
var x = GetLeft(_curAnn); var x = GetLeft(_curAnn);
var y = GetTop(_curAnn); var y = GetTop(_curAnn);
var offsetX = currentPos.X - _lastPos.X; var offsetX = point.X - _lastPos.X;
var offsetY = currentPos.Y - _lastPos.Y; var offsetY = point.Y - _lastPos.Y;
switch (_curRec.HorizontalAlignment, _curRec.VerticalAlignment) switch (_curRec.HorizontalAlignment, _curRec.VerticalAlignment)
{ {
case (HorizontalAlignment.Left, VerticalAlignment.Top): case (HorizontalAlignment.Left, VerticalAlignment.Top):
@@ -334,7 +399,7 @@ public class CanvasEditor : Canvas
_curAnn.Height = Math.Max(MIN_SIZE, _curAnn.Height + offsetY); _curAnn.Height = Math.Max(MIN_SIZE, _curAnn.Height + offsetY);
break; break;
} }
_lastPos = currentPos; _lastPos = point;
} }
private void AnnotationPositionStart(object sender, MouseEventArgs e) private void AnnotationPositionStart(object sender, MouseEventArgs e)
@@ -351,19 +416,26 @@ public class CanvasEditor : Canvas
e.Handled = true; e.Handled = true;
} }
private void AnnotationPositionMove(object sender, MouseEventArgs e) private void AnnotationPositionMove(Point point)
{ {
if (SelectionState != SelectionState.AnnMoving) if (SelectionState != SelectionState.AnnMoving)
return; return;
var currentPos = e.GetPosition(this); var offsetX = point.X - _lastPos.X;
var offsetX = currentPos.X - _lastPos.X; var offsetY = point.Y - _lastPos.Y;
var offsetY = currentPos.Y - _lastPos.Y;
SetLeft(_curAnn, GetLeft(_curAnn) + offsetX); var nextLeft = GetLeft(_curAnn) + offsetX;
SetTop(_curAnn, GetTop(_curAnn) + offsetY); var nextTop = GetTop(_curAnn) + offsetY;
_lastPos = currentPos;
e.Handled = true; if (_clampedRect.HasValue)
{
nextLeft = Math.Clamp(nextLeft, _clampedRect.Value.X, _clampedRect.Value.Right - _curAnn.Width);
nextTop = Math.Clamp(nextTop, _clampedRect.Value.Y, _clampedRect.Value.Bottom - _curAnn.Height);
}
SetLeft(_curAnn, nextLeft);
SetTop(_curAnn, nextTop);
_lastPos = point;
} }
#endregion #endregion
@@ -373,38 +445,54 @@ public class CanvasEditor : Canvas
private void NewAnnotationStart(object sender, MouseButtonEventArgs e) private void NewAnnotationStart(object sender, MouseButtonEventArgs e)
{ {
_newAnnotationStartPos = e.GetPosition(this); _newAnnotationStartPos = e.GetPosition(this);
SetLeft(_newAnnotationRect, _newAnnotationStartPos.X); SetLeft(_newAnnotationRect, _newAnnotationStartPos.X);
SetTop(_newAnnotationRect, _newAnnotationStartPos.Y); SetTop(_newAnnotationRect, _newAnnotationStartPos.Y);
_newAnnotationRect.MouseMove += NewAnnotationCreatingMove; _newAnnotationRect.MouseMove += (sender, e) =>
{
var currentPos = e.GetPosition(this);
NewAnnotationCreatingMove(currentPos);
};
SelectionState = SelectionState.NewAnnCreating; SelectionState = SelectionState.NewAnnCreating;
} }
private void NewAnnotationCreatingMove(object sender, MouseEventArgs e) private void NewAnnotationCreatingMove(Point point)
{ {
if (SelectionState != SelectionState.NewAnnCreating) if (SelectionState != SelectionState.NewAnnCreating)
return; return;
var currentPos = e.GetPosition(this); var diff = point - _newAnnotationStartPos;
var diff = currentPos - _newAnnotationStartPos;
_newAnnotationRect.Height = Math.Abs(diff.Y); _newAnnotationRect.Height = Math.Abs(diff.Y);
_newAnnotationRect.Width = Math.Abs(diff.X); _newAnnotationRect.Width = Math.Abs(diff.X);
if (diff.X < 0) if (diff.X < 0)
SetLeft(_newAnnotationRect, currentPos.X); SetLeft(_newAnnotationRect, point.X);
if (diff.Y < 0) if (diff.Y < 0)
SetTop(_newAnnotationRect, currentPos.Y); SetTop(_newAnnotationRect, point.Y);
} }
public void CreateDetections(TimeSpan time, IEnumerable<Detection> detections, List<DetectionClass> detectionClasses, Size videoSize) public void CreateDetections(Annotation annotation, List<DetectionClass> detectionClasses, Size mediaSize)
{ {
foreach (var detection in detections) foreach (var detection in annotation.Detections)
{ {
var detectionClass = DetectionClass.FromYoloId(detection.ClassNumber, detectionClasses); var detectionClass = DetectionClass.FromYoloId(detection.ClassNumber, detectionClasses);
var canvasLabel = new CanvasLabel(detection, RenderSize, videoSize, detection.Confidence); CanvasLabel canvasLabel;
CreateDetectionControl(detectionClass, time, canvasLabel); if (!annotation.IsSplit || mediaSize.FitSizeForAI())
canvasLabel = new CanvasLabel(detection, RenderSize, mediaSize, detection.Confidence);
else
{
canvasLabel = new CanvasLabel(detection, new Size(Constants.AI_TILE_SIZE, Constants.AI_TILE_SIZE), null, detection.Confidence)
.ReframeFromSmall(annotation.SplitTile!);
//From CurrentMediaSize to Render Size
var yoloLabel = new YoloLabel(canvasLabel, mediaSize);
canvasLabel = new CanvasLabel(yoloLabel, RenderSize, mediaSize, canvasLabel.Confidence);
}
var control = CreateDetectionControl(detectionClass, annotation.Time, canvasLabel);
control.UpdateLayout();
CheckLabelBoundaries(control);
} }
} }
@@ -412,8 +500,8 @@ public class CanvasEditor : Canvas
{ {
var detectionControl = new DetectionControl(detectionClass, time, AnnotationResizeStart, canvasLabel); var detectionControl = new DetectionControl(detectionClass, time, AnnotationResizeStart, canvasLabel);
detectionControl.MouseDown += AnnotationPositionStart; detectionControl.MouseDown += AnnotationPositionStart;
SetLeft(detectionControl, canvasLabel.X ); SetLeft(detectionControl, canvasLabel.Left );
SetTop(detectionControl, canvasLabel.Y); SetTop(detectionControl, canvasLabel.Top);
Children.Add(detectionControl); Children.Add(detectionControl);
CurrentDetections.Add(detectionControl); CurrentDetections.Add(detectionControl);
_newAnnotationRect.Fill = new SolidColorBrush(detectionClass.Color); _newAnnotationRect.Fill = new SolidColorBrush(detectionClass.Color);
@@ -454,5 +542,11 @@ public class CanvasEditor : Canvas
RemoveAnnotations(expiredAnns); RemoveAnnotations(expiredAnns);
} }
public void ResetBackground() => Background = new SolidColorBrush(Color.FromArgb(1, 0, 0, 0)); public void ZoomTo(Point point)
{
SetZoom();
var matrix = _matrixTransform.Matrix;
matrix.ScaleAt(2, 2, point.X, point.Y);
SetZoom(matrix);
}
} }
+21 -24
View File
@@ -5,22 +5,21 @@ using System.Windows.Media;
using System.Windows.Shapes; using System.Windows.Shapes;
using Azaion.Common.DTO; using Azaion.Common.DTO;
using Azaion.Common.Extensions; using Azaion.Common.Extensions;
using Label = System.Windows.Controls.Label; using Annotation = Azaion.Common.Database.Annotation;
namespace Azaion.Common.Controls; namespace Azaion.Common.Controls;
public class DetectionControl : Border public class DetectionControl : Border
{ {
private readonly Action<object, MouseButtonEventArgs> _resizeStart; private readonly Action<object, MouseButtonEventArgs> _resizeStart;
private const double RESIZE_RECT_SIZE = 12; private const double RESIZE_RECT_SIZE = 10;
private readonly Grid _grid; private readonly Grid _grid;
private readonly Label _detectionLabel; private readonly DetectionLabelPanel _detectionLabelPanel;
public readonly Canvas DetectionLabelContainer; public readonly Canvas DetectionLabelContainer;
public TimeSpan Time { get; set; } public TimeSpan Time { get; set; }
private readonly double _confidence; private readonly List<Rectangle> _resizedRectangles = new();
private List<Rectangle> _resizedRectangles = new();
private DetectionClass _detectionClass = null!; private DetectionClass _detectionClass = null!;
public DetectionClass DetectionClass public DetectionClass DetectionClass
@@ -30,12 +29,11 @@ public class DetectionControl : Border
{ {
var brush = new SolidColorBrush(value.Color.ToConfidenceColor()); var brush = new SolidColorBrush(value.Color.ToConfidenceColor());
BorderBrush = brush; BorderBrush = brush;
BorderThickness = new Thickness(3); BorderThickness = new Thickness(1);
foreach (var rect in _resizedRectangles) foreach (var rect in _resizedRectangles)
rect.Stroke = brush; rect.Stroke = brush;
_detectionLabel.Background = new SolidColorBrush(value.Color.ToConfidenceColor(_confidence)); _detectionLabelPanel.DetectionClass = value;
_detectionLabel.Content = _detectionLabelText(value.UIName);
_detectionClass = value; _detectionClass = value;
} }
} }
@@ -79,9 +77,6 @@ public class DetectionControl : Border
} }
} }
private string _detectionLabelText(string detectionClassName) =>
_confidence >= 0.995 ? detectionClassName : $"{detectionClassName}: {_confidence * 100:F0}%"; //double
public DetectionControl(DetectionClass detectionClass, TimeSpan time, Action<object, public DetectionControl(DetectionClass detectionClass, TimeSpan time, Action<object,
MouseButtonEventArgs> resizeStart, CanvasLabel canvasLabel) MouseButtonEventArgs> resizeStart, CanvasLabel canvasLabel)
{ {
@@ -89,7 +84,6 @@ public class DetectionControl : Border
Height = canvasLabel.Height; Height = canvasLabel.Height;
Time = time; Time = time;
_resizeStart = resizeStart; _resizeStart = resizeStart;
_confidence = canvasLabel.Confidence;
DetectionLabelContainer = new Canvas DetectionLabelContainer = new Canvas
{ {
@@ -97,16 +91,17 @@ public class DetectionControl : Border
VerticalAlignment = VerticalAlignment.Top, VerticalAlignment = VerticalAlignment.Top,
ClipToBounds = false, ClipToBounds = false,
}; };
_detectionLabel = new Label _detectionLabelPanel = new DetectionLabelPanel
{ {
Content = _detectionLabelText(detectionClass.Name), Confidence = canvasLabel.Confidence,
FontSize = 16, DetectionClass = Annotation.DetectionClassesDict[canvasLabel.ClassNumber]
Visibility = Visibility.Visible
}; };
DetectionLabelContainer.Children.Add(_detectionLabel);
DetectionLabelContainer.Children.Add(_detectionLabelPanel);
_selectionFrame = new Rectangle _selectionFrame = new Rectangle
{ {
Margin = new Thickness(-3),
HorizontalAlignment = HorizontalAlignment.Stretch, HorizontalAlignment = HorizontalAlignment.Stretch,
VerticalAlignment = VerticalAlignment.Stretch, VerticalAlignment = VerticalAlignment.Stretch,
Stroke = new SolidColorBrush(Colors.Black), Stroke = new SolidColorBrush(Colors.Black),
@@ -131,9 +126,9 @@ public class DetectionControl : Border
VerticalAlignment = VerticalAlignment.Stretch, VerticalAlignment = VerticalAlignment.Stretch,
Children = { _selectionFrame } Children = { _selectionFrame }
}; };
_grid.Children.Add(DetectionLabelContainer);
foreach (var rect in _resizedRectangles) foreach (var rect in _resizedRectangles)
_grid.Children.Add(rect); _grid.Children.Add(rect);
_grid.Children.Add(DetectionLabelContainer);
Child = _grid; Child = _grid;
Cursor = Cursors.SizeAll; Cursor = Cursors.SizeAll;
@@ -146,23 +141,25 @@ public class DetectionControl : Border
var rect = new Rectangle() // small rectangles at the corners and sides var rect = new Rectangle() // small rectangles at the corners and sides
{ {
ClipToBounds = false, ClipToBounds = false,
Margin = new Thickness(-RESIZE_RECT_SIZE * 0.7), Margin = new Thickness(-1.1 * RESIZE_RECT_SIZE),
HorizontalAlignment = ha, HorizontalAlignment = ha,
VerticalAlignment = va, VerticalAlignment = va,
Width = RESIZE_RECT_SIZE, Width = RESIZE_RECT_SIZE,
Height = RESIZE_RECT_SIZE, Height = RESIZE_RECT_SIZE,
Stroke = new SolidColorBrush(Color.FromArgb(230, 20, 20, 20)), // small rectangles color Stroke = new SolidColorBrush(Color.FromArgb(230, 20, 20, 20)), // small rectangles color
StrokeThickness = 0.8,
Fill = new SolidColorBrush(Color.FromArgb(150, 80, 80, 80)), Fill = new SolidColorBrush(Color.FromArgb(150, 80, 80, 80)),
Cursor = crs, Cursor = crs,
Name = name, Name = name,
}; };
rect.MouseDown += (sender, args) => _resizeStart(sender, args); rect.MouseDown += (sender, args) => _resizeStart(sender, args);
rect.MouseUp += (sender, args) => { (sender as UIElement)?.ReleaseMouseCapture(); };
return rect; return rect;
} }
public YoloLabel GetLabel(Size canvasSize, Size? videoSize = null) public CanvasLabel ToCanvasLabel() =>
{ new(DetectionClass.YoloId, Canvas.GetLeft(this), Canvas.GetTop(this), Width, Height);
var label = new CanvasLabel(DetectionClass.YoloId, Canvas.GetLeft(this), Canvas.GetTop(this), Width, Height);
return new YoloLabel(label, canvasSize, videoSize); public YoloLabel ToYoloLabel(Size canvasSize, Size? videoSize = null) =>
} new(ToCanvasLabel(), canvasSize, videoSize);
} }
@@ -0,0 +1,59 @@
<UserControl x:Class="Azaion.Common.Controls.DetectionLabelPanel"
xmlns="http://schemas.microsoft.com/winfx/2006/xaml/presentation"
xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml"
xmlns:mc="http://schemas.openxmlformats.org/markup-compatibility/2006"
xmlns:d="http://schemas.microsoft.com/expression/blend/2008"
mc:Ignorable="d">
<UserControl.Resources>
<!-- Friendly (Light Blue Square) -->
<DrawingImage x:Key="Friendly">
<DrawingImage.Drawing>
<DrawingGroup ClipGeometry="M0,0 V320 H320 V0 H0 Z">
<GeometryDrawing Brush="LightBlue" Geometry="M25,50 l150,0 0,100 -150,0 z">
<GeometryDrawing.Pen>
<Pen Brush="Black" Thickness="8"/>
</GeometryDrawing.Pen>
</GeometryDrawing>
</DrawingGroup>
</DrawingImage.Drawing>
</DrawingImage>
<!-- Hostile (Red Diamond) -->
<DrawingImage x:Key="Hostile">
<DrawingImage.Drawing>
<DrawingGroup ClipGeometry="M0,0 V320 H320 V0 H0 Z">
<GeometryDrawing Brush="Red" Geometry="M 100,28 L172,100 100,172 28,100 100,28 Z">
<GeometryDrawing.Pen>
<Pen Brush="Black" Thickness="8"/>
</GeometryDrawing.Pen>
</GeometryDrawing>
</DrawingGroup>
</DrawingImage.Drawing>
</DrawingImage>
<!-- Unknown (Yellow Quatrefoil) -->
<DrawingImage x:Key="Unknown">
<DrawingImage.Drawing>
<DrawingGroup ClipGeometry="M0,0 V320 H320 V0 H0 Z">
<GeometryDrawing Brush="Yellow" Geometry="M63,63 C63,20 137,20 137,63 C180,63 180,137 137,137 C137,180
63,180 63,137 C20,137 20,63 63,63 Z">
<GeometryDrawing.Pen>
<Pen Brush="Black" Thickness="8"/>
</GeometryDrawing.Pen>
</GeometryDrawing>
</DrawingGroup>
</DrawingImage.Drawing>
</DrawingImage>
</UserControl.Resources>
<Grid x:Name="DetectionGrid">
<Grid.ColumnDefinitions>
<ColumnDefinition Width="2"></ColumnDefinition>
<ColumnDefinition Width="Auto"></ColumnDefinition>
</Grid.ColumnDefinitions>
<Image Grid.Column="0" x:Name="AffiliationImage">
</Image>
<Label Grid.Column="1" x:Name="DetectionClassName" FontSize="16"></Label>
</Grid>
</UserControl>
@@ -0,0 +1,70 @@
using System.Windows.Media;
using Azaion.Common.DTO;
using Azaion.Common.Extensions;
namespace Azaion.Common.Controls
{
public partial class DetectionLabelPanel
{
private AffiliationEnum _affiliation = AffiliationEnum.None;
public AffiliationEnum Affiliation
{
get => _affiliation;
set
{
_affiliation = value;
UpdateAffiliationImage();
}
}
private DetectionClass _detectionClass = new();
public DetectionClass DetectionClass {
get => _detectionClass;
set
{
_detectionClass = value;
SetClassName();
}
}
private double _confidence;
public double Confidence
{
get => _confidence;
set
{
_confidence = value;
SetClassName();
}
}
private void SetClassName()
{
DetectionClassName.Content = _confidence >= 0.995 ? _detectionClass.UIName : $"{_detectionClass.UIName}: {_confidence * 100:F0}%";
DetectionGrid.Background = new SolidColorBrush(_detectionClass.Color.ToConfidenceColor(_confidence));
}
public DetectionLabelPanel()
{
InitializeComponent();
}
private string _detectionLabelText(string detectionClassName) =>
_confidence >= 0.98 ? detectionClassName : $"{detectionClassName}: {_confidence * 100:F0}%";
private void UpdateAffiliationImage()
{
if (_affiliation == AffiliationEnum.None)
{
AffiliationImage.Source = null;
return;
}
if (TryFindResource(_affiliation.ToString()) is DrawingImage drawingImage)
AffiliationImage.Source = drawingImage;
else
AffiliationImage.Source = null;
}
}
}
+33
View File
@@ -0,0 +1,33 @@
using MediatR;
using MessagePack;
namespace Azaion.Common.DTO;
public enum AIAvailabilityEnum
{
None = 0,
Downloading = 10,
Converting = 20,
Uploading = 30,
Enabled = 200,
Error = 500
}
[MessagePackObject]
public class AIAvailabilityStatusEvent : INotification
{
[Key("s")] public AIAvailabilityEnum Status { get; set; }
[Key("m")] public string? ErrorMessage { get; set; }
public override string ToString() => $"{StatusMessageDict.GetValueOrDefault(Status, "Помилка")} {ErrorMessage}";
private static readonly Dictionary<AIAvailabilityEnum, string> StatusMessageDict = new()
{
{ AIAvailabilityEnum.Downloading, "Йде завантаження AI для Вашої відеокарти" },
{ AIAvailabilityEnum.Converting, "Йде налаштування AI під Ваше залізо. (5-12 хвилин в залежності від моделі відеокарти, до 50 хв на старих GTX1650)" },
{ AIAvailabilityEnum.Uploading, "Йде зберігання AI" },
{ AIAvailabilityEnum.Enabled, "AI готовий для розпізнавання" },
{ AIAvailabilityEnum.Error, "Помилка під час налаштування AI" }
};
}
+9
View File
@@ -0,0 +1,9 @@
namespace Azaion.Common.DTO;
public enum AffiliationEnum
{
None = 0,
Friendly = 10,
Hostile = 20,
Unknown = 30
}
+23 -21
View File
@@ -3,31 +3,33 @@ using Azaion.Common.Database;
namespace Azaion.Common.DTO; namespace Azaion.Common.DTO;
public class AnnotationResult // public class AnnotationResult
{ //{
public Annotation Annotation { get; set; } //public Annotation Annotation { get; set; }
public List<(Color Color, double Confidence)> Colors { get; private set; }
public string ImagePath { get; set; }
public string TimeStr { get; set; }
public string ClassName { get; set; }
public AnnotationResult(Dictionary<int, DetectionClass> allDetectionClasses, Annotation annotation) //public string ImagePath { get; set; }
{ //public string TimeStr { get; set; }
Annotation = annotation; //public List<(Color Color, double Confidence)> Colors { get; private set; }
// public string ClassName { get; set; }
TimeStr = $"{annotation.Time:h\\:mm\\:ss}"; // public AnnotationResult(Dictionary<int, DetectionClass> allDetectionClasses, Annotation annotation)
ImagePath = annotation.ImagePath; // {
var detectionClasses = annotation.Detections.Select(x => x.ClassNumber).Distinct().ToList(); //Annotation = annotation;
Colors = annotation.Detections //TimeStr = $"{annotation.Time:h\\:mm\\:ss}";
.Select(d => (allDetectionClasses[d.ClassNumber].Color, d.Confidence)) //ImagePath = annotation.ImagePath;
.ToList();
ClassName = detectionClasses.Count > 1 // var detectionClasses = annotation.Detections.Select(x => x.ClassNumber).Distinct().ToList();
? string.Join(", ", detectionClasses.Select(x => allDetectionClasses[x].UIName)) // ClassName = detectionClasses.Count > 1
: allDetectionClasses[detectionClasses.FirstOrDefault()].UIName; // ? string.Join(", ", detectionClasses.Select(x => allDetectionClasses[x].UIName))
} // : allDetectionClasses[detectionClasses.FirstOrDefault()].UIName;
} //
// Colors = annotation.Detections
// .Select(d => (allDetectionClasses[d.ClassNumber].Color, d.Confidence))
// .ToList();
// }
// }
+9 -1
View File
@@ -4,7 +4,8 @@ using MessagePack;
namespace Azaion.Common.DTO; namespace Azaion.Common.DTO;
[MessagePackObject] [MessagePackObject]
public class ApiCredentials : EventArgs [Verb("credsManual", HelpText = "Manual Credentials")]
public class ApiCredentials
{ {
[Key(nameof(Email))] [Key(nameof(Email))]
[Option('e', "email", Required = true, HelpText = "User Email")] [Option('e', "email", Required = true, HelpText = "User Email")]
@@ -14,3 +15,10 @@ public class ApiCredentials : EventArgs
[Option('p', "pass", Required = true, HelpText = "User Password")] [Option('p', "pass", Required = true, HelpText = "User Password")]
public string Password { get; set; } = null!; public string Password { get; set; } = null!;
} }
[Verb("credsEncrypted", isDefault: true, HelpText = "Encrypted Credentials")]
public class ApiCredentialsEncrypted
{
[Option('c', "creds", Group = "auto", HelpText = "Encrypted Creds")]
public string Creds { get; set; } = null!;
}
@@ -12,6 +12,7 @@ public class AIRecognitionConfig
[Key("t_dc")] public double TrackingDistanceConfidence { get; set; } [Key("t_dc")] public double TrackingDistanceConfidence { get; set; }
[Key("t_pi")] public double TrackingProbabilityIncrease { get; set; } [Key("t_pi")] public double TrackingProbabilityIncrease { get; set; }
[Key("t_it")] public double TrackingIntersectionThreshold { get; set; } [Key("t_it")] public double TrackingIntersectionThreshold { get; set; }
[Key("ov_p")] public double BigImageTileOverlapPercent { get; set; }
[Key("d")] public byte[] Data { get; set; } = null!; [Key("d")] public byte[] Data { get; set; } = null!;
[Key("p")] public List<string> Paths { get; set; } = null!; [Key("p")] public List<string> Paths { get; set; } = null!;
+1 -2
View File
@@ -1,9 +1,8 @@
namespace Azaion.Common.DTO; namespace Azaion.Common.DTO;
public class DirectoriesConfig public class DirectoriesConfig
{ {
public string ApiResourcesDirectory { get; set; } = null!; public string? ApiResourcesDirectory { get; set; } = null!;
public string VideosDirectory { get; set; } = null!; public string VideosDirectory { get; set; } = null!;
public string LabelsDirectory { get; set; } = null!; public string LabelsDirectory { get; set; } = null!;
+4 -4
View File
@@ -1,19 +1,19 @@
using System.Collections.ObjectModel; using System.Collections.ObjectModel;
using System.Windows; using System.Windows;
using Azaion.Common.Database;
namespace Azaion.Common.DTO; namespace Azaion.Common.DTO;
public class FormState public class FormState
{ {
public MediaFileInfo? CurrentMedia { get; set; } public MediaFileInfo? CurrentMedia { get; set; }
public string VideoName => CurrentMedia?.FName ?? ""; public string MediaName => CurrentMedia?.FName ?? "";
public string CurrentMrl { get; set; } = null!; public Size CurrentMediaSize { get; set; }
public Size CurrentVideoSize { get; set; }
public TimeSpan CurrentVideoLength { get; set; } public TimeSpan CurrentVideoLength { get; set; }
public TimeSpan? BackgroundTime { get; set; } public TimeSpan? BackgroundTime { get; set; }
public int CurrentVolume { get; set; } = 100; public int CurrentVolume { get; set; } = 100;
public ObservableCollection<AnnotationResult> AnnotationResults { get; set; } = []; public ObservableCollection<Annotation> AnnotationResults { get; set; } = [];
public WindowEnum ActiveWindow { get; set; } public WindowEnum ActiveWindow { get; set; }
} }
+55 -21
View File
@@ -22,32 +22,56 @@ public abstract class Label
public class CanvasLabel : Label public class CanvasLabel : Label
{ {
public double X { get; set; } public double Left { get; set; }
public double Y { get; set; } public double Top { get; set; }
public double Width { get; set; } public double Width { get; set; }
public double Height { get; set; } public double Height { get; set; }
public double Confidence { get; set; } public double Confidence { get; set; }
public CanvasLabel() public double Bottom
{ {
get => Top + Height;
set => Height = value - Top;
} }
public CanvasLabel(int classNumber, double x, double y, double width, double height, double confidence = 1) : base(classNumber) public double Right
{ {
X = x; get => Left + Width;
Y = y; set => Width = value - Left;
}
public double CenterX => Left + Width / 2.0;
public double CenterY => Top + Height / 2.0;
public Size Size => new(Width, Height);
public CanvasLabel() { }
public CanvasLabel(double left, double right, double top, double bottom)
{
Left = left;
Top = top;
Width = right - left;
Height = bottom - top;
Confidence = 1;
ClassNumber = -1;
}
public CanvasLabel(int classNumber, double left, double top, double width, double height, double confidence = 1) : base(classNumber)
{
Left = left;
Top = top;
Width = width; Width = width;
Height = height; Height = height;
Confidence = confidence; Confidence = confidence;
} }
public CanvasLabel(YoloLabel label, Size canvasSize, Size? videoSize = null, double confidence = 1) public CanvasLabel(YoloLabel label, Size canvasSize, Size? mediaSize = null, double confidence = 1)
{ {
var cw = canvasSize.Width; var cw = canvasSize.Width;
var ch = canvasSize.Height; var ch = canvasSize.Height;
var canvasAr = cw / ch; var canvasAr = cw / ch;
var videoAr = videoSize.HasValue var videoAr = mediaSize.HasValue
? videoSize.Value.Width / videoSize.Value.Height ? mediaSize.Value.Width / mediaSize.Value.Height
: canvasAr; : canvasAr;
ClassNumber = label.ClassNumber; ClassNumber = label.ClassNumber;
@@ -60,8 +84,8 @@ public class CanvasLabel : Label
var realHeight = cw / videoAr; //real video height in pixels on canvas var realHeight = cw / videoAr; //real video height in pixels on canvas
var blackStripHeight = (ch - realHeight) / 2.0; //height of black strips at the top and bottom var blackStripHeight = (ch - realHeight) / 2.0; //height of black strips at the top and bottom
X = left * cw; Left = left * cw;
Y = top * realHeight + blackStripHeight; Top = top * realHeight + blackStripHeight;
Width = label.Width * cw; Width = label.Width * cw;
Height = label.Height * realHeight; Height = label.Height * realHeight;
} }
@@ -70,13 +94,20 @@ public class CanvasLabel : Label
var realWidth = ch * videoAr; //real video width in pixels on canvas var realWidth = ch * videoAr; //real video width in pixels on canvas
var blackStripWidth = (cw - realWidth) / 2.0; //height of black strips at the top and bottom var blackStripWidth = (cw - realWidth) / 2.0; //height of black strips at the top and bottom
X = left * realWidth + blackStripWidth; Left = left * realWidth + blackStripWidth;
Y = top * ch; Top = top * ch;
Width = label.Width * realWidth; Width = label.Width * realWidth;
Height = label.Height * ch; Height = label.Height * ch;
} }
Confidence = confidence; Confidence = confidence;
} }
public CanvasLabel ReframeToSmall(CanvasLabel smallTile) =>
new(ClassNumber, Left - smallTile.Left, Top - smallTile.Top, Width, Height, Confidence);
public CanvasLabel ReframeFromSmall(CanvasLabel smallTile) =>
new(ClassNumber, Left + smallTile.Left, Top + smallTile.Top, Width, Height, Confidence);
} }
[MessagePackObject] [MessagePackObject]
@@ -105,13 +136,13 @@ public class YoloLabel : Label
public RectangleF ToRectangle() => public RectangleF ToRectangle() =>
new((float)(CenterX - Width / 2.0), (float)(CenterY - Height / 2.0), (float)Width, (float)Height); new((float)(CenterX - Width / 2.0), (float)(CenterY - Height / 2.0), (float)Width, (float)Height);
public YoloLabel(CanvasLabel canvasLabel, Size canvasSize, Size? videoSize = null) public YoloLabel(CanvasLabel canvasLabel, Size canvasSize, Size? mediaSize = null)
{ {
var cw = canvasSize.Width; var cw = canvasSize.Width;
var ch = canvasSize.Height; var ch = canvasSize.Height;
var canvasAr = cw / ch; var canvasAr = cw / ch;
var videoAr = videoSize.HasValue var videoAr = mediaSize.HasValue
? videoSize.Value.Width / videoSize.Value.Height ? mediaSize.Value.Width / mediaSize.Value.Height
: canvasAr; : canvasAr;
ClassNumber = canvasLabel.ClassNumber; ClassNumber = canvasLabel.ClassNumber;
@@ -119,20 +150,20 @@ public class YoloLabel : Label
double left, top; double left, top;
if (videoAr > canvasAr) //100% width if (videoAr > canvasAr) //100% width
{ {
left = canvasLabel.X / cw; left = canvasLabel.Left / cw;
Width = canvasLabel.Width / cw; Width = canvasLabel.Width / cw;
var realHeight = cw / videoAr; //real video height in pixels on canvas var realHeight = cw / videoAr; //real video height in pixels on canvas
var blackStripHeight = (ch - realHeight) / 2.0; //height of black strips at the top and bottom var blackStripHeight = (ch - realHeight) / 2.0; //height of black strips at the top and bottom
top = (canvasLabel.Y - blackStripHeight) / realHeight; top = (canvasLabel.Top - blackStripHeight) / realHeight;
Height = canvasLabel.Height / realHeight; Height = canvasLabel.Height / realHeight;
} }
else //100% height else //100% height
{ {
top = canvasLabel.Y / ch; top = canvasLabel.Top / ch;
Height = canvasLabel.Height / ch; Height = canvasLabel.Height / ch;
var realWidth = ch * videoAr; //real video width in pixels on canvas var realWidth = ch * videoAr; //real video width in pixels on canvas
var blackStripWidth = (cw - realWidth) / 2.0; //height of black strips at the top and bottom var blackStripWidth = (cw - realWidth) / 2.0; //height of black strips at the top and bottom
left = (canvasLabel.X - blackStripWidth) / realWidth; left = (canvasLabel.Left - blackStripWidth) / realWidth;
Width = canvasLabel.Width / realWidth; Width = canvasLabel.Width / realWidth;
} }
@@ -193,13 +224,16 @@ public class Detection : YoloLabel
{ {
[JsonProperty(PropertyName = "an")][Key("an")] public string AnnotationName { get; set; } = null!; [JsonProperty(PropertyName = "an")][Key("an")] public string AnnotationName { get; set; } = null!;
[JsonProperty(PropertyName = "p")][Key("p")] public double Confidence { get; set; } [JsonProperty(PropertyName = "p")][Key("p")] public double Confidence { get; set; }
[JsonProperty(PropertyName = "dn")][Key("dn")] public string Description { get; set; }
[JsonProperty(PropertyName = "af")][Key("af")] public AffiliationEnum Affiliation { get; set; }
//For db & serialization //For db & serialization
public Detection(){} public Detection(){}
public Detection(string annotationName, YoloLabel label, double confidence = 1) public Detection(string annotationName, YoloLabel label, string description = "", double confidence = 1)
{ {
AnnotationName = annotationName; AnnotationName = annotationName;
Description = description;
ClassNumber = label.ClassNumber; ClassNumber = label.ClassNumber;
CenterX = label.CenterX; CenterX = label.CenterX;
CenterY = label.CenterY; CenterY = label.CenterY;
+60 -6
View File
@@ -1,6 +1,6 @@
using System.IO; using System.IO;
using System.Windows.Media;
using Azaion.Common.DTO; using Azaion.Common.DTO;
using Azaion.Common.DTO.Config;
using Azaion.Common.DTO.Queue; using Azaion.Common.DTO.Queue;
using MessagePack; using MessagePack;
@@ -12,12 +12,14 @@ public class Annotation
private static string _labelsDir = null!; private static string _labelsDir = null!;
private static string _imagesDir = null!; private static string _imagesDir = null!;
private static string _thumbDir = null!; private static string _thumbDir = null!;
public static Dictionary<int, DetectionClass> DetectionClassesDict = null!;
public static void InitializeDirs(DirectoriesConfig config) public static void Init(DirectoriesConfig config, Dictionary<int, DetectionClass> detectionClassesDict)
{ {
_labelsDir = config.LabelsDirectory; _labelsDir = config.LabelsDirectory;
_imagesDir = config.ImagesDirectory; _imagesDir = config.ImagesDirectory;
_thumbDir = config.ThumbnailsDirectory; _thumbDir = config.ThumbnailsDirectory;
DetectionClassesDict = detectionClassesDict;
} }
[Key("n")] public string Name { get; set; } = null!; [Key("n")] public string Name { get; set; } = null!;
@@ -40,12 +42,64 @@ public class Annotation
[Key("lon")]public double Lon { get; set; } [Key("lon")]public double Lon { get; set; }
#region Calculated #region Calculated
[IgnoreMember]public List<int> Classes => Detections.Select(x => x.ClassNumber).ToList(); [IgnoreMember] public List<int> Classes => Detections.Select(x => x.ClassNumber).ToList();
[IgnoreMember]public string ImagePath => Path.Combine(_imagesDir, $"{Name}{ImageExtension}"); [IgnoreMember] public string ImagePath => Path.Combine(_imagesDir, $"{Name}{ImageExtension}");
[IgnoreMember]public string LabelPath => Path.Combine(_labelsDir, $"{Name}.txt"); [IgnoreMember] public string LabelPath => Path.Combine(_labelsDir, $"{Name}.txt");
[IgnoreMember]public string ThumbPath => Path.Combine(_thumbDir, $"{Name}{Constants.THUMBNAIL_PREFIX}.jpg"); [IgnoreMember] public string ThumbPath => Path.Combine(_thumbDir, $"{Name}{Constants.THUMBNAIL_PREFIX}.jpg");
[IgnoreMember] public bool IsSplit => Name.Contains(Constants.SPLIT_SUFFIX);
private CanvasLabel? _splitTile;
[IgnoreMember] public CanvasLabel? SplitTile
{
get
{
if (!IsSplit)
return null;
if (_splitTile != null)
return _splitTile;
var startCoordIndex = Name.IndexOf(Constants.SPLIT_SUFFIX, StringComparison.Ordinal) + Constants.SPLIT_SUFFIX.Length;
var coordsStr = Name.Substring(startCoordIndex, 9).Split('_');
_splitTile = new CanvasLabel
{
Left = double.Parse(coordsStr[0]),
Top = double.Parse(coordsStr[1]),
Width = Constants.AI_TILE_SIZE,
Height = Constants.AI_TILE_SIZE
};
return _splitTile;
}
}
[IgnoreMember] public string TimeStr => $"{Time:h\\:mm\\:ss}";
private List<(Color Color, double Confidence)>? _colors;
[IgnoreMember] public List<(Color Color, double Confidence)> Colors => _colors ??= Detections
.Select(d => (DetectionClassesDict[d.ClassNumber].Color, d.Confidence))
.ToList();
private string _className;
[IgnoreMember] public string ClassName
{
get
{
if (string.IsNullOrEmpty(_className))
{
var detectionClasses = Detections.Select(x => x.ClassNumber).Distinct().ToList();
_className = detectionClasses.Count > 1
? string.Join(", ", detectionClasses.Select(x => DetectionClassesDict[x].UIName))
: DetectionClassesDict[detectionClasses.FirstOrDefault()].UIName;
}
return _className;
}
}
#endregion Calculated #endregion Calculated
} }
[MessagePackObject] [MessagePackObject]
+4 -1
View File
@@ -1,4 +1,5 @@
using System.Data.SQLite; using System.Data.SQLite;
using System.Diagnostics;
using System.IO; using System.IO;
using Azaion.Common.DTO; using Azaion.Common.DTO;
using Azaion.Common.DTO.Config; using Azaion.Common.DTO.Config;
@@ -48,7 +49,7 @@ public class DbFactory : IDbFactory
.UseDataProvider(SQLiteTools.GetDataProvider()) .UseDataProvider(SQLiteTools.GetDataProvider())
.UseConnection(_memoryConnection) .UseConnection(_memoryConnection)
.UseMappingSchema(AnnotationsDbSchemaHolder.MappingSchema) .UseMappingSchema(AnnotationsDbSchemaHolder.MappingSchema)
;//.UseTracing(TraceLevel.Info, t => logger.LogInformation(t.SqlText)); .UseTracing(TraceLevel.Info, t => logger.LogInformation(t.SqlText));
_fileConnection = new SQLiteConnection(FileConnStr); _fileConnection = new SQLiteConnection(FileConnStr);
@@ -62,6 +63,8 @@ public class DbFactory : IDbFactory
RecreateTables(); RecreateTables();
_fileConnection.Open(); _fileConnection.Open();
using var db = new AnnotationsDb(_fileDataOptions);
SchemaMigrator.EnsureSchemaUpdated(db, typeof(Annotation), typeof(Detection));
_fileConnection.BackupDatabase(_memoryConnection, "main", "main", -1, null, -1); _fileConnection.BackupDatabase(_memoryConnection, "main", "main", -1, null, -1);
} }
+97
View File
@@ -0,0 +1,97 @@
using System.Data;
using LinqToDB.Data;
using LinqToDB.Mapping;
namespace Azaion.Common.Database;
public static class SchemaMigrator
{
public static void EnsureSchemaUpdated(DataConnection dbConnection, params Type[] entityTypes)
{
var connection = dbConnection.Connection;
var mappingSchema = dbConnection.MappingSchema;
if (connection.State == ConnectionState.Closed)
{
connection.Open();
}
foreach (var type in entityTypes)
{
var entityDescriptor = mappingSchema.GetEntityDescriptor(type);
var tableName = entityDescriptor.Name.Name;
var existingColumns = GetTableColumns(connection, tableName);
foreach (var column in entityDescriptor.Columns)
{
if (existingColumns.Contains(column.ColumnName, StringComparer.OrdinalIgnoreCase))
continue;
var columnDefinition = GetColumnDefinition(column);
dbConnection.Execute($"ALTER TABLE {tableName} ADD COLUMN {columnDefinition}");
}
}
}
private static HashSet<string> GetTableColumns(IDbConnection connection, string tableName)
{
var columns = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
using var cmd = connection.CreateCommand();
cmd.CommandText = $"PRAGMA table_info({tableName})";
using var reader = cmd.ExecuteReader();
while (reader.Read())
columns.Add(reader.GetString(1)); // "name" is in the second column
return columns;
}
private static string GetColumnDefinition(ColumnDescriptor column)
{
var type = column.MemberType;
var underlyingType = Nullable.GetUnderlyingType(type) ?? type;
var sqliteType = GetSqliteType(underlyingType);
var defaultClause = GetSqlDefaultValue(type, underlyingType);
return $"\"{column.ColumnName}\" {sqliteType} {defaultClause}";
}
private static string GetSqliteType(Type type) =>
type switch
{
_ when type == typeof(int)
|| type == typeof(long)
|| type == typeof(bool)
|| type.IsEnum
=> "INTEGER",
_ when type == typeof(double)
|| type == typeof(float)
|| type == typeof(decimal)
=> "REAL",
_ when type == typeof(byte[])
=> "BLOB",
_ => "TEXT"
};
private static string GetSqlDefaultValue(Type originalType, Type underlyingType)
{
var isNullable = originalType.IsClass || Nullable.GetUnderlyingType(originalType) != null;
if (isNullable)
return "NULL";
var defaultValue = Activator.CreateInstance(underlyingType);
if (underlyingType == typeof(bool))
return $"NOT NULL DEFAULT {(Convert.ToBoolean(defaultValue) ? 1 : 0)}";
if (underlyingType.IsEnum)
return $"NOT NULL DEFAULT {(int)defaultValue}";
if (underlyingType.IsValueType && defaultValue is IFormattable f)
return $"NOT NULL DEFAULT {f.ToString(null, System.Globalization.CultureInfo.InvariantCulture)}";
return $"NOT NULL DEFAULT '{defaultValue}'";
}
}
@@ -26,4 +26,14 @@ public static class BitmapExtensions
public static Color CreateTransparent(this Color color, byte transparency) => public static Color CreateTransparent(this Color color, byte transparency) =>
Color.FromArgb(transparency, color.R, color.G, color.B); Color.FromArgb(transparency, color.R, color.G, color.B);
public static async Task SaveImage(this BitmapSource bitmap, string path, CancellationToken ct = default)
{
await using var stream = new FileStream(path, FileMode.Create);
var encoder = new JpegBitmapEncoder();
encoder.Frames.Add(BitmapFrame.Create(bitmap));
encoder.Save(stream);
await stream.FlushAsync(ct);
}
} }
@@ -0,0 +1,12 @@
namespace Azaion.Common.Extensions;
public static class EnumExtensions
{
public static T GetValueOrDefault<T>(this string value, T defaultValue) where T : struct
{
if (string.IsNullOrEmpty(value))
return defaultValue;
return Enum.TryParse(value, true, out T result) ? result : defaultValue;
}
}
@@ -0,0 +1,10 @@
using System.Windows;
namespace Azaion.Common.Extensions;
public static class SizeExtensions
{
public static bool FitSizeForAI(this Size size) =>
// Allow to be up to FullHD to save as 1280*1280
size.Width <= Constants.AI_TILE_SIZE * 1.5 && size.Height <= Constants.AI_TILE_SIZE * 1.5;
}
+59
View File
@@ -0,0 +1,59 @@
using System.Security.Cryptography;
using System.Text;
using Newtonsoft.Json;
namespace Azaion.Common;
/// <summary>
/// Symmetric (AES-CFB) encryption helpers for JSON-serialisable models.
/// Output layout: Base64( 16-byte IV || ciphertext ).
/// </summary>
public class Security
{
    // Builds the fallback passphrase used when the caller supplies no key.
    // NOTE(review): the value embeds the current UTC hour ("HH"), so data
    // encrypted with the default key can only be decrypted within the same
    // hour — confirm this short-lived-secret behaviour is intentional.
    private static string GenDefaultKey()
    {
        var date = DateTime.UtcNow;
        return $"sAzaion_default_dfvkjhg_{date:yyyy}-{date:MM}_{date:dd}_{date:HH}_key";
    }

    /// <summary>
    /// Serialises <paramref name="model"/> to JSON and encrypts it with AES-CFB.
    /// The AES key is SHA-256 of the passphrase; a fresh random 16-byte IV is
    /// generated per call and prepended to the ciphertext.
    /// </summary>
    /// <typeparam name="T">Any JSON-serialisable reference type.</typeparam>
    /// <param name="model">Object to encrypt.</param>
    /// <param name="key">Optional passphrase; falls back to <see cref="GenDefaultKey"/>.</param>
    /// <returns>Base64 string of IV + ciphertext.</returns>
    public static string Encrypt<T>(T model, string? key = null) where T : class
    {
        var json = JsonConvert.SerializeObject(model);
        var inputBytes = Encoding.UTF8.GetBytes(json);
        // NOTE(review): a single unsalted SHA-256 pass is used as the key
        // derivation; for user-supplied passphrases a PBKDF (e.g.
        // Rfc2898DeriveBytes.Pbkdf2) would resist brute force better — confirm
        // the threat model allows this.
        var keyBytes = SHA256.HashData(Encoding.UTF8.GetBytes(key ?? GenDefaultKey()));
        var iv = RandomNumberGenerator.GetBytes(16);

        using var aes = Aes.Create();
        aes.Key = keyBytes;
        aes.IV = iv;
        aes.Mode = CipherMode.CFB;
        aes.Padding = PaddingMode.ISO10126;

        using var encryptor = aes.CreateEncryptor();
        var ciphertext = encryptor.TransformFinalBlock(inputBytes, 0, inputBytes.Length);

        // Prepend the IV so Decrypt can recover it from the payload itself.
        var result = new byte[iv.Length + ciphertext.Length];
        iv.CopyTo(result, 0);
        ciphertext.CopyTo(result, iv.Length);
        return Convert.ToBase64String(result);
    }

    /// <summary>
    /// Reverses <see cref="Encrypt{T}"/>: extracts the IV from the first 16
    /// bytes, decrypts the remainder and deserialises the JSON payload.
    /// </summary>
    /// <param name="encryptedData">Base64 produced by <see cref="Encrypt{T}"/>.</param>
    /// <param name="key">Passphrase; must match the one used to encrypt.</param>
    /// <exception cref="FormatException">Input is not valid Base64.</exception>
    public static T Decrypt<T>(string encryptedData, string? key = null) where T : class
    {
        var ciphertextWithIv = Convert.FromBase64String(encryptedData);
        var keyBytes = SHA256.HashData(Encoding.UTF8.GetBytes(key ?? GenDefaultKey()));

        // First 16 bytes are the IV written by Encrypt.
        var iv = ciphertextWithIv[..16];
        var ciphertext = ciphertextWithIv[16..];

        using var aes = Aes.Create();
        aes.Key = keyBytes;
        aes.IV = iv;
        aes.Mode = CipherMode.CFB;
        aes.Padding = PaddingMode.ISO10126;

        using var decryptor = aes.CreateDecryptor();
        var plaintext = decryptor.TransformFinalBlock(ciphertext, 0, ciphertext.Length);
        var json = Encoding.UTF8.GetString(plaintext);
        // Null-forgiving: a successful decrypt of our own payload is expected
        // to yield valid JSON for T.
        return JsonConvert.DeserializeObject<T>(json)!;
    }
}
+11 -10
View File
@@ -21,6 +21,7 @@ using RabbitMQ.Stream.Client.Reliable;
namespace Azaion.Common.Services; namespace Azaion.Common.Services;
// SHOULD BE ONLY ONE INSTANCE OF AnnotationService. Do not add ANY NotificationHandler to it! // SHOULD BE ONLY ONE INSTANCE OF AnnotationService. Do not add ANY NotificationHandler to it!
// Queue consumer should be created only once.
public class AnnotationService : IAnnotationService public class AnnotationService : IAnnotationService
{ {
private readonly IDbFactory _dbFactory; private readonly IDbFactory _dbFactory;
@@ -94,6 +95,7 @@ public class AnnotationService : IAnnotationService
await SaveAnnotationInner( await SaveAnnotationInner(
msg.CreatedDate, msg.CreatedDate,
msg.OriginalMediaName, msg.OriginalMediaName,
msg.Name,
msg.Time, msg.Time,
JsonConvert.DeserializeObject<List<Detection>>(msg.Detections) ?? [], JsonConvert.DeserializeObject<List<Detection>>(msg.Detections) ?? [],
msg.Source, msg.Source,
@@ -136,16 +138,16 @@ public class AnnotationService : IAnnotationService
public async Task<Annotation> SaveAnnotation(AnnotationImage a, CancellationToken ct = default) public async Task<Annotation> SaveAnnotation(AnnotationImage a, CancellationToken ct = default)
{ {
a.Time = TimeSpan.FromMilliseconds(a.Milliseconds); a.Time = TimeSpan.FromMilliseconds(a.Milliseconds);
return await SaveAnnotationInner(DateTime.UtcNow, a.OriginalMediaName, a.Time, a.Detections.ToList(), return await SaveAnnotationInner(DateTime.UtcNow, a.OriginalMediaName, a.Name, a.Time, a.Detections.ToList(),
SourceEnum.AI, new MemoryStream(a.Image), _api.CurrentUser.Role, _api.CurrentUser.Email, token: ct); SourceEnum.AI, new MemoryStream(a.Image), _api.CurrentUser.Role, _api.CurrentUser.Email, token: ct);
} }
//Manual //Manual
public async Task<Annotation> SaveAnnotation(string originalMediaName, TimeSpan time, List<Detection> detections, Stream? stream = null, CancellationToken token = default) => public async Task<Annotation> SaveAnnotation(string originalMediaName, string annotationName, TimeSpan time, List<Detection> detections, Stream? stream = null, CancellationToken token = default) =>
await SaveAnnotationInner(DateTime.UtcNow, originalMediaName, time, detections, SourceEnum.Manual, stream, await SaveAnnotationInner(DateTime.UtcNow, originalMediaName, annotationName, time, detections, SourceEnum.Manual, stream,
_api.CurrentUser.Role, _api.CurrentUser.Email, token: token); _api.CurrentUser.Role, _api.CurrentUser.Email, token: token);
private async Task<Annotation> SaveAnnotationInner(DateTime createdDate, string originalMediaName, TimeSpan time, private async Task<Annotation> SaveAnnotationInner(DateTime createdDate, string originalMediaName, string annotationName, TimeSpan time,
List<Detection> detections, SourceEnum source, Stream? stream, List<Detection> detections, SourceEnum source, Stream? stream,
RoleEnum userRole, RoleEnum userRole,
string createdEmail, string createdEmail,
@@ -153,21 +155,20 @@ public class AnnotationService : IAnnotationService
CancellationToken token = default) CancellationToken token = default)
{ {
var status = AnnotationStatus.Created; var status = AnnotationStatus.Created;
var fName = originalMediaName.ToTimeName(time);
var annotation = await _dbFactory.RunWrite(async db => var annotation = await _dbFactory.RunWrite(async db =>
{ {
var ann = await db.Annotations var ann = await db.Annotations
.LoadWith(x => x.Detections) .LoadWith(x => x.Detections)
.FirstOrDefaultAsync(x => x.Name == fName, token: token); .FirstOrDefaultAsync(x => x.Name == annotationName, token: token);
await db.Detections.DeleteAsync(x => x.AnnotationName == fName, token: token); await db.Detections.DeleteAsync(x => x.AnnotationName == annotationName, token: token);
if (ann != null) //Annotation is already exists if (ann != null) //Annotation is already exists
{ {
status = AnnotationStatus.Edited; status = AnnotationStatus.Edited;
var annotationUpdatable = db.Annotations var annotationUpdatable = db.Annotations
.Where(x => x.Name == fName) .Where(x => x.Name == annotationName)
.Set(x => x.Source, source); .Set(x => x.Source, source);
if (userRole.IsValidator() && source == SourceEnum.Manual) if (userRole.IsValidator() && source == SourceEnum.Manual)
@@ -188,7 +189,7 @@ public class AnnotationService : IAnnotationService
ann = new Annotation ann = new Annotation
{ {
CreatedDate = createdDate, CreatedDate = createdDate,
Name = fName, Name = annotationName,
OriginalMediaName = originalMediaName, OriginalMediaName = originalMediaName,
Time = time, Time = time,
ImageExtension = Constants.JPG_EXT, ImageExtension = Constants.JPG_EXT,
@@ -264,6 +265,6 @@ public class AnnotationService : IAnnotationService
public interface IAnnotationService public interface IAnnotationService
{ {
Task<Annotation> SaveAnnotation(AnnotationImage a, CancellationToken ct = default); Task<Annotation> SaveAnnotation(AnnotationImage a, CancellationToken ct = default);
Task<Annotation> SaveAnnotation(string originalMediaName, TimeSpan time, List<Detection> detections, Stream? stream = null, CancellationToken token = default); Task<Annotation> SaveAnnotation(string originalMediaName, string annotationName, TimeSpan time, List<Detection> detections, Stream? stream = null, CancellationToken token = default);
Task ValidateAnnotations(List<string> annotationNames, bool fromQueue = false, CancellationToken token = default); Task ValidateAnnotations(List<string> annotationNames, bool fromQueue = false, CancellationToken token = default);
} }
+7 -7
View File
@@ -237,11 +237,11 @@ public class GalleryService(
.ToList(); .ToList();
if (annotation.Detections.Any()) if (annotation.Detections.Any())
{ {
var labelsMinX = labels.Min(x => x.X); var labelsMinX = labels.Min(x => x.Left);
var labelsMaxX = labels.Max(x => x.X + x.Width); var labelsMaxX = labels.Max(x => x.Left + x.Width);
var labelsMinY = labels.Min(x => x.Y); var labelsMinY = labels.Min(x => x.Top);
var labelsMaxY = labels.Max(x => x.Y + x.Height); var labelsMaxY = labels.Max(x => x.Top + x.Height);
var labelsHeight = labelsMaxY - labelsMinY + 2 * border; var labelsHeight = labelsMaxY - labelsMinY + 2 * border;
var labelsWidth = labelsMaxX - labelsMinX + 2 * border; var labelsWidth = labelsMaxX - labelsMinX + 2 * border;
@@ -270,7 +270,7 @@ public class GalleryService(
var color = _annotationConfig.DetectionClassesDict[label.ClassNumber].Color; var color = _annotationConfig.DetectionClassesDict[label.ClassNumber].Color;
var brush = new SolidBrush(Color.FromArgb(color.A, color.R, color.G, color.B)); var brush = new SolidBrush(Color.FromArgb(color.A, color.R, color.G, color.B));
g.DrawRectangle(new Pen(brush, width: 3), (float)((label.X - frameX) / scale), (float)((label.Y - frameY) / scale), (float)(label.Width / scale), (float)(label.Height / scale)); g.DrawRectangle(new Pen(brush, width: 3), (float)((label.Left - frameX) / scale), (float)((label.Top - frameY) / scale), (float)(label.Width / scale), (float)(label.Height / scale));
} }
bitmap.Save(annotation.ThumbPath, ImageFormat.Jpeg); bitmap.Save(annotation.ThumbPath, ImageFormat.Jpeg);
@@ -291,10 +291,10 @@ public class GalleryService(
var color = detClass.Color; var color = detClass.Color;
var brush = new SolidBrush(Color.FromArgb(color.A, color.R, color.G, color.B)); var brush = new SolidBrush(Color.FromArgb(color.A, color.R, color.G, color.B));
var det = new CanvasLabel(detection, new Size(originalImage.Width, originalImage.Height)); var det = new CanvasLabel(detection, new Size(originalImage.Width, originalImage.Height));
g.DrawRectangle(new Pen(brush, width: 3), (float)det.X, (float)det.Y, (float)det.Width, (float)det.Height); g.DrawRectangle(new Pen(brush, width: 3), (float)det.Left, (float)det.Top, (float)det.Width, (float)det.Height);
var label = detection.Confidence >= 0.995 ? detClass.UIName : $"{detClass.UIName}: {detection.Confidence * 100:F0}%"; var label = detection.Confidence >= 0.995 ? detClass.UIName : $"{detClass.UIName}: {detection.Confidence * 100:F0}%";
g.DrawTextBox(label, new PointF((float)(det.X + det.Width / 2.0), (float)(det.Y - 24)), brush, Brushes.Black); g.DrawTextBox(label, new PointF((float)(det.Left + det.Width / 2.0), (float)(det.Top - 24)), brush, Brushes.Black);
} }
var imagePath = Path.Combine(_dirConfig.ResultsDirectory, $"{annotation.Name}{Constants.RESULT_PREFIX}.jpg"); var imagePath = Path.Combine(_dirConfig.ResultsDirectory, $"{annotation.Name}{Constants.RESULT_PREFIX}.jpg");
@@ -24,7 +24,7 @@ public class GpsMatcherService(IGpsMatcherClient gpsMatcherClient,
private readonly DirectoriesConfig _dirConfig = dirConfig.Value; private readonly DirectoriesConfig _dirConfig = dirConfig.Value;
private const int ZOOM_LEVEL = 18; private const int ZOOM_LEVEL = 18;
private const int POINTS_COUNT = 10; private const int POINTS_COUNT = 10;
private const int DISTANCE_BETWEEN_POINTS_M = 120; private const int DISTANCE_BETWEEN_POINTS_M = 140;
private const double SATELLITE_RADIUS_M = DISTANCE_BETWEEN_POINTS_M * (POINTS_COUNT + 1); private const double SATELLITE_RADIUS_M = DISTANCE_BETWEEN_POINTS_M * (POINTS_COUNT + 1);
private const int MAX_AVG_POINTS = 2; private const int MAX_AVG_POINTS = 2;
@@ -88,7 +88,7 @@ public class GpsMatcherService(IGpsMatcherClient gpsMatcherClient,
_currentIndex = _currentRouteImages[result.Image]; _currentIndex = _currentRouteImages[result.Image];
_currentRouteImages.Remove(result.Image); _currentRouteImages.Remove(result.Image);
if (result.KeyPoints > gpsDeniedConfig.Value.MinKeyPoints) if (result.KeyPoints >= gpsDeniedConfig.Value.MinKeyPoints)
{ {
var direction = _lastGeoPoint.DirectionTo(result.GeoPoint); var direction = _lastGeoPoint.DirectionTo(result.GeoPoint);
_directions.Enqueue(direction); _directions.Enqueue(direction);
@@ -1,18 +1,17 @@
using System.Diagnostics; using System.Diagnostics;
using System.Text; using System.Text;
using Azaion.Common.DTO; using Azaion.Common.DTO;
using MediatR;
using MessagePack; using MessagePack;
using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options; using Microsoft.Extensions.Options;
using NetMQ; using NetMQ;
using NetMQ.Sockets; using NetMQ.Sockets;
namespace Azaion.Common.Services; namespace Azaion.Common.Services.Inference;
public interface IInferenceClient : IDisposable public interface IInferenceClient : IDisposable
{ {
event EventHandler<RemoteCommand>? InferenceDataReceived;
event EventHandler<RemoteCommand>? AIAvailabilityReceived;
void Send(RemoteCommand create); void Send(RemoteCommand create);
void Stop(); void Stop();
} }
@@ -20,21 +19,22 @@ public interface IInferenceClient : IDisposable
public class InferenceClient : IInferenceClient public class InferenceClient : IInferenceClient
{ {
private readonly ILogger<InferenceClient> _logger; private readonly ILogger<InferenceClient> _logger;
public event EventHandler<RemoteCommand>? BytesReceived;
public event EventHandler<RemoteCommand>? InferenceDataReceived;
public event EventHandler<RemoteCommand>? AIAvailabilityReceived;
private readonly DealerSocket _dealer = new(); private readonly DealerSocket _dealer = new();
private readonly NetMQPoller _poller = new(); private readonly NetMQPoller _poller = new();
private readonly Guid _clientId = Guid.NewGuid(); private readonly Guid _clientId = Guid.NewGuid();
private readonly InferenceClientConfig _inferenceClientConfig; private readonly InferenceClientConfig _inferenceClientConfig;
private readonly LoaderClientConfig _loaderClientConfig; private readonly LoaderClientConfig _loaderClientConfig;
private readonly IMediator _mediator;
public InferenceClient(ILogger<InferenceClient> logger, IOptions<InferenceClientConfig> inferenceConfig, IOptions<LoaderClientConfig> loaderConfig) public InferenceClient(ILogger<InferenceClient> logger, IOptions<InferenceClientConfig> inferenceConfig,
IMediator mediator,
IOptions<LoaderClientConfig> loaderConfig)
{ {
_logger = logger; _logger = logger;
_inferenceClientConfig = inferenceConfig.Value; _inferenceClientConfig = inferenceConfig.Value;
_loaderClientConfig = loaderConfig.Value; _loaderClientConfig = loaderConfig.Value;
_mediator = mediator;
Start(); Start();
} }
@@ -59,32 +59,31 @@ public class InferenceClient : IInferenceClient
_dealer.Options.Identity = Encoding.UTF8.GetBytes(_clientId.ToString("N")); _dealer.Options.Identity = Encoding.UTF8.GetBytes(_clientId.ToString("N"));
_dealer.Connect($"tcp://{_inferenceClientConfig.ZeroMqHost}:{_inferenceClientConfig.ZeroMqPort}"); _dealer.Connect($"tcp://{_inferenceClientConfig.ZeroMqHost}:{_inferenceClientConfig.ZeroMqPort}");
_dealer.ReceiveReady += (_, e) => ProcessClientCommand(e.Socket); _dealer.ReceiveReady += async (_, e) => await ProcessClientCommand(e.Socket);
_poller.Add(_dealer); _poller.Add(_dealer);
_ = Task.Run(() => _poller.RunAsync()); _ = Task.Run(() => _poller.RunAsync());
} }
private void ProcessClientCommand(NetMQSocket socket, CancellationToken ct = default) private async Task ProcessClientCommand(NetMQSocket socket, CancellationToken ct = default)
{ {
while (socket.TryReceiveFrameBytes(TimeSpan.Zero, out var bytes)) while (socket.TryReceiveFrameBytes(TimeSpan.Zero, out var bytes))
{ {
if (bytes?.Length == 0) if (bytes.Length == 0)
continue; continue;
var remoteCommand = MessagePackSerializer.Deserialize<RemoteCommand>(bytes, cancellationToken: ct); var remoteCommand = MessagePackSerializer.Deserialize<RemoteCommand>(bytes, cancellationToken: ct);
switch (remoteCommand.CommandType) switch (remoteCommand.CommandType)
{ {
case CommandType.DataBytes:
BytesReceived?.Invoke(this, remoteCommand);
break;
case CommandType.InferenceData: case CommandType.InferenceData:
InferenceDataReceived?.Invoke(this, remoteCommand); await _mediator.Publish(new InferenceDataEvent(remoteCommand), ct);
break; break;
case CommandType.AIAvailabilityResult: case CommandType.AIAvailabilityResult:
AIAvailabilityReceived?.Invoke(this, remoteCommand); var aiAvailabilityStatus = MessagePackSerializer.Deserialize<AIAvailabilityStatusEvent>(remoteCommand.Data, cancellationToken: ct);
await _mediator.Publish(aiAvailabilityStatus, ct);
break; break;
default:
throw new ArgumentOutOfRangeException();
} }
} }
} }
@@ -0,0 +1,56 @@
using Azaion.Common.DTO;
using Azaion.Common.DTO.Config;
using Azaion.Common.Extensions;
using Microsoft.Extensions.Options;
namespace Azaion.Common.Services.Inference;
/// <summary>
/// Contract for starting and cancelling remote AI inference runs.
/// </summary>
public interface IInferenceService
{
    /// <summary>Runs inference for the given media paths; completes on caller cancellation or remote completion.</summary>
    Task RunInference(List<string> mediaPaths, CancellationToken ct = default);

    /// <summary>Token source cancelled when the remote side reports the run is done.</summary>
    CancellationTokenSource InferenceCancelTokenSource { get; set; }

    /// <summary>Stops the underlying inference client.</summary>
    void StopInference();
}
// SHOULD BE ONLY ONE INSTANCE OF InferenceService. Do not add ANY NotificationHandler to it!
// _inferenceCancelTokenSource should be created only once.
/// <summary>
/// Drives remote AI inference over the <see cref="IInferenceClient"/>: logs
/// in, sends the recognition config and waits until either the caller cancels
/// or the remote side reports completion (via cancellation of
/// <see cref="InferenceCancelTokenSource"/> by the event handler).
/// </summary>
public class InferenceService : IInferenceService
{
    private readonly IInferenceClient _client;
    private readonly IAzaionApi _azaionApi;
    private readonly IOptions<AIRecognitionConfig> _aiConfigOptions;

    // Cancelled externally when the remote side sends "DONE"; replaced at the
    // start of every RunInference call.
    // NOTE(review): the previous source is never disposed and the header
    // comment says it should be created only once — confirm intent.
    public CancellationTokenSource InferenceCancelTokenSource { get; set; } = new();

    // Controls the availability-polling loop in CheckAIAvailabilityStatus.
    public CancellationTokenSource CheckAIAvailabilityTokenSource { get; set; } = new();

    public InferenceService(IInferenceClient client, IAzaionApi azaionApi, IOptions<AIRecognitionConfig> aiConfigOptions)
    {
        _client = client;
        _azaionApi = azaionApi;
        _aiConfigOptions = aiConfigOptions;
    }

    /// <summary>
    /// Polls the remote AI for availability every 10 seconds until
    /// <see cref="CheckAIAvailabilityTokenSource"/> is cancelled.
    /// </summary>
    public async Task CheckAIAvailabilityStatus()
    {
        CheckAIAvailabilityTokenSource = new CancellationTokenSource();
        try
        {
            while (!CheckAIAvailabilityTokenSource.IsCancellationRequested)
            {
                _client.Send(RemoteCommand.Create(CommandType.AIAvailabilityCheck));
                await Task.Delay(10000, CheckAIAvailabilityTokenSource.Token);
            }
        }
        catch (OperationCanceledException)
        {
            // Cancellation is the normal way to stop this loop; Task.Delay
            // surfaces it as an exception, so swallow it instead of letting
            // the polling task fault.
        }
    }

    /// <summary>
    /// Starts inference for the given media paths and completes when either
    /// <paramref name="ct"/> is cancelled or the remote side signals "DONE".
    /// </summary>
    /// <param name="mediaPaths">Paths of the media files to process.</param>
    /// <param name="ct">Caller-side cancellation.</param>
    public async Task RunInference(List<string> mediaPaths, CancellationToken ct = default)
    {
        InferenceCancelTokenSource = new CancellationTokenSource();
        _client.Send(RemoteCommand.Create(CommandType.Login, _azaionApi.Credentials));

        var aiConfig = _aiConfigOptions.Value;
        aiConfig.Paths = mediaPaths;
        _client.Send(RemoteCommand.Create(CommandType.Inference, aiConfig));

        // AsTask turns the combined token into an awaitable that completes on
        // cancellation from either side.
        using var combinedTokenSource = CancellationTokenSource.CreateLinkedTokenSource(ct, InferenceCancelTokenSource.Token);
        await combinedTokenSource.Token.AsTask();
    }

    // NOTE(review): this stops the client but does not cancel
    // InferenceCancelTokenSource, so a pending RunInference await only ends
    // via the caller's token — confirm that is the intended contract.
    public void StopInference() => _client.Stop();
}
@@ -0,0 +1,43 @@
using Azaion.Common.Database;
using Azaion.Common.DTO;
using Azaion.Common.Events;
using MediatR;
using MessagePack;
using Microsoft.Extensions.Logging;
namespace Azaion.Common.Services.Inference;
/// <summary>
/// MediatR bridge between raw inference events published by the inference
/// client and the application services (annotation persistence, UI updates).
/// </summary>
public class InferenceServiceEventHandler(IInferenceService inferenceService,
    IAnnotationService annotationService,
    IMediator mediator,
    ILogger<InferenceServiceEventHandler> logger) :
    INotificationHandler<InferenceDataEvent>,
    INotificationHandler<AIAvailabilityStatusEvent>
{
    /// <summary>
    /// Persists a detection frame coming from the remote AI, or stops the
    /// current inference run when the remote side sends the "DONE" marker.
    /// </summary>
    public async Task Handle(InferenceDataEvent e, CancellationToken ct)
    {
        try
        {
            if (e.Command.Message == "DONE")
            {
                // Remote side finished: cancelling this source releases the
                // await inside InferenceService.RunInference.
                await inferenceService.InferenceCancelTokenSource.CancelAsync();
                return;
            }

            var annImage = MessagePackSerializer.Deserialize<AnnotationImage>(e.Command.Data, cancellationToken: ct);
            var annotation = await annotationService.SaveAnnotation(annImage, ct);
            await mediator.Publish(new AnnotationAddedEvent(annotation), ct);
        }
        catch (Exception ex)
        {
            // Never let a single bad frame kill the event pipeline.
            logger.LogError(ex, ex.Message);
        }
    }

    /// <summary>
    /// Handles the AI availability report.
    /// NOTE(review): the reported status is unconditionally overwritten with
    /// Enabled — looks like a placeholder; confirm this is deliberate.
    /// </summary>
    public Task Handle(AIAvailabilityStatusEvent e, CancellationToken ct)
    {
        e.Status = AIAvailabilityEnum.Enabled;
        // No asynchronous work here: return synchronously instead of using an
        // async method with no await (compiler warning CS1998).
        return Task.CompletedTask;
    }
}
@@ -0,0 +1,9 @@
using Azaion.Common.DTO;
using MediatR;
namespace Azaion.Common.Services.Inference;
/// <summary>
/// MediatR notification wrapping a raw <see cref="RemoteCommand"/> received
/// from the inference back end.
/// </summary>
public class InferenceDataEvent : INotification
{
    public InferenceDataEvent(RemoteCommand command) => Command = command;

    /// <summary>The raw command carrying the inference payload or control message.</summary>
    public RemoteCommand Command { get; set; }
}
@@ -1,82 +0,0 @@
using Azaion.Common.Database;
using Azaion.Common.DTO;
using Azaion.Common.DTO.Config;
using Azaion.Common.Events;
using Azaion.Common.Extensions;
using MediatR;
using MessagePack;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
namespace Azaion.Common.Services;
public interface IInferenceService
{
Task RunInference(List<string> mediaPaths, CancellationToken ct = default);
void StopInference();
}
public class InferenceService : IInferenceService
{
private readonly IInferenceClient _client;
private readonly IAzaionApi _azaionApi;
private readonly IOptions<AIRecognitionConfig> _aiConfigOptions;
private readonly IAnnotationService _annotationService;
private readonly IMediator _mediator;
private CancellationTokenSource _inferenceCancelTokenSource = new();
public InferenceService(
ILogger<InferenceService> logger,
IInferenceClient client,
IAzaionApi azaionApi,
IOptions<AIRecognitionConfig> aiConfigOptions,
IAnnotationService annotationService,
IMediator mediator)
{
_client = client;
_azaionApi = azaionApi;
_aiConfigOptions = aiConfigOptions;
_annotationService = annotationService;
_mediator = mediator;
client.InferenceDataReceived += async (sender, command) =>
{
try
{
if (command.Message == "DONE")
{
_inferenceCancelTokenSource?.Cancel();
return;
}
var annImage = MessagePackSerializer.Deserialize<AnnotationImage>(command.Data);
await ProcessDetection(annImage);
}
catch (Exception e)
{
logger.LogError(e, e.Message);
}
};
}
private async Task ProcessDetection(AnnotationImage annotationImage, CancellationToken ct = default)
{
var annotation = await _annotationService.SaveAnnotation(annotationImage, ct);
await _mediator.Publish(new AnnotationAddedEvent(annotation), ct);
}
public async Task RunInference(List<string> mediaPaths, CancellationToken ct = default)
{
_inferenceCancelTokenSource = new CancellationTokenSource();
_client.Send(RemoteCommand.Create(CommandType.Login, _azaionApi.Credentials));
var aiConfig = _aiConfigOptions.Value;
aiConfig.Paths = mediaPaths;
_client.Send(RemoteCommand.Create(CommandType.Inference, aiConfig));
using var combinedTokenSource = CancellationTokenSource.CreateLinkedTokenSource(ct, _inferenceCancelTokenSource.Token);
await combinedTokenSource.Token.AsTask();
}
public void StopInference() => _client.Stop();
}
+77
View File
@@ -0,0 +1,77 @@
using System.Windows;
using System.Windows.Media.Imaging;
using Azaion.Common.DTO;
namespace Azaion.Common.Services;
/// <summary>
/// One square crop ("tile") of a large source image together with the
/// detections that fall inside it.
/// </summary>
public class TileResult(CanvasLabel tile, List<CanvasLabel> detections)
{
    /// <summary>Bounding square of the tile in original-image coordinates.</summary>
    public CanvasLabel Tile { get; set; } = tile;

    /// <summary>Detections covered by <see cref="Tile"/>.</summary>
    public List<CanvasLabel> Detections { get; set; } = detections;
}
/// <summary>
/// Splits a large image into square tiles, each covering a cluster of nearby
/// detections, so oversized frames can be processed tile by tile.
/// </summary>
public static class TileProcessor
{
    // Padding (in pixels) kept around detections when sizing a tile.
    public const int BORDER = 10;

    /// <summary>
    /// Greedily partitions <paramref name="detections"/> into tiles: repeatedly
    /// takes the top-most unassigned detection, grows a tile around it, and
    /// removes every detection that tile absorbed.
    /// </summary>
    /// <param name="originalSize">Pixel size of the source image.</param>
    /// <param name="detections">Detections in original-image coordinates.</param>
    /// <param name="cancellationToken">Stops early, returning the tiles built so far.</param>
    public static List<TileResult> Split(Size originalSize, List<CanvasLabel> detections, CancellationToken cancellationToken)
    {
        var results = new List<TileResult>();
        // Work on a copy so the caller's list is not mutated.
        var processingDetectionList = new List<CanvasLabel>(detections);
        while (processingDetectionList.Count > 0 && !cancellationToken.IsCancellationRequested)
        {
            var topMostDetection = processingDetectionList
                .OrderBy(d => d.Top)
                .First();
            var result = GetDetectionsInTile(originalSize, topMostDetection, processingDetectionList);
            // Relies on CanvasLabel equality semantics (reference equality
            // unless overridden) — TODO confirm against CanvasLabel.
            processingDetectionList.RemoveAll(x => result.Detections.Contains(x));
            results.Add(result);
        }
        return results;
    }

    /// <summary>
    /// Builds one square tile seeded by <paramref name="startDet"/> and greedily
    /// absorbs any other detection whose union with the tile still fits inside
    /// the size budget.
    /// </summary>
    private static TileResult GetDetectionsInTile(Size originalSize, CanvasLabel startDet, List<CanvasLabel> allDetections)
    {
        var tile = new CanvasLabel(startDet.Left, startDet.Right, startDet.Top, startDet.Bottom);
        // Tile edge: at least AI_TILE_SIZE, larger only when the seed
        // detection itself (plus border) is bigger.
        var maxSize = new List<double> { startDet.Width + BORDER, startDet.Height + BORDER, Constants.AI_TILE_SIZE }.Max();
        var selectedDetections = new List<CanvasLabel>{startDet};
        foreach (var det in allDetections)
        {
            if (det == startDet)
                continue;
            // Candidate tile covering both the current tile and this detection.
            var commonTile = new CanvasLabel(
                left: Math.Min(tile.Left, det.Left),
                right: Math.Max(tile.Right, det.Right),
                top: Math.Min(tile.Top, det.Top),
                bottom: Math.Max(tile.Bottom, det.Bottom)
            );
            // Skip detections that would push the tile past the size budget.
            if (commonTile.Width + BORDER > maxSize || commonTile.Height + BORDER > maxSize)
                continue;
            tile = commonTile;
            selectedDetections.Add(det);
        }
        // boundary-aware centering: centre the square on the detections' mean
        // centre, clamped so the tile stays inside the image where possible.
        // NOTE(review): if the image is smaller than maxSize the clamp yields 0
        // and the tile extends past the image edge — confirm callers handle this.
        var centerX = selectedDetections.Average(x => x.CenterX);
        var centerY = selectedDetections.Average(d => d.CenterY);
        tile.Width = maxSize;
        tile.Height = maxSize;
        tile.Left = Math.Max(0, Math.Min(originalSize.Width - maxSize, centerX - tile.Width / 2.0));
        tile.Top = Math.Max(0, Math.Min(originalSize.Height - maxSize, centerY - tile.Height / 2.0));
        return new TileResult(tile, selectedDetections);
    }
}
+23 -1
View File
@@ -80,7 +80,7 @@
<Grid.RowDefinitions> <Grid.RowDefinitions>
<RowDefinition Height="*"></RowDefinition> <RowDefinition Height="*"></RowDefinition>
<RowDefinition Height="35"></RowDefinition> <RowDefinition Height="35"></RowDefinition>
<RowDefinition Height="35"></RowDefinition> <RowDefinition Height="30"></RowDefinition>
</Grid.RowDefinitions> </Grid.RowDefinitions>
<controls:DetectionClasses <controls:DetectionClasses
x:Name="LvClasses" x:Name="LvClasses"
@@ -93,6 +93,27 @@
Click="ShowWithObjectsOnly_OnClick"> Click="ShowWithObjectsOnly_OnClick">
Показувати лише анотації з об'єктами Показувати лише анотації з об'єктами
</CheckBox> </CheckBox>
<Grid Name="LeftPaneSearch"
ShowGridLines="False"
Background="Black"
HorizontalAlignment="Stretch"
Grid.Row="2">
<Grid.ColumnDefinitions>
<ColumnDefinition Width="60" />
<ColumnDefinition Width="*" />
</Grid.ColumnDefinitions>
<Label
Grid.Column="0"
Grid.Row="0"
HorizontalAlignment="Stretch"
Margin="1"
Foreground="LightGray"
Content="Фільтр: "/>
<TextBox Name="TbSearch" TextChanged="TbSearch_OnTextChanged"
Grid.Column="1"
Foreground="Gray"/>
</Grid>
</Grid> </Grid>
<TabControl <TabControl
Name="Switcher" Name="Switcher"
@@ -117,6 +138,7 @@
Header="Редактор" Header="Редактор"
Visibility="Collapsed"> Visibility="Collapsed">
<controls:CanvasEditor x:Name="ExplorerEditor" <controls:CanvasEditor x:Name="ExplorerEditor"
Background="#01000000"
VerticalAlignment="Stretch" VerticalAlignment="Stretch"
HorizontalAlignment="Stretch" > HorizontalAlignment="Stretch" >
</controls:CanvasEditor> </controls:CanvasEditor>
+11 -6
View File
@@ -40,6 +40,8 @@ public partial class DatasetExplorer
public AnnotationThumbnail? CurrentAnnotation { get; set; } public AnnotationThumbnail? CurrentAnnotation { get; set; }
private static readonly Guid SearchActionId = Guid.NewGuid();
public DatasetExplorer( public DatasetExplorer(
IOptions<AppConfig> appConfig, IOptions<AppConfig> appConfig,
ILogger<DatasetExplorer> logger, ILogger<DatasetExplorer> logger,
@@ -193,15 +195,11 @@ public partial class DatasetExplorer
ThumbnailsView.SelectedIndex = index; ThumbnailsView.SelectedIndex = index;
var ann = CurrentAnnotation.Annotation; var ann = CurrentAnnotation.Annotation;
ExplorerEditor.Background = new ImageBrush ExplorerEditor.SetBackground(await ann.ImagePath.OpenImage());
{
ImageSource = await ann.ImagePath.OpenImage()
};
SwitchTab(toEditor: true); SwitchTab(toEditor: true);
var time = ann.Time;
ExplorerEditor.RemoveAllAnns(); ExplorerEditor.RemoveAllAnns();
ExplorerEditor.CreateDetections(time, ann.Detections, _appConfig.AnnotationConfig.DetectionClasses, ExplorerEditor.RenderSize); ExplorerEditor.CreateDetections(ann, _appConfig.AnnotationConfig.DetectionClasses, ExplorerEditor.RenderSize);
} }
catch (Exception e) catch (Exception e)
{ {
@@ -261,6 +259,7 @@ public partial class DatasetExplorer
SelectedAnnotationDict.Clear(); SelectedAnnotationDict.Clear();
var annThumbnails = _annotationsDict[ExplorerEditor.CurrentAnnClass.YoloId] var annThumbnails = _annotationsDict[ExplorerEditor.CurrentAnnClass.YoloId]
.WhereIf(withDetectionsOnly, x => x.Value.Detections.Any()) .WhereIf(withDetectionsOnly, x => x.Value.Detections.Any())
.WhereIf(TbSearch.Text.Length > 2, x => x.Key.ToLower().Contains(TbSearch.Text))
.Select(x => new AnnotationThumbnail(x.Value, _azaionApi.CurrentUser.Role.IsValidator())) .Select(x => new AnnotationThumbnail(x.Value, _azaionApi.CurrentUser.Role.IsValidator()))
.OrderBy(x => !x.IsSeed) .OrderBy(x => !x.IsSeed)
.ThenByDescending(x =>x.Annotation.CreatedDate); .ThenByDescending(x =>x.Annotation.CreatedDate);
@@ -295,4 +294,10 @@ public partial class DatasetExplorer
_configUpdater.Save(_appConfig); _configUpdater.Save(_appConfig);
await ReloadThumbnails(); await ReloadThumbnails();
} }
private void TbSearch_OnTextChanged(object sender, TextChangedEventArgs e)
{
TbSearch.Foreground = TbSearch.Text.Length > 2 ? Brushes.Black : Brushes.Gray;
ThrottleExt.Throttle(ReloadThumbnails, SearchActionId, TimeSpan.FromMilliseconds(400));;
}
} }
@@ -67,10 +67,10 @@ public class DatasetExplorerEventHandler(
var a = datasetExplorer.CurrentAnnotation!.Annotation; var a = datasetExplorer.CurrentAnnotation!.Annotation;
var detections = datasetExplorer.ExplorerEditor.CurrentDetections var detections = datasetExplorer.ExplorerEditor.CurrentDetections
.Select(x => new Detection(a.Name, x.GetLabel(datasetExplorer.ExplorerEditor.RenderSize))) .Select(x => new Detection(a.Name, x.ToYoloLabel(datasetExplorer.ExplorerEditor.RenderSize)))
.ToList(); .ToList();
var index = datasetExplorer.ThumbnailsView.SelectedIndex; var index = datasetExplorer.ThumbnailsView.SelectedIndex;
var annotation = await annotationService.SaveAnnotation(a.OriginalMediaName, a.Time, detections, token: token); var annotation = await annotationService.SaveAnnotation(a.OriginalMediaName, a.Name, a.Time, detections, token: token);
await ValidateAnnotations([annotation], token); await ValidateAnnotations([annotation], token);
await datasetExplorer.EditAnnotation(index + 1); await datasetExplorer.EditAnnotation(index + 1);
break; break;
+13 -11
View File
@@ -13,23 +13,14 @@ Results (file or annotations) is putted to the other queue, or the same socket,
<h2>Installation</h2> <h2>Installation</h2>
Prepare correct onnx model from YOLO:
```python
from ultralytics import YOLO
import netron
model = YOLO("azaion.pt")
model.export(format="onnx", imgsz=1280, nms=True, batch=4)
netron.start('azaion.onnx')
```
Read carefully about [export arguments](https://docs.ultralytics.com/modes/export/), you have to use nms=True, and batching with a proper batch size
<h3>Install libs</h3> <h3>Install libs</h3>
https://www.python.org/downloads/ https://www.python.org/downloads/
Windows Windows
- [Install CUDA](https://developer.nvidia.com/cuda-12-1-0-download-archive) - [Install CUDA](https://developer.nvidia.com/cuda-12-1-0-download-archive)
- [Install Visual Studio Build Tools 2019](https://visualstudio.microsoft.com/downloads/?q=build+tools)
Linux Linux
``` ```
@@ -44,6 +35,17 @@ Linux
nvcc --version nvcc --version
``` ```
Prepare correct onnx model from YOLO:
```python
from ultralytics import YOLO
import netron
model = YOLO("azaion.pt")
model.export(format="onnx", imgsz=1280, nms=True, batch=4)
netron.start('azaion.onnx')
```
Read carefully about [export arguments](https://docs.ultralytics.com/modes/export/), you have to use nms=True, and batching with a proper batch size
<h3>Install dependencies</h3> <h3>Install dependencies</h3>
1. Install python with max version 3.11. Pytorch for now supports 3.11 max 1. Install python with max version 3.11. Pytorch for now supports 3.11 max
@@ -0,0 +1,14 @@
# Availability states reported by the inference pipeline.
# NOTE(review): 200/500 appear to mirror HTTP-style success/error codes, with
# gaps left for intermediate states — confirm the numbering contract with the
# consuming side.
cdef enum AIAvailabilityEnum:
    NONE = 0
    DOWNLOADING = 10
    CONVERTING = 20
    UPLOADING = 30
    ENABLED = 200
    ERROR = 500


# Declarations for the status holder that is msgpack-serialised and sent to
# the client (the C# side deserialises an AIAvailabilityStatusEvent —
# presumably the matching contract; verify).
cdef class AIAvailabilityStatus:
    cdef AIAvailabilityEnum status   # current pipeline state
    cdef str error_message           # optional detail, set when reporting ERROR
    cdef bytes serialize(self)
    cdef set_status(self, AIAvailabilityEnum status, str error_message=*)
@@ -0,0 +1,36 @@
cimport constants_inf
import msgpack
# Human-readable labels for each availability state (used by __str__).
AIStatus2Text = {
    AIAvailabilityEnum.NONE: "None",
    AIAvailabilityEnum.DOWNLOADING: "Downloading",
    AIAvailabilityEnum.CONVERTING: "Converting",
    AIAvailabilityEnum.UPLOADING: "Uploading",
    AIAvailabilityEnum.ENABLED: "Enabled",
    AIAvailabilityEnum.ERROR: "Error",
}


cdef class AIAvailabilityStatus:
    # Mutable holder for the current AI availability state; serialised with
    # msgpack under short keys ("s" = status, "m" = message) to keep the
    # payload small. The receiving side must use the same key contract.

    def __init__(self):
        # Start in the neutral state with no error detail.
        self.status = AIAvailabilityEnum.NONE
        self.error_message = None

    def __str__(self):
        # "<Status> <error text>"; the error part is empty when no message is set.
        status_text = AIStatus2Text.get(self.status, "Unknown")
        error_text = self.error_message if self.error_message else ""
        return f"{status_text} {error_text}"

    cdef bytes serialize(self):
        # Pack status (enum -> int) and optional message for transport.
        return msgpack.packb({
            "s": self.status,
            "m": self.error_message
        })

    cdef set_status(self, AIAvailabilityEnum status, str error_message=None):
        # Update state and log it: errors go through logerror, normal
        # transitions log the human-readable form from __str__.
        self.status = status
        self.error_message = error_message
        if error_message is not None:
            constants_inf.logerror(<str>error_message)
        else:
            constants_inf.log(<str>str(self))
+2
View File
@@ -7,6 +7,8 @@ cdef class AIRecognitionConfig:
cdef public double tracking_probability_increase cdef public double tracking_probability_increase
cdef public double tracking_intersection_threshold cdef public double tracking_intersection_threshold
cdef public int big_image_tile_overlap_percent
cdef public bytes file_data cdef public bytes file_data
cdef public list[str] paths cdef public list[str] paths
cdef public int model_batch_size cdef public int model_batch_size
+4
View File
@@ -9,6 +9,7 @@ cdef class AIRecognitionConfig:
tracking_distance_confidence, tracking_distance_confidence,
tracking_probability_increase, tracking_probability_increase,
tracking_intersection_threshold, tracking_intersection_threshold,
big_image_tile_overlap_percent,
file_data, file_data,
paths, paths,
@@ -21,6 +22,7 @@ cdef class AIRecognitionConfig:
self.tracking_distance_confidence = tracking_distance_confidence self.tracking_distance_confidence = tracking_distance_confidence
self.tracking_probability_increase = tracking_probability_increase self.tracking_probability_increase = tracking_probability_increase
self.tracking_intersection_threshold = tracking_intersection_threshold self.tracking_intersection_threshold = tracking_intersection_threshold
self.big_image_tile_overlap_percent = big_image_tile_overlap_percent
self.file_data = file_data self.file_data = file_data
self.paths = paths self.paths = paths
@@ -31,6 +33,7 @@ cdef class AIRecognitionConfig:
f'probability_increase : {self.tracking_probability_increase}, ' f'probability_increase : {self.tracking_probability_increase}, '
f'intersection_threshold : {self.tracking_intersection_threshold}, ' f'intersection_threshold : {self.tracking_intersection_threshold}, '
f'frame_period_recognition : {self.frame_period_recognition}, ' f'frame_period_recognition : {self.frame_period_recognition}, '
f'big_image_tile_overlap_percent: {self.big_image_tile_overlap_percent}, '
f'paths: {self.paths}, ' f'paths: {self.paths}, '
f'model_batch_size: {self.model_batch_size}') f'model_batch_size: {self.model_batch_size}')
@@ -45,6 +48,7 @@ cdef class AIRecognitionConfig:
unpacked.get("t_dc", 0.0), unpacked.get("t_dc", 0.0),
unpacked.get("t_pi", 0.0), unpacked.get("t_pi", 0.0),
unpacked.get("t_it", 0.0), unpacked.get("t_it", 0.0),
unpacked.get("ov_p", 20),
unpacked.get("d", b''), unpacked.get("d", b''),
unpacked.get("p", []), unpacked.get("p", []),
+1 -2
View File
@@ -3,7 +3,7 @@ cdef class Detection:
cdef public str annotation_name cdef public str annotation_name
cdef public int cls cdef public int cls
cdef public overlaps(self, Detection det2) cdef public overlaps(self, Detection det2, float confidence_threshold)
cdef class Annotation: cdef class Annotation:
cdef public str name cdef public str name
@@ -12,5 +12,4 @@ cdef class Annotation:
cdef public list[Detection] detections cdef public list[Detection] detections
cdef public bytes image cdef public bytes image
cdef format_time(self, ms)
cdef bytes serialize(self) cdef bytes serialize(self)
+17 -17
View File
@@ -1,5 +1,5 @@
import msgpack import msgpack
from pathlib import Path cimport constants_inf
cdef class Detection: cdef class Detection:
def __init__(self, double x, double y, double w, double h, int cls, double confidence): def __init__(self, double x, double y, double w, double h, int cls, double confidence):
@@ -14,18 +14,29 @@ cdef class Detection:
def __str__(self): def __str__(self):
return f'{self.cls}: {self.x:.2f} {self.y:.2f} {self.w:.2f} {self.h:.2f}, prob: {(self.confidence*100):.1f}%' return f'{self.cls}: {self.x:.2f} {self.y:.2f} {self.w:.2f} {self.h:.2f}, prob: {(self.confidence*100):.1f}%'
cdef overlaps(self, Detection det2): def __eq__(self, other):
if not isinstance(other, Detection):
return False
if max(abs(self.x - other.x),
abs(self.y - other.y),
abs(self.w - other.w),
abs(self.h - other.h)) > constants_inf.TILE_DUPLICATE_CONFIDENCE_THRESHOLD:
return False
return True
cdef overlaps(self, Detection det2, float confidence_threshold):
cdef double overlap_x = 0.5 * (self.w + det2.w) - abs(self.x - det2.x) cdef double overlap_x = 0.5 * (self.w + det2.w) - abs(self.x - det2.x)
cdef double overlap_y = 0.5 * (self.h + det2.h) - abs(self.y - det2.y) cdef double overlap_y = 0.5 * (self.h + det2.h) - abs(self.y - det2.y)
cdef double overlap_area = max(0.0, overlap_x) * max(0.0, overlap_y) cdef double overlap_area = max(0.0, overlap_x) * max(0.0, overlap_y)
cdef double min_area = min(self.w * self.h, det2.w * det2.h) cdef double min_area = min(self.w * self.h, det2.w * det2.h)
return overlap_area / min_area > 0.6 return overlap_area / min_area > confidence_threshold
cdef class Annotation: cdef class Annotation:
def __init__(self, str name, long ms, list[Detection] detections): def __init__(self, str name, str original_media_name, long ms, list[Detection] detections):
self.original_media_name = Path(<str>name).stem.replace(" ", "") self.name = name
self.name = f'{self.original_media_name}_{self.format_time(ms)}' self.original_media_name = original_media_name
self.time = ms self.time = ms
self.detections = detections if detections is not None else [] self.detections = detections if detections is not None else []
for d in self.detections: for d in self.detections:
@@ -42,17 +53,6 @@ cdef class Annotation:
) )
return f"{self.name}: {detections_str}" return f"{self.name}: {detections_str}"
cdef format_time(self, ms):
# Calculate hours, minutes, seconds, and hundreds of milliseconds.
h = ms // 3600000 # Total full hours.
ms_remaining = ms % 3600000
m = ms_remaining // 60000 # Full minutes.
ms_remaining %= 60000
s = ms_remaining // 1000 # Full seconds.
f = (ms_remaining % 1000) // 100 # Hundreds of milliseconds.
h = h % 10
return f"{h}{m:02}{s:02}{f}"
cdef bytes serialize(self): cdef bytes serialize(self):
return msgpack.packb({ return msgpack.packb({
"n": self.name, "n": self.name,
+1 -1
View File
@@ -4,7 +4,7 @@ from PyInstaller.utils.hooks import collect_all
datas = [('venv\\Lib\\site-packages\\cv2', 'cv2')] datas = [('venv\\Lib\\site-packages\\cv2', 'cv2')]
binaries = [] binaries = []
hiddenimports = ['constants_inf', 'file_data', 'remote_command_inf', 'remote_command_handler_inf', 'annotation', 'loader_client', 'ai_config', 'tensorrt_engine', 'onnx_engine', 'inference_engine', 'inference', 'main-inf'] hiddenimports = ['constants_inf', 'file_data', 'remote_command_inf', 'remote_command_handler_inf', 'annotation', 'loader_client', 'ai_config', 'tensorrt_engine', 'onnx_engine', 'inference_engine', 'inference']
hiddenimports += collect_submodules('cv2') hiddenimports += collect_submodules('cv2')
tmp_ret = collect_all('psutil') tmp_ret = collect_all('psutil')
datas += tmp_ret[0]; binaries += tmp_ret[1]; hiddenimports += tmp_ret[2] datas += tmp_ret[0]; binaries += tmp_ret[1]; hiddenimports += tmp_ret[2]
+3 -1
View File
@@ -35,6 +35,7 @@ venv\Scripts\pyinstaller --name=azaion-inference ^
--collect-all jwt ^ --collect-all jwt ^
--collect-all loguru ^ --collect-all loguru ^
--hidden-import constants_inf ^ --hidden-import constants_inf ^
--hidden-import ai_availability_status ^
--hidden-import file_data ^ --hidden-import file_data ^
--hidden-import remote_command_inf ^ --hidden-import remote_command_inf ^
--hidden-import remote_command_handler_inf ^ --hidden-import remote_command_handler_inf ^
@@ -49,8 +50,9 @@ start.py
robocopy "dist\azaion-inference\_internal" "..\dist-azaion\_internal" "ai_config.cp312-win_amd64.pyd" "annotation.cp312-win_amd64.pyd" robocopy "dist\azaion-inference\_internal" "..\dist-azaion\_internal" "ai_config.cp312-win_amd64.pyd" "annotation.cp312-win_amd64.pyd"
robocopy "dist\azaion-inference\_internal" "..\dist-azaion\_internal" "constants_inf.cp312-win_amd64.pyd" "file_data.cp312-win_amd64.pyd" robocopy "dist\azaion-inference\_internal" "..\dist-azaion\_internal" "constants_inf.cp312-win_amd64.pyd" "file_data.cp312-win_amd64.pyd"
robocopy "dist\azaion-inference\_internal" "..\dist-azaion\_internal" "ai_availability_status.pyd"
robocopy "dist\azaion-inference\_internal" "..\dist-azaion\_internal" "remote_command_inf.cp312-win_amd64.pyd" "remote_command_handler_inf.cp312-win_amd64.pyd" robocopy "dist\azaion-inference\_internal" "..\dist-azaion\_internal" "remote_command_inf.cp312-win_amd64.pyd" "remote_command_handler_inf.cp312-win_amd64.pyd"
robocopy "dist\azaion-inference\_internal" "..\dist-azaion\_internal" "inference.cp312-win_amd64.pyd" "inference_engine.cp312-win_amd64.pyd" robocopy "dist\azaion-inference\_internal" "..\dist-azaion\_internal" "inference.cp312-win_amd64.py=d" "inference_engine.cp312-win_amd64.pyd"
robocopy "dist\azaion-inference\_internal" "..\dist-azaion\_internal" "loader_client.cp312-win_amd64.pyd" "tensorrt_engine.cp312-win_amd64.pyd" robocopy "dist\azaion-inference\_internal" "..\dist-azaion\_internal" "loader_client.cp312-win_amd64.pyd" "tensorrt_engine.cp312-win_amd64.pyd"
robocopy "dist\azaion-inference\_internal" "..\dist-azaion\_internal" "onnx_engine.cp312-win_amd64.pyd" "main_inference.cp312-win_amd64.pyd" robocopy "dist\azaion-inference\_internal" "..\dist-azaion\_internal" "onnx_engine.cp312-win_amd64.pyd" "main_inference.cp312-win_amd64.pyd"
+55
View File
@@ -0,0 +1,55 @@
/* Generated by Cython 3.1.2 */
#ifndef __PYX_HAVE__constants_inf
#define __PYX_HAVE__constants_inf
#include "Python.h"
#ifndef __PYX_HAVE_API__constants_inf
#ifdef CYTHON_EXTERN_C
#undef __PYX_EXTERN_C
#define __PYX_EXTERN_C CYTHON_EXTERN_C
#elif defined(__PYX_EXTERN_C)
#ifdef _MSC_VER
#pragma message ("Please do not define the '__PYX_EXTERN_C' macro externally. Use 'CYTHON_EXTERN_C' instead.")
#else
#warning Please do not define the '__PYX_EXTERN_C' macro externally. Use 'CYTHON_EXTERN_C' instead.
#endif
#else
#ifdef __cplusplus
#define __PYX_EXTERN_C extern "C"
#else
#define __PYX_EXTERN_C extern
#endif
#endif
#ifndef DL_IMPORT
#define DL_IMPORT(_T) _T
#endif
__PYX_EXTERN_C int TILE_DUPLICATE_CONFIDENCE_THRESHOLD;
#endif /* !__PYX_HAVE_API__constants_inf */
/* WARNING: the interface of the module init function changed in CPython 3.5. */
/* It now returns a PyModuleDef instance instead of a PyModule instance. */
/* WARNING: Use PyImport_AppendInittab("constants_inf", PyInit_constants_inf) instead of calling PyInit_constants_inf directly from Python 3.5 */
PyMODINIT_FUNC PyInit_constants_inf(void);
#if PY_VERSION_HEX >= 0x03050000 && (defined(__GNUC__) || defined(__clang__) || defined(_MSC_VER) || (defined(__cplusplus) && __cplusplus >= 201402L))
#if defined(__cplusplus) && __cplusplus >= 201402L
[[deprecated("Use PyImport_AppendInittab(\"constants_inf\", PyInit_constants_inf) instead of calling PyInit_constants_inf directly.")]] inline
#elif defined(__GNUC__) || defined(__clang__)
__attribute__ ((__deprecated__("Use PyImport_AppendInittab(\"constants_inf\", PyInit_constants_inf) instead of calling PyInit_constants_inf directly."), __unused__)) __inline__
#elif defined(_MSC_VER)
__declspec(deprecated("Use PyImport_AppendInittab(\"constants_inf\", PyInit_constants_inf) instead of calling PyInit_constants_inf directly.")) __inline
#endif
static PyObject* __PYX_WARN_IF_PyInit_constants_inf_INIT_CALLED(PyObject* res) {
return res;
}
#define PyInit_constants_inf() __PYX_WARN_IF_PyInit_constants_inf_INIT_CALLED(PyInit_constants_inf())
#endif
#endif /* !__PYX_HAVE__constants_inf */
+4
View File
@@ -13,5 +13,9 @@ cdef str MODELS_FOLDER
cdef int SMALL_SIZE_KB cdef int SMALL_SIZE_KB
cdef str SPLIT_SUFFIX
cdef int TILE_DUPLICATE_CONFIDENCE_THRESHOLD
cdef log(str log_message) cdef log(str log_message)
cdef logerror(str error) cdef logerror(str error)
cdef format_time(int ms)
+14
View File
@@ -12,6 +12,9 @@ cdef str MODELS_FOLDER = "models"
cdef int SMALL_SIZE_KB = 3 cdef int SMALL_SIZE_KB = 3
cdef str SPLIT_SUFFIX = "!split!"
cdef int TILE_DUPLICATE_CONFIDENCE_THRESHOLD = 5
logger.remove() logger.remove()
log_format = "[{time:HH:mm:ss} {level}] {message}" log_format = "[{time:HH:mm:ss} {level}] {message}"
logger.add( logger.add(
@@ -41,3 +44,14 @@ cdef log(str log_message):
cdef logerror(str error): cdef logerror(str error):
logger.error(error) logger.error(error)
cdef format_time(int ms):
# Calculate hours, minutes, seconds, and hundreds of milliseconds.
h = ms // 3600000 # Total full hours.
ms_remaining = ms % 3600000
m = ms_remaining // 60000 # Full minutes.
ms_remaining %= 60000
s = ms_remaining // 1000 # Full seconds.
f = (ms_remaining % 1000) // 100 # Hundreds of milliseconds.
h = h % 10
return f"{h}{m:02}{s:02}{f}"
+12 -3
View File
@@ -1,3 +1,4 @@
from ai_availability_status cimport AIAvailabilityStatus
from remote_command_inf cimport RemoteCommand from remote_command_inf cimport RemoteCommand
from annotation cimport Annotation, Detection from annotation cimport Annotation, Detection
from ai_config cimport AIRecognitionConfig from ai_config cimport AIRecognitionConfig
@@ -9,14 +10,18 @@ cdef class Inference:
cdef InferenceEngine engine cdef InferenceEngine engine
cdef object on_annotation cdef object on_annotation
cdef Annotation _previous_annotation cdef Annotation _previous_annotation
cdef dict[str, list(Detection)] _tile_detections
cdef AIRecognitionConfig ai_config cdef AIRecognitionConfig ai_config
cdef bint stop_signal cdef bint stop_signal
cdef AIAvailabilityStatus ai_availability_status
cdef str model_input cdef str model_input
cdef int model_width cdef int model_width
cdef int model_height cdef int model_height
cdef int tile_width
cdef int tile_height
cdef build_tensor_engine(self, object updater_callback) cdef bytes get_onnx_engine_bytes(self)
cdef init_ai(self) cdef init_ai(self)
cdef bint is_building_engine cdef bint is_building_engine
cdef bint is_video(self, str filepath) cdef bint is_video(self, str filepath)
@@ -24,11 +29,15 @@ cdef class Inference:
cdef run_inference(self, RemoteCommand cmd) cdef run_inference(self, RemoteCommand cmd)
cdef _process_video(self, RemoteCommand cmd, AIRecognitionConfig ai_config, str video_name) cdef _process_video(self, RemoteCommand cmd, AIRecognitionConfig ai_config, str video_name)
cdef _process_images(self, RemoteCommand cmd, AIRecognitionConfig ai_config, list[str] image_paths) cdef _process_images(self, RemoteCommand cmd, AIRecognitionConfig ai_config, list[str] image_paths)
cdef _process_images_inner(self, RemoteCommand cmd, AIRecognitionConfig ai_config, list frame_data)
cdef split_to_tiles(self, frame, path, overlap_percent)
cdef stop(self) cdef stop(self)
cdef preprocess(self, frames) cdef preprocess(self, frames)
cdef remove_overlapping_detections(self, list[Detection] detections) cdef remove_overlapping_detections(self, list[Detection] detections, float confidence_threshold=?)
cdef postprocess(self, output, ai_config) cdef postprocess(self, output, ai_config)
cdef split_list_extend(self, lst, chunk_size) cdef split_list_extend(self, lst, chunk_size)
cdef bint is_valid_annotation(self, Annotation annotation, AIRecognitionConfig ai_config) cdef bint is_valid_video_annotation(self, Annotation annotation, AIRecognitionConfig ai_config)
cdef bint is_valid_image_annotation(self, Annotation annotation)
cdef remove_tiled_duplicates(self, Annotation annotation)
+140 -63
View File
@@ -1,8 +1,12 @@
import mimetypes import mimetypes
import time import time
from pathlib import Path
import cv2 import cv2
import numpy as np import numpy as np
cimport constants_inf cimport constants_inf
from ai_availability_status cimport AIAvailabilityEnum, AIAvailabilityStatus
from remote_command_inf cimport RemoteCommand from remote_command_inf cimport RemoteCommand
from annotation cimport Detection, Annotation from annotation cimport Detection, Annotation
from ai_config cimport AIRecognitionConfig from ai_config cimport AIRecognitionConfig
@@ -54,66 +58,63 @@ cdef class Inference:
self.model_input = None self.model_input = None
self.model_width = 0 self.model_width = 0
self.model_height = 0 self.model_height = 0
self.tile_width = 0
self.tile_height = 0
self.engine = None self.engine = None
self.is_building_engine = False self.is_building_engine = False
self.ai_availability_status = AIAvailabilityStatus()
self.init_ai()
cdef build_tensor_engine(self, object updater_callback): cdef bytes get_onnx_engine_bytes(self):
if tensor_gpu_index == -1:
return
try:
engine_filename = TensorRTEngine.get_engine_filename(0)
models_dir = constants_inf.MODELS_FOLDER models_dir = constants_inf.MODELS_FOLDER
self.ai_availability_status.set_status(AIAvailabilityEnum.DOWNLOADING)
self.is_building_engine = True
updater_callback('downloading')
res = self.loader_client.load_big_small_resource(engine_filename, models_dir)
if res.err is None:
constants_inf.log('tensor rt engine is here, no need to build')
self.is_building_engine = False
updater_callback('enabled')
return
constants_inf.logerror(res.err)
# time.sleep(8) # prevent simultaneously loading dll and models
updater_callback('converting')
constants_inf.log('try to load onnx')
res = self.loader_client.load_big_small_resource(constants_inf.AI_ONNX_MODEL_FILE, models_dir) res = self.loader_client.load_big_small_resource(constants_inf.AI_ONNX_MODEL_FILE, models_dir)
if res.err is not None: if res.err is not None:
updater_callback(f'Error. {res.err}') raise Exception(res.err)
model_bytes = TensorRTEngine.convert_from_onnx(res.data) return res.data
updater_callback('uploading')
res = self.loader_client.upload_big_small_resource(model_bytes, <str> engine_filename, models_dir)
if res.err is not None:
updater_callback(f'Error. {res.err}')
constants_inf.log(f'uploaded {engine_filename} to CDN and API')
self.is_building_engine = False
updater_callback('enabled')
except Exception as e:
updater_callback(f'Error. {str(e)}')
cdef init_ai(self): cdef init_ai(self):
constants_inf.log(<str> 'init AI...')
try:
while self.is_building_engine:
time.sleep(1)
if self.engine is not None: if self.engine is not None:
return return
self.is_building_engine = True
models_dir = constants_inf.MODELS_FOLDER models_dir = constants_inf.MODELS_FOLDER
if tensor_gpu_index > -1: if tensor_gpu_index > -1:
while self.is_building_engine: try:
time.sleep(1)
engine_filename = TensorRTEngine.get_engine_filename(0) engine_filename = TensorRTEngine.get_engine_filename(0)
self.ai_availability_status.set_status(AIAvailabilityEnum.DOWNLOADING)
res = self.loader_client.load_big_small_resource(engine_filename, models_dir) res = self.loader_client.load_big_small_resource(engine_filename, models_dir)
if res.err is not None: if res.err is not None:
raise Exception(res.err) raise Exception(res.err)
self.engine = TensorRTEngine(res.data) self.engine = TensorRTEngine(res.data)
else: self.ai_availability_status.set_status(AIAvailabilityEnum.ENABLED)
res = self.loader_client.load_big_small_resource(constants_inf.AI_ONNX_MODEL_FILE, models_dir) except Exception as e:
self.ai_availability_status.set_status(AIAvailabilityEnum.ERROR, <str>str(e))
onnx_engine_bytes = self.get_onnx_engine_bytes()
self.ai_availability_status.set_status(AIAvailabilityEnum.CONVERTING)
model_bytes = TensorRTEngine.convert_from_onnx(res.data)
self.ai_availability_status.set_status(AIAvailabilityEnum.UPLOADING)
res = self.loader_client.upload_big_small_resource(model_bytes, <str> engine_filename, models_dir)
if res.err is not None: if res.err is not None:
raise Exception(res.err) self.ai_availability_status.set_status(AIAvailabilityEnum.ERROR, res.err)
self.engine = OnnxEngine(res.data) self.ai_availability_status.set_status(AIAvailabilityEnum.ENABLED)
else:
self.engine = OnnxEngine(<bytes>self.get_onnx_engine_bytes())
self.is_building_engine = False
self.model_height, self.model_width = self.engine.get_input_shape() self.model_height, self.model_width = self.engine.get_input_shape()
#todo: temporarily, send it from the client
self.tile_width = 550
self.tile_height = 550
except Exception as e:
self.ai_availability_status.set_status(AIAvailabilityEnum.ERROR, <str>str(e))
self.is_building_engine = False
cdef preprocess(self, frames): cdef preprocess(self, frames):
blobs = [cv2.dnn.blobFromImage(frame, blobs = [cv2.dnn.blobFromImage(frame,
@@ -150,13 +151,13 @@ cdef class Inference:
h = y2 - y1 h = y2 - y1
if conf >= ai_config.probability_threshold: if conf >= ai_config.probability_threshold:
detections.append(Detection(x, y, w, h, class_id, conf)) detections.append(Detection(x, y, w, h, class_id, conf))
filtered_detections = self.remove_overlapping_detections(detections) filtered_detections = self.remove_overlapping_detections(detections, ai_config.tracking_intersection_threshold)
results.append(filtered_detections) results.append(filtered_detections)
return results return results
except Exception as e: except Exception as e:
raise RuntimeError(f"Failed to postprocess: {str(e)}") raise RuntimeError(f"Failed to postprocess: {str(e)}")
cdef remove_overlapping_detections(self, list[Detection] detections): cdef remove_overlapping_detections(self, list[Detection] detections, float confidence_threshold=0.6):
cdef Detection det1, det2 cdef Detection det1, det2
filtered_output = [] filtered_output = []
filtered_out_indexes = [] filtered_out_indexes = []
@@ -168,7 +169,7 @@ cdef class Inference:
res = det1_index res = det1_index
for det2_index in range(det1_index + 1, len(detections)): for det2_index in range(det1_index + 1, len(detections)):
det2 = detections[det2_index] det2 = detections[det2_index]
if det1.overlaps(det2): if det1.overlaps(det2, confidence_threshold):
if det1.confidence > det2.confidence or ( if det1.confidence > det2.confidence or (
det1.confidence == det2.confidence and det1.cls < det2.cls): # det1 has higher confidence or lower class_id det1.confidence == det2.confidence and det1.cls < det2.cls): # det1 has higher confidence or lower class_id
filtered_out_indexes.append(det2_index) filtered_out_indexes.append(det2_index)
@@ -211,12 +212,11 @@ cdef class Inference:
images.append(m) images.append(m)
# images first, it's faster # images first, it's faster
if len(images) > 0: if len(images) > 0:
for chunk in self.split_list_extend(images, self.engine.get_batch_size()): constants_inf.log(<str>f'run inference on {" ".join(images)}...')
constants_inf.log(f'run inference on {" ".join(chunk)}...') self._process_images(cmd, ai_config, images)
self._process_images(cmd, ai_config, chunk)
if len(videos) > 0: if len(videos) > 0:
for v in videos: for v in videos:
constants_inf.log(f'run inference on {v}...') constants_inf.log(<str>f'run inference on {v}...')
self._process_video(cmd, ai_config, v) self._process_video(cmd, ai_config, v)
@@ -224,8 +224,10 @@ cdef class Inference:
cdef int frame_count = 0 cdef int frame_count = 0
cdef list batch_frames = [] cdef list batch_frames = []
cdef list[int] batch_timestamps = [] cdef list[int] batch_timestamps = []
cdef Annotation annotation
self._previous_annotation = None self._previous_annotation = None
v_input = cv2.VideoCapture(<str>video_name) v_input = cv2.VideoCapture(<str>video_name)
while v_input.isOpened() and not self.stop_signal: while v_input.isOpened() and not self.stop_signal:
ret, frame = v_input.read() ret, frame = v_input.read()
@@ -245,13 +247,15 @@ cdef class Inference:
list_detections = self.postprocess(outputs, ai_config) list_detections = self.postprocess(outputs, ai_config)
for i in range(len(list_detections)): for i in range(len(list_detections)):
detections = list_detections[i] detections = list_detections[i]
annotation = Annotation(video_name, batch_timestamps[i], detections)
if self.is_valid_annotation(annotation, ai_config): original_media_name = Path(<str>video_name).stem.replace(" ", "")
name = f'{original_media_name}_{constants_inf.format_time(batch_timestamps[i])}'
annotation = Annotation(name, original_media_name, batch_timestamps[i], detections)
if self.is_valid_video_annotation(annotation, ai_config):
_, image = cv2.imencode('.jpg', batch_frames[i]) _, image = cv2.imencode('.jpg', batch_frames[i])
annotation.image = image.tobytes() annotation.image = image.tobytes()
self._previous_annotation = annotation self._previous_annotation = annotation
print(annotation)
self.on_annotation(cmd, annotation) self.on_annotation(cmd, annotation)
batch_frames.clear() batch_frames.clear()
@@ -260,22 +264,68 @@ cdef class Inference:
cdef _process_images(self, RemoteCommand cmd, AIRecognitionConfig ai_config, list[str] image_paths): cdef _process_images(self, RemoteCommand cmd, AIRecognitionConfig ai_config, list[str] image_paths):
cdef list frames = [] cdef list frame_data
cdef list timestamps = [] self._tile_detections = {}
self._previous_annotation = None for path in image_paths:
for image in image_paths: frame_data = []
frame = cv2.imread(image) frame = cv2.imread(<str>path)
frames.append(frame) img_h, img_w, _ = frame.shape
timestamps.append(0) if frame is None:
constants_inf.logerror(<str>f'Failed to read image {path}')
continue
original_media_name = Path(<str> path).stem.replace(" ", "")
if img_h <= 1.5 * self.model_height and img_w <= 1.5 * self.model_width:
frame_data.append((frame, original_media_name, f'{original_media_name}_000000'))
else:
res = self.split_to_tiles(frame, path, ai_config.big_image_tile_overlap_percent)
frame_data.extend(res)
if len(frame_data) > self.engine.get_batch_size():
for chunk in self.split_list_extend(frame_data, self.engine.get_batch_size()):
self._process_images_inner(cmd, ai_config, chunk)
for chunk in self.split_list_extend(frame_data, self.engine.get_batch_size()):
self._process_images_inner(cmd, ai_config, chunk)
cdef split_to_tiles(self, frame, path, overlap_percent):
constants_inf.log(<str>f'splitting image {path} to tiles...')
img_h, img_w, _ = frame.shape
stride_w = int(self.tile_width * (1 - overlap_percent / 100))
stride_h = int(self.tile_height * (1 - overlap_percent / 100))
results = []
original_media_name = Path(<str> path).stem.replace(" ", "")
for y in range(0, img_h, stride_h):
for x in range(0, img_w, stride_w):
x_end = min(x + self.tile_width, img_w)
y_end = min(y + self.tile_height, img_h)
# correct x,y for the close-to-border tiles
if x_end - x < self.tile_width:
if img_w - (x - stride_w) <= self.tile_width:
continue # the previous tile already covered the last gap
x = img_w - self.tile_width
if y_end - y < self.tile_height:
if img_h - (y - stride_h) <= self.tile_height:
continue # the previous tile already covered the last gap
y = img_h - self.tile_height
tile = frame[y:y_end, x:x_end]
name = f'{original_media_name}{constants_inf.SPLIT_SUFFIX}{x:04d}_{y:04d}!_000000'
results.append((tile, original_media_name, name))
return results
cdef _process_images_inner(self, RemoteCommand cmd, AIRecognitionConfig ai_config, list frame_data):
cdef list frames, original_media_names, names
cdef Annotation annotation
frames, original_media_names, names = map(list, zip(*frame_data))
input_blob = self.preprocess(frames) input_blob = self.preprocess(frames)
outputs = self.engine.run(input_blob) outputs = self.engine.run(input_blob)
list_detections = self.postprocess(outputs, ai_config) list_detections = self.postprocess(outputs, ai_config)
for i in range(len(list_detections)): for i in range(len(list_detections)):
detections = list_detections[i] annotation = Annotation(names[i], original_media_names[i], 0, list_detections[i])
annotation = Annotation(image_paths[i], timestamps[i], detections) if self.is_valid_image_annotation(annotation):
_, image = cv2.imencode('.jpg', frames[i]) _, image = cv2.imencode('.jpg', frames[i])
annotation.image = image.tobytes() annotation.image = image.tobytes()
self.on_annotation(cmd, annotation) self.on_annotation(cmd, annotation)
@@ -284,8 +334,33 @@ cdef class Inference:
cdef stop(self): cdef stop(self):
self.stop_signal = True self.stop_signal = True
cdef bint is_valid_annotation(self, Annotation annotation, AIRecognitionConfig ai_config): cdef remove_tiled_duplicates(self, Annotation annotation):
# No detections, invalid right = annotation.name.rindex('!')
left = annotation.name.index(constants_inf.SPLIT_SUFFIX) + len(constants_inf.SPLIT_SUFFIX)
x_str, y_str = annotation.name[left:right].split('_')
x = int(x_str)
y = int(y_str)
for det in annotation.detections:
x1 = det.x * self.tile_width
y1 = det.y * self.tile_height
det_abs = Detection(x + x1, y + y1, det.w * self.tile_width, det.h * self.tile_height, det.cls, det.confidence)
detections = self._tile_detections.setdefault(annotation.original_media_name, [])
if det_abs in detections:
annotation.detections.remove(det)
else:
detections.append(det_abs)
cdef bint is_valid_image_annotation(self, Annotation annotation):
if constants_inf.SPLIT_SUFFIX in annotation.name:
self.remove_tiled_duplicates(annotation)
if not annotation.detections:
return False
return True
cdef bint is_valid_video_annotation(self, Annotation annotation, AIRecognitionConfig ai_config):
if constants_inf.SPLIT_SUFFIX in annotation.name:
self.remove_tiled_duplicates(annotation)
if not annotation.detections: if not annotation.detections:
return False return False
@@ -322,7 +397,9 @@ cdef class Inference:
closest_det = prev_det closest_det = prev_det
# Check if beyond tracking distance # Check if beyond tracking distance
if min_distance_sq > ai_config.tracking_distance_confidence: dist_px = ai_config.tracking_distance_confidence * self.model_width
dist_px_sq = dist_px * dist_px
if min_distance_sq > dist_px_sq:
return True return True
# Check probability increase # Check probability increase
+2 -2
View File
@@ -44,8 +44,8 @@ cdef class CommandProcessor:
if command.command_type == CommandType.INFERENCE: if command.command_type == CommandType.INFERENCE:
self.inference_queue.put(command) self.inference_queue.put(command)
elif command.command_type == CommandType.AI_AVAILABILITY_CHECK: elif command.command_type == CommandType.AI_AVAILABILITY_CHECK:
self.inference.build_tensor_engine(lambda status: self.remote_handler.send(command.client_id, status = self.inference.ai_availability_status.serialize()
RemoteCommand(CommandType.AI_AVAILABILITY_RESULT, None, status).serialize())) self.remote_handler.send(command.client_id, RemoteCommand(CommandType.AI_AVAILABILITY_RESULT, status).serialize())
elif command.command_type == CommandType.STOP_INFERENCE: elif command.command_type == CommandType.STOP_INFERENCE:
self.inference.stop() self.inference.stop()
elif command.command_type == CommandType.EXIT: elif command.command_type == CommandType.EXIT:
+3 -3
View File
@@ -15,12 +15,12 @@ cdef class OnnxEngine(InferenceEngine):
model_meta = self.session.get_modelmeta() model_meta = self.session.get_modelmeta()
constants_inf.log(f"Metadata: {model_meta.custom_metadata_map}") constants_inf.log(f"Metadata: {model_meta.custom_metadata_map}")
cpdef tuple get_input_shape(self): cdef tuple get_input_shape(self):
shape = self.input_shape shape = self.input_shape
return shape[2], shape[3] return shape[2], shape[3]
cpdef int get_batch_size(self): cdef int get_batch_size(self):
return self.batch_size return self.batch_size
cpdef run(self, input_data): cdef run(self, input_data):
return self.session.run(None, {self.input_name: input_data}) return self.session.run(None, {self.input_name: input_data})
+3 -2
View File
@@ -7,11 +7,12 @@ cryptography==44.0.2
psutil psutil
msgpack msgpack
pyjwt pyjwt
zmq pyzmq
requests requests
pyyaml pyyaml
pycuda pycuda
tensorrt tensorrt==10.11.0.33
pynvml pynvml
boto3 boto3
loguru loguru
pytest
+27 -14
View File
@@ -2,19 +2,31 @@ from setuptools import setup, Extension
from Cython.Build import cythonize from Cython.Build import cythonize
import numpy as np import numpy as np
debug_args = {}
trace_line = False
# debug_args = {
# 'extra_compile_args': ['-O0', '-g'],
# 'extra_link_args': ['-g'],
# 'define_macros': [('CYTHON_TRACE_NOGIL', '1')]
# }
# trace_line = True
extensions = [ extensions = [
Extension('constants_inf', ['constants_inf.pyx']), Extension('constants_inf', ['constants_inf.pyx'], **debug_args),
Extension('file_data', ['file_data.pyx']), Extension('ai_availability_status', ['ai_availability_status.pyx'], **debug_args),
Extension('remote_command_inf', ['remote_command_inf.pyx']), Extension('file_data', ['file_data.pyx'], **debug_args),
Extension('remote_command_handler_inf', ['remote_command_handler_inf.pyx']), Extension('remote_command_inf', ['remote_command_inf.pyx'], **debug_args),
Extension('annotation', ['annotation.pyx']), Extension('remote_command_handler_inf', ['remote_command_handler_inf.pyx'], **debug_args),
Extension('loader_client', ['loader_client.pyx']), Extension('annotation', ['annotation.pyx'], **debug_args),
Extension('ai_config', ['ai_config.pyx']), Extension('loader_client', ['loader_client.pyx'], **debug_args),
Extension('tensorrt_engine', ['tensorrt_engine.pyx'], include_dirs=[np.get_include()]), Extension('ai_config', ['ai_config.pyx'], **debug_args),
Extension('onnx_engine', ['onnx_engine.pyx'], include_dirs=[np.get_include()]), Extension('tensorrt_engine', ['tensorrt_engine.pyx'], include_dirs=[np.get_include()], **debug_args),
Extension('inference_engine', ['inference_engine.pyx'], include_dirs=[np.get_include()]), Extension('onnx_engine', ['onnx_engine.pyx'], include_dirs=[np.get_include()], **debug_args),
Extension('inference', ['inference.pyx'], include_dirs=[np.get_include()]), Extension('inference_engine', ['inference_engine.pyx'], include_dirs=[np.get_include()], **debug_args),
Extension('main_inference', ['main_inference.pyx']), Extension('inference', ['inference.pyx'], include_dirs=[np.get_include()], **debug_args),
Extension('main_inference', ['main_inference.pyx'], **debug_args),
] ]
setup( setup(
@@ -23,10 +35,11 @@ setup(
extensions, extensions,
compiler_directives={ compiler_directives={
"language_level": 3, "language_level": 3,
"emit_code_comments" : False, "emit_code_comments": False,
"binding": True, "binding": True,
'boundscheck': False, 'boundscheck': False,
'wraparound': False 'wraparound': False,
'linetrace': trace_line
} }
), ),
install_requires=[ install_requires=[
+37
View File
@@ -0,0 +1,37 @@
from setuptools import setup, Extension
from Cython.Build import cythonize
import numpy as np
# Build configuration for the azaion.ai Cython extension modules.

# Modules that operate on raw image/tensor buffers and therefore need the
# NumPy C headers at compile time.
_NUMPY_EXT_NAMES = frozenset((
    'tensorrt_engine',
    'onnx_engine',
    'inference_engine',
    'inference',
))

# Every .pyx module compiled into the package, in build order.
_EXT_NAMES = (
    'constants_inf',
    'file_data',
    'remote_command_inf',
    'remote_command_handler_inf',
    'annotation',
    'loader_client',
    'ai_config',
    'tensorrt_engine',
    'onnx_engine',
    'inference_engine',
    'inference',
    'main_inference',
)


def _make_extension(name):
    # Only pass include_dirs for the NumPy-dependent modules so the resulting
    # Extension objects match the hand-written originals exactly.
    extra = {'include_dirs': [np.get_include()]} if name in _NUMPY_EXT_NAMES else {}
    return Extension(name, [name + '.pyx'], **extra)


extensions = [_make_extension(name) for name in _EXT_NAMES]

setup(
    name="azaion.ai",
    ext_modules=cythonize(
        extensions,
        compiler_directives={
            # Python 3 semantics, no generated-code comments, introspectable
            # bindings, and unchecked indexing/wraparound for speed.
            "language_level": 3,
            "emit_code_comments": False,
            "binding": True,
            "boundscheck": False,
            "wraparound": False,
        },
    ),
    install_requires=[
        'ultralytics>=8.0.0',
        'pywin32; platform_system=="Windows"'
    ],
    zip_safe=False,
)
+3 -3
View File
@@ -17,8 +17,8 @@ cdef class TensorRTEngine(InferenceEngine):
cdef object stream cdef object stream
cpdef tuple get_input_shape(self) cdef tuple get_input_shape(self)
cpdef int get_batch_size(self) cdef int get_batch_size(self)
cpdef run(self, input_data) cdef run(self, input_data)
+3 -3
View File
@@ -112,13 +112,13 @@ cdef class TensorRTEngine(InferenceEngine):
constants_inf.log('conversion done!') constants_inf.log('conversion done!')
return bytes(plan) return bytes(plan)
cpdef tuple get_input_shape(self): cdef tuple get_input_shape(self):
return self.input_shape[2], self.input_shape[3] return self.input_shape[2], self.input_shape[3]
cpdef int get_batch_size(self): cdef int get_batch_size(self):
return self.batch_size return self.batch_size
cpdef run(self, input_data): cdef run(self, input_data):
try: try:
cuda.memcpy_htod_async(self.d_input, input_data, self.stream) cuda.memcpy_htod_async(self.d_input, input_data, self.stream)
self.context.set_tensor_address(self.input_name, int(self.d_input)) # input buffer self.context.set_tensor_address(self.input_name, int(self.d_input)) # input buffer
+30
View File
@@ -0,0 +1,30 @@
import inference
from ai_config import AIRecognitionConfig
from unittest.mock import Mock
import numpy as np
from loader_client import LoaderClient
def test_split_to_tiles():
loader_client = LoaderClient("test", 0)
ai_config = AIRecognitionConfig(
frame_period_recognition=4,
frame_recognition_seconds=2,
probability_threshold=0.2,
tracking_distance_confidence=0.15,
tracking_probability_increase=0.15,
tracking_intersection_threshold=0.6,
big_image_tile_overlap_percent=20,
file_data=None,
paths=[],
model_batch_size=4
)
inf = inference.Inference(loader_client, ai_config)
test_frame = np.zeros((6336, 8448, 3), dtype=np.uint8)
inf.init_ai()
inf.split_to_tiles(test_frame, 'test_image.jpg', ai_config.big_image_tile_overlap_percent)
+7 -1
View File
@@ -2,9 +2,14 @@ import os
import subprocess import subprocess
cimport constants cimport constants
cdef class HardwareService: cdef class HardwareService:
cdef str _CACHED_HW_INFO = None
@staticmethod @staticmethod
cdef str get_hardware_info(): cdef str get_hardware_info():
global _CACHED_HW_INFO
if _CACHED_HW_INFO is not None:
return <str> _CACHED_HW_INFO
if os.name == 'nt': # windows if os.name == 'nt': # windows
os_command = ( os_command = (
"powershell -Command \"" "powershell -Command \""
@@ -34,5 +39,6 @@ cdef class HardwareService:
cdef str drive_serial = lines[len_lines-1] cdef str drive_serial = lines[len_lines-1]
cdef str res = f'CPU: {cpu}. GPU: {gpu}. Memory: {memory}. DriveSerial: {drive_serial}' cdef str res = f'CPU: {cpu}. GPU: {gpu}. Memory: {memory}. DriveSerial: {drive_serial}'
constants.log(f'Gathered hardware: {res}') constants.log(<str>f'Gathered hardware: {res}')
_CACHED_HW_INFO = res
return res return res
+1 -1
View File
@@ -3,7 +3,7 @@ Cython
psutil psutil
msgpack msgpack
pyjwt pyjwt
zmq pyzmq
requests requests
pyyaml pyyaml
boto3 boto3
-3
View File
@@ -10,7 +10,4 @@ public class ApiCredentials
[Key(nameof(Password))] [Key(nameof(Password))]
public string Password { get; set; } = null!; public string Password { get; set; } = null!;
public bool IsValid() =>
!string.IsNullOrWhiteSpace(Email) && !string.IsNullOrWhiteSpace(Password);
} }
+3 -8
View File
@@ -1,4 +1,5 @@
using System.Windows; using System.Windows;
using Azaion.Common;
using Microsoft.Extensions.Configuration; using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting; using Microsoft.Extensions.Hosting;
@@ -28,7 +29,7 @@ public partial class App
var host = Host.CreateDefaultBuilder() var host = Host.CreateDefaultBuilder()
.ConfigureAppConfiguration((_, config) => config .ConfigureAppConfiguration((_, config) => config
.AddCommandLine(Environment.GetCommandLineArgs()) .AddCommandLine(Environment.GetCommandLineArgs())
.AddJsonFile(Constants.CONFIG_JSON_FILE, optional: true)) .AddJsonFile(Constants.LOADER_CONFIG_PATH, optional: true))
.UseSerilog() .UseSerilog()
.ConfigureServices((context, services) => .ConfigureServices((context, services) =>
{ {
@@ -36,7 +37,7 @@ public partial class App
services.Configure<DirectoriesConfig>(context.Configuration.GetSection(nameof(DirectoriesConfig))); services.Configure<DirectoriesConfig>(context.Configuration.GetSection(nameof(DirectoriesConfig)));
services.AddHttpClient<IAzaionApi, AzaionApi>((sp, client) => services.AddHttpClient<IAzaionApi, AzaionApi>((sp, client) =>
{ {
client.BaseAddress = new Uri(Constants.API_URL); client.BaseAddress = new Uri(Constants.DEFAULT_API_URL);
client.DefaultRequestHeaders.Add("Accept", "application/json"); client.DefaultRequestHeaders.Add("Accept", "application/json");
client.DefaultRequestHeaders.Add("User-Agent", "Azaion.LoaderUI"); client.DefaultRequestHeaders.Add("User-Agent", "Azaion.LoaderUI");
}); });
@@ -46,11 +47,5 @@ public partial class App
host.Services.GetRequiredService<Login>().Show(); host.Services.GetRequiredService<Login>().Show();
} }
//AFter:
//_loaderClient.Login(credentials);
//_loaderClient.Dispose();
} }
+12 -2
View File
@@ -24,7 +24,6 @@
<PackageReference Include="Microsoft.Extensions.Hosting" Version="9.0.5" /> <PackageReference Include="Microsoft.Extensions.Hosting" Version="9.0.5" />
<PackageReference Include="Microsoft.Extensions.Hosting.Abstractions" Version="9.0.5" /> <PackageReference Include="Microsoft.Extensions.Hosting.Abstractions" Version="9.0.5" />
<PackageReference Include="Microsoft.Extensions.Http" Version="9.0.5" /> <PackageReference Include="Microsoft.Extensions.Http" Version="9.0.5" />
<PackageReference Include="NetMQ" Version="4.0.1.13" />
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" /> <PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="Serilog.Extensions.Hosting" Version="9.0.0" /> <PackageReference Include="Serilog.Extensions.Hosting" Version="9.0.0" />
<PackageReference Include="Serilog.Extensions.Logging" Version="9.0.1" /> <PackageReference Include="Serilog.Extensions.Logging" Version="9.0.1" />
@@ -34,9 +33,20 @@
</ItemGroup> </ItemGroup>
<ItemGroup> <ItemGroup>
<Content Include="loaderconfig.json"> <Content Include="loaderconfig.prod.json">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</Content>
<None Remove="updater.cmd" />
<Content Include="updater.cmd">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</Content>
<Content Update="loaderconfig.stage.json">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory> <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</Content> </Content>
</ItemGroup> </ItemGroup>
<ItemGroup>
<ProjectReference Include="..\Azaion.Common\Azaion.Common.csproj" />
</ItemGroup>
</Project> </Project>
-13
View File
@@ -1,13 +0,0 @@
namespace Azaion.LoaderUI;
public static class Constants
{
public const string CONFIG_JSON_FILE = "loaderconfig.json";
public const string API_URL = "https://api.azaion.com";
public const string AZAION_SUITE_EXE = "Azaion.Suite.exe";
public const string SUITE_FOLDER = "suite";
public const string INFERENCE_EXE = "azaion-inference";
public const string EXTERNAL_LOADER_PATH = "azaion-loader.exe";
public const int EXTERNAL_LOADER_PORT = 5020;
public const string EXTERNAL_LOADER_HOST = "127.0.0.1";
}
+7
View File
@@ -0,0 +1,7 @@
namespace Azaion.LoaderUI;
/// <summary>
/// Constants used only by the LoaderUI bootstrapper; values shared with the
/// rest of the suite come from the common <c>Constants</c> class.
/// </summary>
public static class ConstantsLoader
{
    /// <summary>
    /// Fallback API directory for suite installers, used when
    /// <c>DirectoriesConfig.SuiteInstallerDirectory</c> is not configured.
    /// </summary>
    public const string SUITE_FOLDER = "suite";

    /// <summary>
    /// Local TCP port the external loader process is started on and that the
    /// ZeroMQ dealer socket connects to during login validation.
    /// </summary>
    public const int EXTERNAL_LOADER_PORT = 5020;
}
+45 -58
View File
@@ -1,10 +1,12 @@
using System.Diagnostics; using System.Diagnostics;
using System.IO; using System.IO;
using System.Text; using System.Text;
using System.Text.RegularExpressions;
using System.Windows; using System.Windows;
using System.Windows.Controls; using System.Windows.Controls;
using System.Windows.Input; using System.Windows.Input;
using System.Windows.Media; using System.Windows.Media;
using Azaion.Common;
using MessagePack; using MessagePack;
using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options; using Microsoft.Extensions.Options;
@@ -44,14 +46,47 @@ public partial class Login
Email = TbEmail.Text, Email = TbEmail.Text,
Password = TbPassword.Password Password = TbPassword.Password
}; };
if (!creds.IsValid()) if (string.IsNullOrWhiteSpace(creds.Email) || string.IsNullOrWhiteSpace(creds.Password))
return; return;
SetControlsStatus(isLoading: true);
_azaionApi.Login(creds);
try try
{ {
SetControlsStatus(isLoading: true);
_azaionApi.Login(creds);
Validate(creds); Validate(creds);
TbStatus.Foreground = Brushes.Black;
var installerVersion = await GetInstallerVer();
var localVersion = Constants.GetLocalVersion();
var credsEncrypted = Security.Encrypt(creds);
if (installerVersion > localVersion)
{
TbStatus.Text = $"Updating from {localVersion} to {installerVersion}...";
var (installerName, stream) = await _azaionApi.DownloadInstaller(_dirConfig?.SuiteInstallerDirectory ?? "");
var localFileStream = new FileStream(installerName, FileMode.Create, FileAccess.Write);
await stream.CopyToAsync(localFileStream);
localFileStream.Close();
stream.Close();
Process.Start(new ProcessStartInfo
{
FileName = "cmd.exe",
Arguments = $"/c updater.cmd {Process.GetCurrentProcess().Id} {installerName} {Constants.AZAION_SUITE_EXE} \"{credsEncrypted}\""
});
}
else
{
TbStatus.Text = "Your version is up to date!";
Process.Start(Constants.AZAION_SUITE_EXE, $"-c {credsEncrypted}");
await Task.Delay(800);
TbStatus.Text = "Loading...";
while (!Process.GetProcessesByName(Path.GetFileNameWithoutExtension(Constants.EXTERNAL_INFERENCE_PATH)).Any())
await Task.Delay(500);
await Task.Delay(1500);
}
Close();
} }
catch (Exception exception) catch (Exception exception)
{ {
@@ -59,28 +94,7 @@ public partial class Login
TbStatus.Foreground = Brushes.Red; TbStatus.Foreground = Brushes.Red;
TbStatus.Text = exception.Message; TbStatus.Text = exception.Message;
SetControlsStatus(isLoading: false); SetControlsStatus(isLoading: false);
return;
} }
TbStatus.Foreground = Brushes.Black;
var installerVersion = await GetInstallerVer();
var localVersion = GetLocalVer();
if (installerVersion > localVersion)
{
TbStatus.Text = $"Updating from {localVersion} to {installerVersion}...";
await DownloadAndRunInstaller();
TbStatus.Text = $"Installed {installerVersion}!";
}
else
TbStatus.Text = "Your version is up to date!";
Process.Start(Constants.AZAION_SUITE_EXE, $"-e {creds.Email} -p {creds.Password}");
await Task.Delay(800);
TbStatus.Text = "Loading...";
while (!Process.GetProcessesByName(Constants.INFERENCE_EXE).Any())
await Task.Delay(500);
await Task.Delay(1500);
Close();
} }
private void Validate(ApiCredentials creds) private void Validate(ApiCredentials creds)
@@ -92,12 +106,12 @@ public partial class Login
process.StartInfo = new ProcessStartInfo process.StartInfo = new ProcessStartInfo
{ {
FileName = Constants.EXTERNAL_LOADER_PATH, FileName = Constants.EXTERNAL_LOADER_PATH,
Arguments = $"--port {Constants.EXTERNAL_LOADER_PORT} --api {Constants.API_URL}", Arguments = $"--port {ConstantsLoader.EXTERNAL_LOADER_PORT} --api {Constants.DEFAULT_API_URL}",
CreateNoWindow = true CreateNoWindow = true
}; };
process.Start(); process.Start();
dealer.Options.Identity = Encoding.UTF8.GetBytes(Guid.NewGuid().ToString("N")); dealer.Options.Identity = Encoding.UTF8.GetBytes(Guid.NewGuid().ToString("N"));
dealer.Connect($"tcp://{Constants.EXTERNAL_LOADER_HOST}:{Constants.EXTERNAL_LOADER_PORT}"); dealer.Connect($"tcp://{Constants.DEFAULT_ZMQ_INFERENCE_HOST}:{ConstantsLoader.EXTERNAL_LOADER_PORT}");
var result = SendCommand(dealer, RemoteCommand.Create(CommandType.Login, creds)); var result = SendCommand(dealer, RemoteCommand.Create(CommandType.Login, creds));
if (result.CommandType != CommandType.Ok) if (result.CommandType != CommandType.Ok)
@@ -146,44 +160,17 @@ public partial class Login
} }
} }
private async Task DownloadAndRunInstaller()
{
var (installerName, stream) = await _azaionApi.DownloadInstaller(_dirConfig?.SuiteInstallerDirectory ?? "");
var localFileStream = new FileStream(installerName, FileMode.Create, FileAccess.Write);
await stream.CopyToAsync(localFileStream);
localFileStream.Close();
stream.Close();
var processInfo = new ProcessStartInfo(installerName)
{
UseShellExecute = true,
Arguments = "/VERYSILENT"
};
var process = Process.Start(processInfo);
await process!.WaitForExitAsync();
File.Delete(installerName);
}
private async Task<Version> GetInstallerVer() private async Task<Version> GetInstallerVer()
{ {
TbStatus.Text = "Checking for the newer version..."; TbStatus.Text = "Checking for the newer version...";
var installerDir = string.IsNullOrWhiteSpace(_dirConfig?.SuiteInstallerDirectory) var installerDir = string.IsNullOrWhiteSpace(_dirConfig?.SuiteInstallerDirectory)
? Constants.SUITE_FOLDER ? ConstantsLoader.SUITE_FOLDER
: _dirConfig.SuiteInstallerDirectory; : _dirConfig.SuiteInstallerDirectory;
var installerName = await _azaionApi.GetLastInstallerName(installerDir); var installerName = await _azaionApi.GetLastInstallerName(installerDir);
var version = installerName var match = Regex.Match(installerName, @"\d+(\.\d+)+");
.Replace("AzaionSuite.Iterative.", "") if (!match.Success)
.Replace(".exe", ""); throw new Exception($"Can't find version in {installerName}");
return new Version(version); return new Version(match.Value);
}
private Version GetLocalVer()
{
var localFileInfo = FileVersionInfo.GetVersionInfo(Constants.AZAION_SUITE_EXE);
if (string.IsNullOrWhiteSpace(localFileInfo.ProductVersion))
throw new Exception($"Can't find {Constants.AZAION_SUITE_EXE} and its version");
return new Version(localFileInfo.FileVersion!);
} }
private void CloseClick(object sender, RoutedEventArgs e) => Close(); private void CloseClick(object sender, RoutedEventArgs e) => Close();
+6
View File
@@ -0,0 +1,6 @@
{
"DirectoriesConfig":
{
"SuiteInstallerDirectory": "suite-stage"
}
}
+39
View File
@@ -0,0 +1,39 @@
@echo off
setlocal
REM updater.cmd -- self-update helper launched by the LoaderUI.
REM Waits for the calling process to exit, silently runs the downloaded
REM installer, then restarts the suite with the encrypted credentials and
REM blocks until the inference process appears.
REM
REM Args: %1 parent PID, %2 installer path, %3 main app exe, %4 encrypted creds.

REM Verify that all four arguments were provided
if "%~4"=="" (
echo Error: Missing arguments.
echo Usage: %0 ^<parent_pid^> ^<installer_path^> ^<app_path^> ^<encrypted_creds^>
exit /b 1
)

set "PARENT_PID=%1"
set "INSTALLER_PATH=%2"
set "MAIN_APP_PATH=%3"
REM %~4 strips surrounding quotes so CREDS holds the raw encrypted string.
set "CREDS=%~4"

REM Poll once per second until the parent process is gone; tasklist+find
REM succeed (errorlevel 0) while the PID is still listed.
:WAIT_FOR_PARENT_EXIT
echo Waiting for parent process (PID: %PARENT_PID%) to close...
tasklist /fi "pid eq %PARENT_PID%" | find "%PARENT_PID%" >nul
if %errorlevel% == 0 (
timeout /t 1 /nobreak >nul
goto WAIT_FOR_PARENT_EXIT
)

REM Run the installer unattended, then remove the downloaded package.
start "" /wait "%INSTALLER_PATH%" /VERYSILENT
del "%INSTALLER_PATH%"
echo Installed new version %INSTALLER_PATH%

REM Relaunch the freshly installed suite, passing the encrypted credentials.
start "" "%MAIN_APP_PATH%" -c "%CREDS%"
echo Loading...

REM Wait until the inference process shows up, then give it a few more
REM seconds to settle (5s is a heuristic -- confirm it is long enough).
:WAIT_FOR_APP_START
timeout /t 1 /nobreak >nul
tasklist /fi "imagename eq azaion-inference.exe" | find "azaion-inference.exe" >nul
if %errorlevel% neq 0 goto WAIT_FOR_APP_START
timeout /t 5 /nobreak >nul
echo Process started.
endlocal
+22 -32
View File
@@ -11,6 +11,7 @@ using Azaion.Common.DTO.Config;
using Azaion.Common.Events; using Azaion.Common.Events;
using Azaion.Common.Extensions; using Azaion.Common.Extensions;
using Azaion.Common.Services; using Azaion.Common.Services;
using Azaion.Common.Services.Inference;
using Azaion.Dataset; using Azaion.Dataset;
using CommandLine; using CommandLine;
using LibVLCSharp.Shared; using LibVLCSharp.Shared;
@@ -54,8 +55,9 @@ public partial class App
rollingInterval: RollingInterval.Day) rollingInterval: RollingInterval.Day)
.CreateLogger(); .CreateLogger();
Parser.Default.ParseArguments<ApiCredentials>(e.Args) Parser.Default.ParseArguments<ApiCredentials, ApiCredentialsEncrypted>(e.Args)
.WithParsed(Start) .WithParsed<ApiCredentials>(Start)
.WithParsed<ApiCredentialsEncrypted>(StartEncrypted)
.WithNotParsed(ErrorHandling); .WithNotParsed(ErrorHandling);
} }
@@ -70,30 +72,11 @@ public partial class App
Current.Shutdown(); Current.Shutdown();
} }
private Stream GetSystemConfig(LoaderClient loaderClient, string apiDir) private Stream GetConfig(LoaderClient loaderClient, string filename, string? apiDir)
{ {
try try
{ {
return loaderClient.LoadFile("config.system.json", apiDir); return loaderClient.LoadFile(filename, apiDir ?? "");
}
catch (Exception e)
{
Log.Logger.Error(e, e.Message);
return new MemoryStream(Encoding.UTF8.GetBytes(JsonConvert.SerializeObject(new
{
AnnotationConfig = Constants.DefaultAnnotationConfig,
AIRecognitionConfig = Constants.DefaultAIRecognitionConfig,
GpsDeniedConfig = Constants.DefaultGpsDeniedConfig,
ThumbnailConfig = Constants.DefaultThumbnailConfig,
})));
}
}
private Stream GetSecuredConfig(LoaderClient loaderClient, string apiDir)
{
try
{
return loaderClient.LoadFile("config.secured.json", apiDir);
} }
catch (Exception e) catch (Exception e)
{ {
@@ -102,6 +85,13 @@ public partial class App
} }
} }
private void StartEncrypted(ApiCredentialsEncrypted credsEncrypted)
{
Log.Logger.Information(credsEncrypted.Creds);
Start(Security.Decrypt<ApiCredentials>(credsEncrypted.Creds));
}
private void Start(ApiCredentials credentials) private void Start(ApiCredentials credentials)
{ {
try try
@@ -109,8 +99,8 @@ public partial class App
new ConfigUpdater().CheckConfig(); new ConfigUpdater().CheckConfig();
var initConfig = Constants.ReadInitConfig(Log.Logger); var initConfig = Constants.ReadInitConfig(Log.Logger);
var apiDir = initConfig.DirectoriesConfig.ApiResourcesDirectory; var apiDir = initConfig.DirectoriesConfig.ApiResourcesDirectory;
_loaderClient = new LoaderClient(initConfig.LoaderClientConfig, Log.Logger, _mainCTokenSource.Token);
_loaderClient = new LoaderClient(initConfig.LoaderClientConfig, Log.Logger, _mainCTokenSource.Token);
_loaderClient.StartClient(); _loaderClient.StartClient();
_loaderClient.Connect(); _loaderClient.Connect();
_loaderClient.Login(credentials); _loaderClient.Login(credentials);
@@ -121,8 +111,8 @@ public partial class App
.ConfigureAppConfiguration((_, config) => config .ConfigureAppConfiguration((_, config) => config
.AddCommandLine(Environment.GetCommandLineArgs()) .AddCommandLine(Environment.GetCommandLineArgs())
.AddJsonFile(Constants.CONFIG_PATH, optional: true, reloadOnChange: true) .AddJsonFile(Constants.CONFIG_PATH, optional: true, reloadOnChange: true)
.AddJsonStream(GetSystemConfig(_loaderClient, apiDir)) .AddJsonStream(GetConfig(_loaderClient, "config.system.json", apiDir))
.AddJsonStream(GetSecuredConfig(_loaderClient, apiDir))) .AddJsonStream(GetConfig(_loaderClient, "config.secured.json", apiDir)))
.UseSerilog() .UseSerilog()
.ConfigureServices((context, services) => .ConfigureServices((context, services) =>
{ {
@@ -164,12 +154,12 @@ public partial class App
typeof(Annotator.Annotator).Assembly, typeof(Annotator.Annotator).Assembly,
typeof(DatasetExplorer).Assembly, typeof(DatasetExplorer).Assembly,
typeof(AnnotationService).Assembly)); typeof(AnnotationService).Assembly));
services.AddSingleton<LibVLC>(_ => new LibVLC()); services.AddSingleton<LibVLC>(_ => new LibVLC("--no-osd", "--no-video-title-show", "--no-snapshot-preview"));
services.AddSingleton<FormState>(); services.AddSingleton<FormState>();
services.AddSingleton<MediaPlayer>(sp => services.AddSingleton<MediaPlayer>(sp =>
{ {
var libVLC = sp.GetRequiredService<LibVLC>(); var libVlc = sp.GetRequiredService<LibVLC>();
return new MediaPlayer(libVLC); return new MediaPlayer(libVlc);
}); });
services.AddSingleton<AnnotatorEventHandler>(); services.AddSingleton<AnnotatorEventHandler>();
services.AddSingleton<IDbFactory, DbFactory>(); services.AddSingleton<IDbFactory, DbFactory>();
@@ -186,10 +176,10 @@ public partial class App
}) })
.Build(); .Build();
Annotation.InitializeDirs(_host.Services.GetRequiredService<IOptions<DirectoriesConfig>>().Value); Annotation.Init(_host.Services.GetRequiredService<IOptions<DirectoriesConfig>>().Value,
_host.Services.GetRequiredService<IOptions<AnnotationConfig>>().Value.DetectionClassesDict);
_host.Services.GetRequiredService<DatasetExplorer>(); _host.Services.GetRequiredService<DatasetExplorer>();
// datasetExplorer.Show();
// datasetExplorer.Hide();
_mediator = _host.Services.GetRequiredService<IMediator>(); _mediator = _host.Services.GetRequiredService<IMediator>();
+1 -1
View File
@@ -6,8 +6,8 @@ using System.Windows.Media;
using Azaion.Common.Database; using Azaion.Common.Database;
using Azaion.Common.DTO; using Azaion.Common.DTO;
using Azaion.Common.DTO.Config; using Azaion.Common.DTO.Config;
using Azaion.Common.Extensions;
using Azaion.Common.Services; using Azaion.Common.Services;
using Azaion.Common.Services.Inference;
using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Options; using Microsoft.Extensions.Options;
using SharpVectors.Converters; using SharpVectors.Converters;
+2 -2
View File
@@ -1,12 +1,12 @@
{ {
"LoaderClientConfig": { "LoaderClientConfig": {
"ZeroMqHost": "127.0.0.1", "ZeroMqHost": "127.0.0.1",
"ZeroMqPort": 5024, "ZeroMqPort": 5025,
"ApiUrl": "https://api.azaion.com" "ApiUrl": "https://api.azaion.com"
}, },
"InferenceClientConfig": { "InferenceClientConfig": {
"ZeroMqHost": "127.0.0.1", "ZeroMqHost": "127.0.0.1",
"ZeroMqPort": 5126, "ZeroMqPort": 5127,
"ApiUrl": "https://api.azaion.com" "ApiUrl": "https://api.azaion.com"
}, },
"GpsDeniedClientConfig": { "GpsDeniedClientConfig": {
+4 -3
View File
@@ -29,13 +29,14 @@
"ProbabilityThreshold": 0.25, "ProbabilityThreshold": 0.25,
"TrackingDistanceConfidence": 0.15, "TrackingDistanceConfidence": 0.15,
"TrackingProbabilityIncrease": 15.0, "TrackingProbabilityIncrease": 0.15,
"TrackingIntersectionThreshold": 0.8, "TrackingIntersectionThreshold": 0.6,
"BigImageTileOverlapPercent": 20,
"ModelBatchSize": 4 "ModelBatchSize": 4
}, },
"GpsDeniedConfig": { "GpsDeniedConfig": {
"MinKeyPoints": 12 "MinKeyPoints": 11
}, },
"ThumbnailConfig": { "Size": "240,135", "Border": 10 } "ThumbnailConfig": { "Size": "240,135", "Border": 10 }
} }
+263
View File
@@ -0,0 +1,263 @@
using System.Windows;
using Azaion.Common;
using Azaion.Common.DTO;
using Azaion.Common.Services;
using Xunit;
namespace Azaion.Annotator.Test;
/// <summary>
/// Unit tests for <c>TileProcessor.Split</c>, which groups detections on a large
/// image into AI-sized tiles for inference.
/// NOTE(review): the <c>CanvasLabel</c> constructor appears to take
/// (left, right, top, bottom) -- inferred from the corner comments in the first
/// test; confirm against the CanvasLabel declaration.
/// </summary>
public class TileProcessorTest
{
    // Edge length of the square test image used by every test.
    private const int IMAGE_SIZE = 5000;

    /// <summary>Detections in all four corners cannot share a tile: one tile each.</summary>
    [Fact]
    public void Split_DetectionsNearImageCorners_ShouldCreateFourTiles()
    {
        // Arrange
        var originalSize = new Size(IMAGE_SIZE, IMAGE_SIZE);
        var detections = new List<CanvasLabel>
        {
            new(10, 60, 10, 60), // Top-left corner
            new(IMAGE_SIZE - 60, IMAGE_SIZE - 10, 10, 60), // Top-right corner
            new(10, 60, IMAGE_SIZE - 60, IMAGE_SIZE - 10), // Bottom-left corner
            new(IMAGE_SIZE - 60, IMAGE_SIZE - 10, IMAGE_SIZE - 60, IMAGE_SIZE - 10) // Bottom-right corner
        };

        // Act
        var results = TileProcessor.Split(originalSize, detections, CancellationToken.None);

        // Assert
        Assert.Equal(4, results.Count);
    }

    /// <summary>Two detections whose combined extent fits a tile are grouped together.</summary>
    [Fact]
    public void Split_DetectionsFarApartButFitInOneTile_ShouldCreateOneTile()
    {
        // Arrange
        var originalSize = new Size(IMAGE_SIZE, IMAGE_SIZE);
        var detections = new List<CanvasLabel>
        {
            new(100, 150, 100, 150),
            new(1200, 1250, 1200, 1250)
        };

        // Act
        var results = TileProcessor.Split(originalSize, detections, CancellationToken.None);

        // Assert
        Assert.Single(results);
        Assert.Equal(2, results[0].Detections.Count);
    }

    /// <summary>Detections separated by more than a tile width get separate tiles.</summary>
    [Fact]
    public void Split_DetectionsTooFarApart_ShouldCreateMultipleTiles()
    {
        // Arrange
        var originalSize = new Size(IMAGE_SIZE, IMAGE_SIZE);
        var detections = new List<CanvasLabel>
        {
            new(100, 150, 100, 150),
            new(2000, 2050, 2000, 2050) // More than Constants.AI_TILE_SIZE away
        };

        // Act
        var results = TileProcessor.Split(originalSize, detections, CancellationToken.None);

        // Assert
        Assert.Equal(2, results.Count);
        Assert.Contains(results, r => r.Detections.Count == 1 && r.Detections.Contains(detections[0]));
        Assert.Contains(results, r => r.Detections.Count == 1 && r.Detections.Contains(detections[1]));
    }

    /// <summary>Two spatial clusters of detections produce exactly two tiles, each holding its cluster.</summary>
    [Fact]
    public void Split_ComplexScenario_ShouldCreateCorrectNumberOfTiles()
    {
        // Arrange
        var originalSize = new Size(IMAGE_SIZE, IMAGE_SIZE);
        var detections = new List<CanvasLabel>
        {
            // Group 1 (should be tiled together)
            new(100, 150, 100, 150),
            new(200, 250, 200, 250),
            new(500, 550, 500, 550),

            // Group 2 (far from group 1, should be in a separate tile)
            new(3000, 3050, 3000, 3050),
            new(3100, 3150, 3100, 3150),
        };

        // Act
        var results = TileProcessor.Split(originalSize, detections, CancellationToken.None);

        // Assert
        Assert.Equal(2, results.Count);
        var group1Tile = results.FirstOrDefault(r => r.Detections.Count == 3);
        var group2Tile = results.FirstOrDefault(r => r.Detections.Count == 2);
        Assert.NotNull(group1Tile);
        Assert.NotNull(group2Tile);
        Assert.Contains(detections[0], group1Tile.Detections);
        Assert.Contains(detections[1], group1Tile.Detections);
        Assert.Contains(detections[2], group1Tile.Detections);
        Assert.Contains(detections[3], group2Tile.Detections);
        Assert.Contains(detections[4], group2Tile.Detections);
    }

    /// <summary>No detections means no tiles.</summary>
    [Fact]
    public void Split_NoDetections_ShouldReturnEmptyList()
    {
        // Arrange
        var originalSize = new Size(IMAGE_SIZE, IMAGE_SIZE);
        var detections = new List<CanvasLabel>();

        // Act
        var results = TileProcessor.Split(originalSize, detections, CancellationToken.None);

        // Assert
        Assert.Empty(results);
    }

    /// <summary>A single detection yields a single tile containing it.</summary>
    [Fact]
    public void Split_OneDetection_ShouldCreateOneTile()
    {
        // Arrange
        var originalSize = new Size(IMAGE_SIZE, IMAGE_SIZE);
        var detections = new List<CanvasLabel> { new(100, 150, 100, 150) };

        // Act
        var results = TileProcessor.Split(originalSize, detections, CancellationToken.None);

        // Assert
        Assert.Single(results);
        Assert.Single(results[0].Detections);
        Assert.Equal(detections[0], results[0].Detections[0]);
    }

    /// <summary>Detections whose extent is exactly on the tile-size limit still share one tile.</summary>
    [Fact]
    public void Split_DetectionsOnTileBoundary_ShouldFitInOneTile()
    {
        // Arrange
        var originalSize = new Size(IMAGE_SIZE, IMAGE_SIZE);
        // Combined width is 1270. 1270 + BORDER (10) is not > Constants.AI_TILE_SIZE (1280), so they fit.
        var detections = new List<CanvasLabel>
        {
            new(0, 50, 0, 50),
            new(Constants.AI_TILE_SIZE - TileProcessor.BORDER - 50, Constants.AI_TILE_SIZE - TileProcessor.BORDER, 0, 50)
        };

        // Act
        var results = TileProcessor.Split(originalSize, detections, CancellationToken.None);

        // Assert
        Assert.Single(results);
        Assert.Equal(2, results[0].Detections.Count);
    }

    /// <summary>One pixel past the tile-size limit forces a second tile.</summary>
    [Fact]
    public void Split_DetectionsJustOverTileBoundary_ShouldCreateTwoTiles()
    {
        // Arrange
        var originalSize = new Size(IMAGE_SIZE, IMAGE_SIZE);
        // Combined width is 1271. 1271 + BORDER (10) is > Constants.AI_TILE_SIZE (1280), so they don't fit.
        var detections = new List<CanvasLabel>
        {
            new(0, 50, 1000, 1050), // x:[0,50], y:[1000,1050]
            new(Constants.AI_TILE_SIZE - TileProcessor.BORDER - 49, Constants.AI_TILE_SIZE - TileProcessor.BORDER + 1, 0, 50)
        };

        // Act
        var results = TileProcessor.Split(originalSize, detections, CancellationToken.None);

        // Assert
        Assert.Equal(2, results.Count);
    }

    /// <summary>Every produced tile rectangle must lie inside the source image.</summary>
    [Fact]
    public void Split_ResultingTiles_ShouldBeWithinImageBoundaries()
    {
        // Arrange
        var originalSize = new Size(IMAGE_SIZE, IMAGE_SIZE);
        var detections = new List<CanvasLabel>
        {
            new(10, 60, 10, 60), // Top-left corner
            new(IMAGE_SIZE - 60, IMAGE_SIZE - 10, IMAGE_SIZE - 60, IMAGE_SIZE - 10) // Bottom-right corner
        };

        // Act
        var results = TileProcessor.Split(originalSize, detections, CancellationToken.None);

        // Assert
        Assert.Equal(2, results.Count);
        foreach (var result in results)
        {
            var tile = result.Tile;
            Assert.True(tile.Left >= 0, $"Tile Left boundary {tile.Left} is out of bounds.");
            Assert.True(tile.Top >= 0, $"Tile Top boundary {tile.Top} is out of bounds.");
            Assert.True(tile.Right <= originalSize.Width, $"Tile Right boundary {tile.Right} is out of bounds.");
            Assert.True(tile.Bottom <= originalSize.Height, $"Tile Bottom boundary {tile.Bottom} is out of bounds.");
        }
    }

    /// <summary>A chain A-B-C where only adjacent pairs are close still collapses into one tile.</summary>
    [Fact]
    public void Split_ChainedDetections_ShouldCreateOneTile()
    {
        // Arrange
        var originalSize = new Size(IMAGE_SIZE, IMAGE_SIZE);
        var detections = new List<CanvasLabel>
        {
            new(100, 200, 100, 200), // Detection A
            new(600, 700, 600, 700), // Detection B (close to A)
            new(1100, 1200, 1100, 1200) // Detection C (close to B, but far from A)
        };

        // Act
        var results = TileProcessor.Split(originalSize, detections, CancellationToken.None);

        // Assert
        Assert.Single(results);
        Assert.Equal(3, results[0].Detections.Count);
    }

    /// <summary>A detection wider than the tile size still gets one (enlarged) tile.</summary>
    [Fact]
    public void Split_SingleDetectionLargerThanTileSize_ShouldCreateOneTile()
    {
        // Arrange
        var originalSize = new Size(IMAGE_SIZE, IMAGE_SIZE);
        var largeDetection = new CanvasLabel(100, 100 + Constants.AI_TILE_SIZE + 100, 100, 200);
        var detections = new List<CanvasLabel> { largeDetection };

        // Act
        var results = TileProcessor.Split(originalSize, detections, CancellationToken.None);

        // Assert
        Assert.Single(results);
        var resultTile = results[0];
        Assert.Single(resultTile.Detections);
        Assert.Equal(largeDetection, resultTile.Detections[0]);
        // The tile should be at least as large as the detection it contains.
        Assert.True(resultTile.Tile.Width >= largeDetection.Width);
        Assert.True(resultTile.Tile.Height >= largeDetection.Height);
    }

    /// <summary>An oversized detection absorbs a small neighbor into the same tile.</summary>
    [Fact]
    public void Split_LargeDetectionWithNearbySmallDetection_ShouldCreateOneTile()
    {
        // Arrange
        var originalSize = new Size(IMAGE_SIZE, IMAGE_SIZE);
        var largeTallDetection = new CanvasLabel(100, 150, 100, 100 + Constants.AI_TILE_SIZE + 200);
        var smallDetectionNearby = new CanvasLabel(largeTallDetection.Right + 15, largeTallDetection.Right + 35, 700, 720);
        var detections = new List<CanvasLabel> { largeTallDetection, smallDetectionNearby };

        // Act
        var results = TileProcessor.Split(originalSize, detections, CancellationToken.None);

        // Assert
        Assert.Single(results);
        Assert.Equal(2, results[0].Detections.Count);
        Assert.Contains(largeTallDetection, results[0].Detections);
        Assert.Contains(smallDetectionNearby, results[0].Detections);
    }
}
+2 -2
View File
@@ -13,8 +13,8 @@ del dist\config.json
robocopy "dist" "dist-azaion" "Azaion.Annotator.dll" "Azaion.Dataset.dll" "Azaion.Common.dll" "Azaion.CommonSecurity.dll" /MOV robocopy "dist" "dist-azaion" "Azaion.Annotator.dll" "Azaion.Dataset.dll" "Azaion.Common.dll" "Azaion.CommonSecurity.dll" /MOV
robocopy "dist" "dist-azaion" "Azaion.Suite.dll" "Azaion.Suite.exe" "Azaion.Suite.runtimeconfig.json" "Azaion.Suite.deps.json" "logo.png" /MOV robocopy "dist" "dist-azaion" "Azaion.Suite.dll" "Azaion.Suite.exe" "Azaion.Suite.runtimeconfig.json" "Azaion.Suite.deps.json" "logo.png" /MOV
robocopy "Azaion.LoaderUI\bin\Release\net8.0-windows\win-x64\publish" "dist-dlls" "Azaion.LoaderUI.dll" "Azaion.LoaderUI.exe" "Azaion.LoaderUI.runtimeconfig.json" ^ robocopy "Azaion.LoaderUI\bin\Release\net8.0-windows\win-x64\publish" "dist-azaion" "Azaion.LoaderUI.dll" "Azaion.LoaderUI.exe" "Azaion.LoaderUI.runtimeconfig.json" "Azaion.LoaderUI.deps.json" "loaderconfig.json" /MOV
"Azaion.LoaderUI.deps.json" "loaderconfig.json" robocopy "Azaion.LoaderUI\bin\Release\net8.0-windows\win-x64\publish" "dist-dlls" "updater.cmd" /MOV
move dist\config.production.json dist-azaion\config_updated.json move dist\config.production.json dist-azaion\config_updated.json
+2 -2
View File
@@ -19,8 +19,8 @@ echo building and upload iterative installer...
iscc build\installer.iterative.iss iscc build\installer.iterative.iss
call build\upload.cmd "suite-dev" call build\upload.cmd "suite-dev"
echo building full installer @rem echo building full installer
iscc build\installer.full.iss @rem iscc build\installer.full.iss
cd /d %CURRENT_DIR% cd /d %CURRENT_DIR%
echo Done! echo Done!