
Commit

BUGFIX: FPS Changes in Video Info Dialog were not reflected in Data Acquisition.

BETTER: Video playback of very fast and very slow fps videos now keeps the correct timing and no longer deadlocks.
Left to solve: Some videos (e.g. interlaced .mov) still do not show the correct timeline calibration.
BETTER: No more arbitrary motion detection on startup with a loaded movie.
avosskuehler committed Dec 16, 2021
1 parent a77b309 commit f630ab8
Showing 8 changed files with 214 additions and 157 deletions.
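
Note: a minimal, self-contained sketch of the scaling idea behind the BUGFIX line above. Positions reported by OpenCV follow the frame rate stored in the video container, so an FPS value entered in the Video Info Dialog has to be turned into a scale factor before timeline positions and data acquisition use it. The names nominalFps and userFps and the direction of the ratio are assumptions for illustration, not taken from the VianaNET sources.

using System;

class FramerateFactorSketch
{
    static void Main()
    {
        // Assumed convention: the factor maps container time to project (timeline) time.
        double nominalFps = 25.0;     // frame rate stored in the video container
        double userFps = 1000.0;      // frame rate entered in the Video Info Dialog (e.g. high-speed footage)
        double framerateFactor = nominalFps / userFps;

        double containerPosInMS = 4000.0;                            // what Get(PosMsec) would report
        double projectPosInMS = containerPosInMS * framerateFactor;  // what the timeline and data acquisition should see
        double backToContainerMS = projectPosInMS / framerateFactor; // what Set(PosMsec, ...) needs again

        Console.WriteLine($"{projectPosInMS} ms on the timeline, {backToContainerMS} ms in the container");
    }
}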
153 changes: 79 additions & 74 deletions VianaNET/Data/ProcessingData.cs
@@ -274,6 +274,8 @@ public ProcessingData()
this.PositiveContrast.CollectionChanged += this.MotionDetectionParameterCollectionChanged;

this.PropertyChanged += this.ProcessingDataPropertyChanged;

this.IsDetectionActivated = this.IsUsingMotionDetection || (this.IsUsingColorDetection && this.IsTargetColorSet);
}


@@ -557,94 +559,97 @@ public bool ProcessImage()
this.watch.Start();
long start = this.watch.ElapsedMilliseconds;

for (int i = 0; i < App.Project.ProcessingData.NumberOfTrackedObjects; i++)
if (App.Project.ProcessingData.IsDetectionActivated)
{
// Console.Write("BeforeColorFilte ");
// Console.WriteLine(watch.ElapsedMilliseconds.ToString());
if (this.ColorThreshold.Count <= i || this.BlobMinDiameter.Count <= i
|| this.BlobMaxDiameter.Count <= i || this.CurrentBlobCenter.Count <= i)
for (int i = 0; i < App.Project.ProcessingData.NumberOfTrackedObjects; i++)
{
break;
}

// Get original picture
Video.Instance.RefreshProcessingMap();
// Console.Write("BeforeColorFilte ");
// Console.WriteLine(watch.ElapsedMilliseconds.ToString());
if (this.ColorThreshold.Count <= i || this.BlobMinDiameter.Count <= i
|| this.BlobMaxDiameter.Count <= i || this.CurrentBlobCenter.Count <= i)
{
break;
}

if (this.IsUsingColorDetection)
{
// Apply color and crop filter if applicable
this.colorAndCropFilter.TargetColor = this.TargetColor.Count > i ? this.TargetColor[i] : Colors.Black;
this.colorAndCropFilter.Threshold = this.ColorThreshold[i];
this.colorAndCropFilter.ProcessInPlace(Video.Instance.VideoElement.ColorProcessingMapping);
}
else
{
// Only apply crop filter
this.cropFilter.ProcessInPlace(Video.Instance.VideoElement.ColorProcessingMapping);
}
// Get original picture
Video.Instance.RefreshProcessingMap();

// Apply motion detection if applicable
if (this.IsUsingMotionDetection)
{
if (this.detector.MotionDetectionAlgorithm is TwoFramesDifferenceDetectorSpecial algorithm)
if (this.IsUsingColorDetection)
{
// Apply color and crop filter if applicable
this.colorAndCropFilter.TargetColor = this.TargetColor.Count > i ? this.TargetColor[i] : Colors.Black;
this.colorAndCropFilter.Threshold = this.ColorThreshold[i];
this.colorAndCropFilter.ProcessInPlace(Video.Instance.VideoElement.ColorProcessingMapping);
}
else
{
algorithm.DifferenceThreshold = App.Project.ProcessingData.MotionThreshold[i];
algorithm.IsPositiveThreshold = App.Project.ProcessingData.PositiveContrast[i];
algorithm.SuppressNoise = App.Project.ProcessingData.SuppressNoise[i];
// Only apply crop filter
this.cropFilter.ProcessInPlace(Video.Instance.VideoElement.ColorProcessingMapping);
}

Video.Instance.VideoElement.CopyProcessingMapToUnmanagedImage();
this.detector.ProcessFrame(Video.Instance.VideoElement.UnmanagedImage);
Video.Instance.VideoElement.CopyProcessedDataToProcessingMap();
}
// Apply motion detection if applicable
if (this.IsUsingMotionDetection)
{
if (this.detector.MotionDetectionAlgorithm is TwoFramesDifferenceDetectorSpecial algorithm)
{
algorithm.DifferenceThreshold = App.Project.ProcessingData.MotionThreshold[i];
algorithm.IsPositiveThreshold = App.Project.ProcessingData.PositiveContrast[i];
algorithm.SuppressNoise = App.Project.ProcessingData.SuppressNoise[i];
}

Video.Instance.VideoElement.CopyProcessingMapToUnmanagedImage();
this.detector.ProcessFrame(Video.Instance.VideoElement.UnmanagedImage);
Video.Instance.VideoElement.CopyProcessedDataToProcessingMap();
}

// Send modified image to blobs control
Video.Instance.VideoElement.UpdateProcessedImageSource();
// Send modified image to blobs control
Video.Instance.VideoElement.UpdateProcessedImageSource();

// Get blobs from filtered process
IntPtr mapToUse;
if (this.IsUsingColorDetection && !this.IsUsingMotionDetection)
{
mapToUse = Video.Instance.VideoElement.ColorProcessingMapping;
}
else
{
mapToUse = Video.Instance.VideoElement.MotionProcessingMapping;
}
// Get blobs from filtered process
IntPtr mapToUse;
if (this.IsUsingColorDetection && !this.IsUsingMotionDetection)
{
mapToUse = Video.Instance.VideoElement.ColorProcessingMapping;
}
else
{
mapToUse = Video.Instance.VideoElement.MotionProcessingMapping;
}

Histogram histogram = this.histogrammFilter.FromIntPtrMap(mapToUse);
this.segmentator.Histogram = histogram;
this.segmentator.ThresholdLuminance = histogram.Max * 0.5f;
this.segmentator.MinDiameter = this.BlobMinDiameter[i];
this.segmentator.MaxDiameter = this.BlobMaxDiameter[i];
Histogram histogram = this.histogrammFilter.FromIntPtrMap(mapToUse);
this.segmentator.Histogram = histogram;
this.segmentator.ThresholdLuminance = histogram.Max * 0.5f;
this.segmentator.MinDiameter = this.BlobMinDiameter[i];
this.segmentator.MaxDiameter = this.BlobMaxDiameter[i];

Segment foundSegment = this.segmentator.Process();
while (this.DetectedBlob.Count <= i)
{
this.DetectedBlob.Add(new Segment());
}
Segment foundSegment = this.segmentator.Process();
while (this.DetectedBlob.Count <= i)
{
this.DetectedBlob.Add(new Segment());
}

this.DetectedBlob[i] = foundSegment;
this.DetectedBlob[i] = foundSegment;

// Console.Write("AfterBlobDetection: ");
// Console.WriteLine(watch.ElapsedMilliseconds.ToString());
if (foundSegment.Diagonal != 0 && (foundSegment.Height < (this.colorAndCropFilter.ImageHeight - 10))
&& (foundSegment.Width < (this.colorAndCropFilter.ImageWidth - 10)))
{
this.CurrentBlobCenter[i] = new Point(foundSegment.Center.X, foundSegment.Center.Y);
objectsFound = true;
}
else
{
this.CurrentBlobCenter[i] = null;
}
// Console.Write("AfterBlobDetection: ");
// Console.WriteLine(watch.ElapsedMilliseconds.ToString());
if (foundSegment.Diagonal != 0 && (foundSegment.Height < (this.colorAndCropFilter.ImageHeight - 10))
&& (foundSegment.Width < (this.colorAndCropFilter.ImageWidth - 10)))
{
this.CurrentBlobCenter[i] = new Point(foundSegment.Center.X, foundSegment.Center.Y);
objectsFound = true;
}
else
{
this.CurrentBlobCenter[i] = null;
}

if (Video.Instance.IsDataAcquisitionRunning)
{
if (this.CurrentBlobCenter[i].HasValue)
if (Video.Instance.IsDataAcquisitionRunning)
{
var flippedPoint = new Point(this.CurrentBlobCenter[i].Value.X, Video.Instance.VideoElement.NaturalVideoHeight - this.CurrentBlobCenter[i].Value.Y);
App.Project.VideoData.AddPoint(i, flippedPoint);
if (this.CurrentBlobCenter[i].HasValue)
{
var flippedPoint = new Point(this.CurrentBlobCenter[i].Value.X, Video.Instance.VideoElement.NaturalVideoHeight - this.CurrentBlobCenter[i].Value.Y);
App.Project.VideoData.AddPoint(i, flippedPoint);
}
}
}
}
@@ -810,7 +815,7 @@ private void ProcessingDataPropertyChanged(object sender, PropertyChangedEventAr
}
else if (e.PropertyName == "IsUsingMotionDetection" || e.PropertyName == "IsUsingColorDetection" || e.PropertyName == "IsTargetColorSet")
{
this.IsDetectionActivated = this.IsUsingMotionDetection || this.IsUsingColorDetection;
this.IsDetectionActivated = this.IsUsingMotionDetection || (this.IsUsingColorDetection && this.IsTargetColorSet);
this.detector.Reset();
Video.Instance.RefreshProcessingMap();
Video.Instance.VideoElement.CopyProcessingMapToUnmanagedImage();
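
Note: the gate added above (in the constructor and in ProcessingDataPropertyChanged) only activates detection when motion detection is enabled, or when color detection is enabled and a target color has actually been picked. That is what suppresses the arbitrary motion detection on startup mentioned in the commit message. The following is a simplified stand-alone sketch of that condition, not the real ProcessingData class.

using System;

static class DetectionGateSketch
{
    // Mirrors the condition from the diff:
    // IsUsingMotionDetection || (IsUsingColorDetection && IsTargetColorSet)
    static bool IsDetectionActivated(bool usingMotionDetection, bool usingColorDetection, bool isTargetColorSet)
        => usingMotionDetection || (usingColorDetection && isTargetColorSet);

    static void Main()
    {
        // Freshly loaded movie: color detection may be enabled by the project,
        // but no target color has been picked yet, so nothing should be processed.
        Console.WriteLine(IsDetectionActivated(false, true, false));  // False

        // Motion detection explicitly enabled by the user.
        Console.WriteLine(IsDetectionActivated(true, false, false));  // True
    }
}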
134 changes: 80 additions & 54 deletions VianaNET/Modules/Video/Control/VideoBase.cs
@@ -740,95 +740,121 @@ private void Worker_DoWork(object sender, DoWorkEventArgs e)
{
BackgroundWorker worker = (BackgroundWorker)sender;
Stopwatch watch = new Stopwatch();
//Stopwatch fpswatch = new Stopwatch();
watch.Start();
//fpswatch.Start();

double frametimeInMS = 41;
double framerateFactor = 1;
double selectionEnd = 1;
this.Dispatcher.Invoke(() =>
{
frametimeInMS = this.FrameTimeInMS;
framerateFactor = App.Project.VideoData.FramerateFactor;
selectionEnd = App.Project.VideoData.SelectionEnd;
});

long starttime = 0;
Mat frameMat = new Mat();
while (!worker.CancellationPending)
using (Mat frameMat = new Mat())
{
var currentPosInMS = this.OpenCVObject.Get(VideoCaptureProperties.PosMsec);

bool endreached = false;
this.Dispatcher.Invoke(() =>
while (!worker.CancellationPending)
{
if (currentPosInMS > App.Project.VideoData.SelectionEnd)
var currentPosInMS = this.OpenCVObject.Get(VideoCaptureProperties.PosMsec);
//var currentPosInMS = this.OpenCVObject.Get(VideoCaptureProperties.PosFrames) * frametimeInMS;

var scaledCurrentPosInMS = currentPosInMS * framerateFactor;

if (scaledCurrentPosInMS >= selectionEnd)
{
endreached = true;
break;
}
});

if (endreached)
{
break;
}

//using (Mat frameMat = new Mat())
{
this.OpenCVObject.Read(frameMat);
if (frameMat.Empty())
if (Video.Instance.VideoMode == VideoMode.File)
{
this.Dispatcher.Invoke(() =>
// Get the time it took to process the last frame
var total = watch.ElapsedMilliseconds;

if (total < frametimeInMS)
{
this.Stop();
if (Video.Instance.VideoMode == VideoMode.File)
// Processing time is shorter than the default frame time, so wait until the frame time is over to keep the FPS correct
var wait = new TimeSpan((long)((frametimeInMS - total) * 10000));
Thread.Sleep(wait);
//Console.WriteLine("In Time, wait: {0}", wait.Milliseconds);
}
else
{
// Processing has taken more time than a frame should last, so skip frames to be in time again.
var skipCount = Math.Floor(total / frametimeInMS);
//Console.WriteLine("Skip: {0}", skipCount);
for (int i = 0; i < skipCount; i++)
{
var lastFrameIndex = this.OpenCVObject.Get(VideoCaptureProperties.FrameCount);
this.OpenCVObject.Set(VideoCaptureProperties.PosFrames, lastFrameIndex);
Video.Instance.VideoPlayerElement.RaiseFileComplete();
this.OpenCVObject.Grab();
this.GrabCurrentFrame();
this.frameCounter++;
}
});

break;
watch.Restart();
continue;
}

watch.Restart();
}

if (this.rotation.HasValue)
// Output FPS
//Console.WriteLine("FPS: {0}", 1f / fpswatch.ElapsedMilliseconds * 1000);
//fpswatch.Restart();

// Image processing: grab, retrieve, analyze, send to the processing chain
{
using (Mat rotMat = new Mat())
this.OpenCVObject.Read(frameMat);
if (frameMat.Empty())
{
Cv2.Rotate(frameMat, rotMat, this.rotation.Value);
// Last frame reached
this.Dispatcher.Invoke(() =>
{
this.Stop();
if (Video.Instance.VideoMode == VideoMode.File)
{
var lastFrameIndex = this.OpenCVObject.Get(VideoCaptureProperties.FrameCount);
this.OpenCVObject.Set(VideoCaptureProperties.PosFrames, lastFrameIndex);
Video.Instance.VideoPlayerElement.RaiseFileComplete();
this.OpenCVObject.Grab();
this.GrabCurrentFrame();
}
});

break;
}

if (this.rotation.HasValue)
{
using (Mat rotMat = new Mat())
{
Cv2.Rotate(frameMat, rotMat, this.rotation.Value);

// Must create and use WriteableBitmap in the same thread (UI thread).
this.Dispatcher.Invoke(() =>
{
WriteableBitmap newFrame = rotMat.ToWriteableBitmap();
Video.Instance.OriginalImageSource = newFrame;
Video.Instance.VideoElement.NewFrameCallback(newFrame);
});
}
}
else
{
// Must create and use WriteableBitmap in the same thread (UI thread).
this.Dispatcher.Invoke(() =>
{
WriteableBitmap newFrame = rotMat.ToWriteableBitmap();
WriteableBitmap newFrame = frameMat.ToWriteableBitmap();
Video.Instance.OriginalImageSource = newFrame;
Video.Instance.VideoElement.NewFrameCallback(newFrame);
});
}
}
else
{
// Must create and use WriteableBitmap in the same thread (UI thread).
this.Dispatcher.Invoke(() =>
{
WriteableBitmap newFrame = frameMat.ToWriteableBitmap();
Video.Instance.OriginalImageSource = newFrame;
Video.Instance.VideoElement.NewFrameCallback(newFrame);
});
}
}

if (Video.Instance.VideoMode == VideoMode.File)
{
while (watch.ElapsedMilliseconds - starttime < frametimeInMS)
{
Thread.Sleep(1);
}
}

starttime = watch.ElapsedMilliseconds;
GC.Collect();
GC.Collect();
}
}

frameMat.Dispose();
}

/// <summary>
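
Note: a compact sketch of the pacing strategy the reworked Worker_DoWork loop uses for file playback. The assumption is that the loop sleeps away the remainder of the frame time when processing finished early, and grabs and drops whole frames when processing took longer than one frame, so playback neither runs too fast nor falls behind and never waits for time that has already passed. ReadAndProcessFrame and SkipFrame are hypothetical stand-ins for the OpenCV read and grab calls, not VianaNET methods.

using System;
using System.Diagnostics;
using System.Threading;

class FramePacingSketch
{
    static void Main()
    {
        double frametimeInMS = 40;                  // e.g. a 25 fps video
        Stopwatch watch = Stopwatch.StartNew();

        for (int frame = 0; frame < 250; frame++)
        {
            long elapsed = watch.ElapsedMilliseconds;

            if (elapsed < frametimeInMS)
            {
                // Finished early: wait out the rest of the frame time so playback keeps the correct speed.
                Thread.Sleep(TimeSpan.FromMilliseconds(frametimeInMS - elapsed));
            }
            else
            {
                // Finished late: drop whole frames so playback catches up instead of lagging further behind.
                int skipCount = (int)Math.Floor(elapsed / frametimeInMS);
                for (int i = 0; i < skipCount; i++)
                {
                    SkipFrame();
                }
            }

            watch.Restart();
            ReadAndProcessFrame();
        }
    }

    static void SkipFrame() { /* would call OpenCVObject.Grab() and advance the frame counter */ }
    static void ReadAndProcessFrame() { /* would call Read(frameMat) and hand the frame to the processing chain */ }
}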
10 changes: 8 additions & 2 deletions VianaNET/Modules/Video/Control/VideoPlayer.cs
@@ -89,13 +89,18 @@ public override double MediaPositionInMS
{
get
{
// PosMsec in OpenCV is not reliable, https://github.com/opencv/opencv/issues/9053#issuecomment-745635554
var pos = this.OpenCVObject.Get(VideoCaptureProperties.PosMsec);
return pos;
return pos * App.Project.VideoData.FramerateFactor;

//var pos = this.OpenCVObject.Get(VideoCaptureProperties.PosFrames);
//return pos * this.FrameTimeInMS * App.Project.VideoData.FramerateFactor;
}

set
{
this.OpenCVObject.Set(VideoCaptureProperties.PosMsec, value);
this.OpenCVObject.Set(VideoCaptureProperties.PosMsec, value / App.Project.VideoData.FramerateFactor);
//this.OpenCVObject.Set(VideoCaptureProperties.PosFrames, value / this.FrameTimeInMS / App.Project.VideoData.FramerateFactor);
this.OpenCVObject.Grab();
this.GrabCurrentFrame();
this.UpdateFrameIndex();
@@ -299,6 +304,7 @@ public override void Revert()

// Seek to the beginning
this.OpenCVObject.Set(VideoCaptureProperties.PosMsec, zeroPosition);
//this.OpenCVObject.Set(VideoCaptureProperties.PosFrames, zeroPosition / FrameTimeInMS);
this.OpenCVObject.Grab();
this.GrabCurrentFrame();
this.UpdateFrameIndex();
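
Note: the diff points out that PosMsec is not reliable in OpenCV (see the linked issue) and keeps a PosFrames based variant in comments. The following is a small sketch of how a position can be derived from the frame index and the frame time instead; the file name and the stand-alone wiring are illustrative only.

using System;
using OpenCvSharp;

class PositionFromFrameIndexSketch
{
    static void Main()
    {
        // Hypothetical input file, for illustration only.
        using var capture = new VideoCapture("example.mp4");

        double fps = capture.Get(VideoCaptureProperties.Fps);     // nominal fps from the container
        double frameTimeInMS = 1000.0 / fps;

        double frameIndex = capture.Get(VideoCaptureProperties.PosFrames);
        double containerPositionInMS = frameIndex * frameTimeInMS;

        Console.WriteLine($"Frame {frameIndex} corresponds to {containerPositionInMS} ms of container time.");
    }
}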