-
Notifications
You must be signed in to change notification settings - Fork 300
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
[WIP] Use vt for manually decoding frames. Fixes #533 #535
base: master
Are you sure you want to change the base?
Changes from all commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -6,6 +6,8 @@ | |
// Copyright (c) 2014 Moonlight Stream. All rights reserved. | ||
// | ||
|
||
@import VideoToolbox; | ||
|
||
#import "VideoDecoderRenderer.h" | ||
#import "StreamView.h" | ||
|
||
|
@@ -23,6 +25,8 @@ @implementation VideoDecoderRenderer { | |
|
||
NSData *spsData, *ppsData, *vpsData; | ||
CMVideoFormatDescriptionRef formatDesc; | ||
CMVideoFormatDescriptionRef formatDescImageBuffer; | ||
VTDecompressionSessionRef decompressionSession; | ||
|
||
CADisplayLink* _displayLink; | ||
BOOL framePacing; | ||
|
@@ -74,6 +78,17 @@ - (void)reinitializeDisplayLayer | |
CFRelease(formatDesc); | ||
formatDesc = nil; | ||
} | ||
|
||
if (formatDescImageBuffer != nil) { | ||
CFRelease(formatDescImageBuffer); | ||
formatDescImageBuffer = nil; | ||
} | ||
|
||
if (decompressionSession != nil){ | ||
VTDecompressionSessionInvalidate(decompressionSession); | ||
CFRelease(decompressionSession); | ||
decompressionSession = nil; | ||
} | ||
} | ||
|
||
- (id)initWithView:(StreamView*)view callbacks:(id<ConnectionCallbacks>)callbacks streamAspectRatio:(float)aspectRatio useFramePacing:(BOOL)useFramePacing | ||
|
@@ -94,10 +109,7 @@ - (void)setupWithVideoFormat:(int)videoFormat frameRate:(int)frameRate | |
{ | ||
self->videoFormat = videoFormat; | ||
self->frameRate = frameRate; | ||
} | ||
|
||
- (void)start | ||
{ | ||
|
||
_displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(displayLinkCallback:)]; | ||
if (@available(iOS 15.0, tvOS 15.0, *)) { | ||
_displayLink.preferredFrameRateRange = CAFrameRateRangeMake(self->frameRate, self->frameRate, self->frameRate); | ||
|
@@ -106,38 +118,37 @@ - (void)start | |
_displayLink.preferredFramesPerSecond = self->frameRate; | ||
} | ||
[_displayLink addToRunLoop:[NSRunLoop mainRunLoop] forMode:NSDefaultRunLoopMode]; | ||
|
||
} | ||
|
||
- (void) setupDecompressionSession { | ||
if (decompressionSession != NULL){ | ||
VTDecompressionSessionInvalidate(decompressionSession); | ||
CFRelease(decompressionSession); | ||
decompressionSession = nil; | ||
} | ||
|
||
int status = VTDecompressionSessionCreate(kCFAllocatorDefault, | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. For initialize VTDecompressionSession need set parameters.
|
||
formatDesc, | ||
nil, | ||
nil, | ||
nil, | ||
&decompressionSession); | ||
if (status != noErr) { | ||
Log(LOG_E, @"Failed to instance VTDecompressionSessionRef, status %d", status); | ||
} | ||
|
||
} | ||
|
||
// TODO: Refactor this | ||
int DrSubmitDecodeUnit(PDECODE_UNIT decodeUnit); | ||
|
||
- (void)displayLinkCallback:(CADisplayLink *)sender | ||
{ | ||
VIDEO_FRAME_HANDLE handle; | ||
PDECODE_UNIT du; | ||
|
||
while (LiPollNextVideoFrame(&handle, &du)) { | ||
LiCompleteVideoFrame(handle, DrSubmitDecodeUnit(du)); | ||
|
||
if (framePacing) { | ||
// Calculate the actual display refresh rate | ||
double displayRefreshRate = 1 / (_displayLink.targetTimestamp - _displayLink.timestamp); | ||
|
||
// Only pace frames if the display refresh rate is >= 90% of our stream frame rate. | ||
// Battery saver, accessibility settings, or device thermals can cause the actual | ||
// refresh rate of the display to drop below the physical maximum. | ||
if (displayRefreshRate >= frameRate * 0.9f) { | ||
// Keep one pending frame to smooth out gaps due to | ||
// network jitter at the cost of 1 frame of latency | ||
if (LiGetPendingVideoFrames() == 1) { | ||
break; | ||
} | ||
} | ||
} | ||
} | ||
// Do Nothing | ||
} | ||
|
||
- (void)stop | ||
- (void)cleanup | ||
{ | ||
[_displayLink invalidate]; | ||
} | ||
|
@@ -262,6 +273,8 @@ - (int)submitDecodeBuffer:(unsigned char *)data length:(int)length bufferType:(i | |
formatDesc = NULL; | ||
} | ||
} | ||
|
||
[self setupDecompressionSession]; | ||
} | ||
|
||
// Data is NOT to be freed here. It's a direct usage of the caller's buffer. | ||
|
@@ -330,44 +343,25 @@ - (int)submitDecodeBuffer:(unsigned char *)data length:(int)length bufferType:(i | |
|
||
CMSampleBufferRef sampleBuffer; | ||
|
||
status = CMSampleBufferCreate(kCFAllocatorDefault, | ||
CMSampleTimingInfo sampleTiming = {kCMTimeInvalid, CMTimeMake(pts, 1000), kCMTimeInvalid}; | ||
|
||
status = CMSampleBufferCreateReady(kCFAllocatorDefault, | ||
frameBlockBuffer, | ||
true, NULL, | ||
NULL, formatDesc, 1, 0, | ||
NULL, 0, NULL, | ||
formatDesc, 1, 1, | ||
&sampleTiming, 0, NULL, | ||
&sampleBuffer); | ||
if (status != noErr) { | ||
Log(LOG_E, @"CMSampleBufferCreate failed: %d", (int)status); | ||
CFRelease(dataBlockBuffer); | ||
CFRelease(frameBlockBuffer); | ||
return DR_NEED_IDR; | ||
} | ||
|
||
OSStatus decodeStatus = [self decodeFrameWithSampleBuffer: sampleBuffer frameType: frameType]; | ||
|
||
CFArrayRef attachments = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, YES); | ||
CFMutableDictionaryRef dict = (CFMutableDictionaryRef)CFArrayGetValueAtIndex(attachments, 0); | ||
|
||
CFDictionarySetValue(dict, kCMSampleAttachmentKey_DisplayImmediately, kCFBooleanTrue); | ||
CFDictionarySetValue(dict, kCMSampleAttachmentKey_IsDependedOnByOthers, kCFBooleanTrue); | ||
|
||
if (frameType == FRAME_TYPE_PFRAME) { | ||
// P-frame | ||
CFDictionarySetValue(dict, kCMSampleAttachmentKey_NotSync, kCFBooleanTrue); | ||
CFDictionarySetValue(dict, kCMSampleAttachmentKey_DependsOnOthers, kCFBooleanTrue); | ||
} else { | ||
// I-frame | ||
CFDictionarySetValue(dict, kCMSampleAttachmentKey_NotSync, kCFBooleanFalse); | ||
CFDictionarySetValue(dict, kCMSampleAttachmentKey_DependsOnOthers, kCFBooleanFalse); | ||
} | ||
|
||
// Enqueue the next frame | ||
[self->displayLayer enqueueSampleBuffer:sampleBuffer]; | ||
|
||
if (frameType == FRAME_TYPE_IDR) { | ||
// Ensure the layer is visible now | ||
self->displayLayer.hidden = NO; | ||
|
||
// Tell our parent VC to hide the progress indicator | ||
[self->_callbacks videoContentShown]; | ||
if (decodeStatus != noErr){ | ||
Log(LOG_E, @"Failed to decompress frame: %d", decodeStatus); | ||
return DR_NEED_IDR; | ||
} | ||
|
||
// Dereference the buffers | ||
|
@@ -378,4 +372,52 @@ - (int)submitDecodeBuffer:(unsigned char *)data length:(int)length bufferType:(i | |
return DR_OK; | ||
} | ||
|
||
- (OSStatus) decodeFrameWithSampleBuffer:(CMSampleBufferRef)sampleBuffer frameType:(int)frameType{ | ||
VTDecodeFrameFlags flags = kVTDecodeFrame_EnableAsynchronousDecompression; | ||
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Does async decompression result in improved performance vs. synchronous? There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Honestly, I didn't compare sync/async here because I couldn't find a reliable way to measure performance other than my gut feeling. Any ideas? There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Not every frame is decompressed asynchronously; this is the correct setting to increase speed. |
||
|
||
return VTDecompressionSessionDecodeFrameWithOutputHandler(decompressionSession, sampleBuffer, flags, NULL, ^(OSStatus status, VTDecodeInfoFlags infoFlags, CVImageBufferRef _Nullable imageBuffer, CMTime presentationTimestamp, CMTime presentationDuration) { | ||
if (status != noErr) | ||
{ | ||
NSError *error = [NSError errorWithDomain:NSOSStatusErrorDomain code:status userInfo:nil]; | ||
Log(LOG_E, @"Decompression session error: %@", error); | ||
LiRequestIdrFrame(); | ||
return; | ||
} | ||
|
||
if (self->formatDescImageBuffer == NULL || !CMVideoFormatDescriptionMatchesImageBuffer(self->formatDescImageBuffer, imageBuffer)){ | ||
|
||
OSStatus res = CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, imageBuffer, &(self->formatDescImageBuffer)); | ||
if (res != noErr){ | ||
Log(LOG_E, @"Failed to create video format description from imageBuffer"); | ||
return; | ||
} | ||
} | ||
|
||
CMSampleBufferRef sampleBuffer; | ||
CMSampleTimingInfo sampleTiming = {kCMTimeInvalid, presentationTimestamp, presentationDuration}; | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. I recommend using CACurrentMediaTime() for timing info. There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. I have the impression if I set it to display immediately, more jittering is generated, maybe because presentationDuration gets messed up? There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. set the duration if you know the fps. There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. but try it without setting the duration, I don't notice the jittering in another project. |
||
|
||
OSStatus err = CMSampleBufferCreateReadyWithImageBuffer(kCFAllocatorDefault, imageBuffer, self->formatDescImageBuffer, &sampleTiming, &sampleBuffer); | ||
|
||
if (err != noErr){ | ||
Log(LOG_E, @"Error creating sample buffer for decompressed image buffer %d", (int)err); | ||
return; | ||
} | ||
|
||
// Enqueue the next frame | ||
[self->displayLayer enqueueSampleBuffer:sampleBuffer]; | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. I would add a flush before the [self->displayLayer enqueueSampleBuffer:sampleBuffer] function.
Sometimes it happens that not all data is played, and the buffer fills up, so the playback stops. |
||
|
||
dispatch_async(dispatch_get_main_queue(), ^{ | ||
if (frameType == FRAME_TYPE_IDR) { | ||
// Ensure the layer is visible now | ||
self->displayLayer.hidden = NO; | ||
|
||
// Tell our parent VC to hide the progress indicator | ||
[self->_callbacks videoContentShown]; | ||
} | ||
}); | ||
|
||
CFRelease(sampleBuffer); | ||
}); | ||
} | ||
|
||
@end |
There was a problem hiding this comment.
Choose a reason for hiding this comment.
The reason will be displayed to describe this comment to others. Learn more.
It is also necessary to do the following:
VTDecompressionSessionWaitForAsynchronousFrames(decompressionSession);