diff --git a/Limelight/Stream/VideoDecoderRenderer.m b/Limelight/Stream/VideoDecoderRenderer.m index 7333c650..35f41bc0 100644 --- a/Limelight/Stream/VideoDecoderRenderer.m +++ b/Limelight/Stream/VideoDecoderRenderer.m @@ -6,6 +6,8 @@ // Copyright (c) 2014 Moonlight Stream. All rights reserved. // +@import VideoToolbox; + #import "VideoDecoderRenderer.h" #import "StreamView.h" @@ -23,6 +25,7 @@ @implementation VideoDecoderRenderer { NSData *spsData, *ppsData, *vpsData; CMVideoFormatDescriptionRef formatDesc; + VTDecompressionSessionRef decompressionSession; CADisplayLink* _displayLink; BOOL framePacing; @@ -74,6 +77,12 @@ - (void)reinitializeDisplayLayer CFRelease(formatDesc); formatDesc = nil; } + + if (decompressionSession != nil){ + VTDecompressionSessionInvalidate(decompressionSession); + CFRelease(decompressionSession); + decompressionSession = nil; + } } - (id)initWithView:(StreamView*)view callbacks:(id)callbacks streamAspectRatio:(float)aspectRatio useFramePacing:(BOOL)useFramePacing @@ -108,6 +117,28 @@ - (void)start [_displayLink addToRunLoop:[NSRunLoop mainRunLoop] forMode:NSDefaultRunLoopMode]; } +- (void) setupDecompressionSession { + if (decompressionSession != NULL){ + VTDecompressionSessionInvalidate(decompressionSession); + CFRelease(decompressionSession); + decompressionSession = nil; + } + VTDecompressionOutputCallbackRecord outputCallback; + outputCallback.decompressionOutputCallback = (VTDecompressionOutputCallback)decompressionCallback; + outputCallback.decompressionOutputRefCon = (__bridge void*) self; + + int status = VTDecompressionSessionCreate(kCFAllocatorDefault, + formatDesc, + nil, + nil, + &outputCallback, + &decompressionSession); + if (status != noErr) { + NSLog(@"Failed to instance VTDecompressionSessionRef, status %d", status); + } + +} + // TODO: Refactor this int DrSubmitDecodeUnit(PDECODE_UNIT decodeUnit); @@ -262,6 +293,8 @@ - (int)submitDecodeBuffer:(unsigned char *)data length:(int)length bufferType:(i formatDesc = NULL; } } 
+ + [self setupDecompressionSession]; } // Data is NOT to be freed here. It's a direct usage of the caller's buffer. @@ -343,12 +376,80 @@ - (int)submitDecodeBuffer:(unsigned char *)data length:(int)length bufferType:(i return DR_NEED_IDR; } + VTDecodeFrameFlags flags = kVTDecodeFrame_EnableAsynchronousDecompression; + VTDecodeInfoFlags flagOut = 0; + + OSStatus decodeStatus = VTDecompressionSessionDecodeFrame(decompressionSession, sampleBuffer, flags, (void*)(int64_t)frameType, &flagOut); + + if (decodeStatus != noErr){ + NSLog(@"Failed to decompress frame"); + } else { + dispatch_async(dispatch_get_main_queue(), ^{ + if (frameType == FRAME_TYPE_IDR) { + // Ensure the layer is visible now + self->displayLayer.hidden = NO; + + // Tell our parent VC to hide the progress indicator + [self->_callbacks videoContentShown]; + } + }); + } + + /* Flush in-process frames. */ + //VTDecompressionSessionFinishDelayedFrames(decompressionSession); + + /* Block until our callback has been called with the last frame. 
*/ + //VTDecompressionSessionWaitForAsynchronousFrames(decompressionSession); + + // Dereference the buffers + CFRelease(dataBlockBuffer); + CFRelease(frameBlockBuffer); + CFRelease(sampleBuffer); + + return DR_OK; +} + +void decompressionCallback( + void * CM_NULLABLE videoRendererRef, + void * CM_NULLABLE sourceFrameRef, + OSStatus status, + VTDecodeInfoFlags infoFlags, + CM_NULLABLE CVImageBufferRef imageBuffer, + CMTime presentationTimestamp, + CMTime presentationDuration +){ + if (status != noErr || imageBuffer == NULL) + { + NSError *error = [NSError errorWithDomain:NSOSStatusErrorDomain code:status userInfo:nil]; + NSLog(@"Decompression session error: %@", error); return; + } + + VideoDecoderRenderer * decoderRenderer = (__bridge VideoDecoderRenderer *) videoRendererRef; + CMVideoFormatDescriptionRef formatDescriptionRef; + + OSStatus res = CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, imageBuffer, &formatDescriptionRef); + if (res != noErr){ + NSLog(@"Failed to create video format description from imageBuffer"); return; + } + + CMSampleBufferRef sampleBuffer; + CMSampleTimingInfo sampleTiming = {CMTimeMake(1, decoderRenderer->frameRate), kCMTimeZero, kCMTimeInvalid}; + + OSStatus err = CMSampleBufferCreateReadyWithImageBuffer(kCFAllocatorDefault, imageBuffer, formatDescriptionRef, &sampleTiming, &sampleBuffer); + CFRelease(formatDescriptionRef); + if (err != noErr){ + NSLog(@"Error creating sample buffer for decompressed image buffer %d", (int)err); + return; + } + CFArrayRef attachments = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, YES); CFMutableDictionaryRef dict = (CFMutableDictionaryRef)CFArrayGetValueAtIndex(attachments, 0); CFDictionarySetValue(dict, kCMSampleAttachmentKey_DisplayImmediately, kCFBooleanTrue); CFDictionarySetValue(dict, kCMSampleAttachmentKey_IsDependedOnByOthers, kCFBooleanTrue); + int64_t frameType = (int64_t) sourceFrameRef; + if (frameType == FRAME_TYPE_PFRAME) { // P-frame CFDictionarySetValue(dict, kCMSampleAttachmentKey_NotSync, kCFBooleanTrue); @@ -358,24 +459,10 @@ -
(int)submitDecodeBuffer:(unsigned char *)data length:(int)length bufferType:(i CFDictionarySetValue(dict, kCMSampleAttachmentKey_NotSync, kCFBooleanFalse); CFDictionarySetValue(dict, kCMSampleAttachmentKey_DependsOnOthers, kCFBooleanFalse); } - - // Enqueue the next frame - [self->displayLayer enqueueSampleBuffer:sampleBuffer]; - - if (frameType == FRAME_TYPE_IDR) { - // Ensure the layer is visible now - self->displayLayer.hidden = NO; - - // Tell our parent VC to hide the progress indicator - [self->_callbacks videoContentShown]; - } - // Dereference the buffers - CFRelease(dataBlockBuffer); - CFRelease(frameBlockBuffer); + // Enqueue the next frame + [decoderRenderer->displayLayer enqueueSampleBuffer:sampleBuffer]; CFRelease(sampleBuffer); - - return DR_OK; } @end