12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756 |
- #import "GPUImageMovie.h"
- #import "GPUImageMovieWriter.h"
- #import "GPUImageFilter.h"
- #import "GPUImageVideoCamera.h"
// Private class extension: internal reader/player state plus the GL resources
// used to convert biplanar YUV frames to RGB on the GPU.
@interface GPUImageMovie () <AVPlayerItemOutputPullDelegate>
{
    BOOL audioEncodingIsFinished, videoEncodingIsFinished; // set when the respective track runs out of samples
    GPUImageMovieWriter *synchronizedMovieWriter;          // non-nil when the writer pulls frames via ready callbacks
    AVAssetReader *reader;                                 // active reader for URL/asset-based playback
    AVPlayerItemVideoOutput *playerItemOutput;             // pixel-buffer output for AVPlayerItem-based playback
    CADisplayLink *displayLink;                            // paces pixel-buffer pulls during player-item playback
    CMTime previousFrameTime, processingFrameTime;         // last emitted frame time / current frame time (drives -progress)
    CFAbsoluteTime previousActualFrameTime;                // wall-clock time of the previously emitted frame
    BOOL keepLooping;                                      // YES while shouldRepeat playback should restart at end of file
    GLuint luminanceTexture, chrominanceTexture;           // Y and CbCr plane textures for YUV input
    GLProgram *yuvConversionProgram;                       // shader program converting biplanar YUV to RGB
    GLint yuvConversionPositionAttribute, yuvConversionTextureCoordinateAttribute;
    GLint yuvConversionLuminanceTextureUniform, yuvConversionChrominanceTextureUniform;
    GLint yuvConversionMatrixUniform;
    const GLfloat *_preferredConversion;                   // active YUV->RGB color-conversion matrix

    BOOL isFullYUVRange;                                   // full-range vs. video-range YUV decode
    int imageBufferWidth, imageBufferHeight;               // dimensions of the last YUV frame uploaded
}

// Kicks off reading from self.asset (shared by the URL and asset entry points).
- (void)processAsset;

@end
@implementation GPUImageMovie

// Explicit synthesis kept to pin the ivar names used throughout this file.
@synthesize url = _url;
@synthesize asset = _asset;
@synthesize runBenchmark = _runBenchmark;
@synthesize playAtActualSpeed = _playAtActualSpeed;
@synthesize delegate = _delegate;
@synthesize shouldRepeat = _shouldRepeat;

#pragma mark -
#pragma mark Initialization and teardown
// Initializer for file-based playback: stores the URL and defers asset
// creation until -startProcessing.
- (id)initWithURL:(NSURL *)url;
{
    self = [super init];
    if (self == nil)
    {
        return nil;
    }

    [self yuvConversionSetup];

    self.url = url;
    self.asset = nil;

    return self;
}
// Initializer for playback from an already-loaded AVAsset (no URL involved).
- (id)initWithAsset:(AVAsset *)asset;
{
    self = [super init];
    if (self == nil)
    {
        return nil;
    }

    [self yuvConversionSetup];

    self.url = nil;
    self.asset = asset;

    return self;
}
// Initializer for pull-based playback from an AVPlayerItem; -startProcessing
// will route to -processPlayerItem.
- (id)initWithPlayerItem:(AVPlayerItem *)playerItem;
{
    self = [super init];
    if (self == nil)
    {
        return nil;
    }

    [self yuvConversionSetup];

    self.url = nil;
    self.asset = nil;
    self.playerItem = playerItem;

    return self;
}
// Builds the YUV->RGB conversion shader program and caches its attribute and
// uniform handles. Only needed when fast texture upload is available, since
// frames then arrive as biplanar YUV rather than BGRA. Runs synchronously on
// the video processing queue because it touches shared GL state.
- (void)yuvConversionSetup;
{
    if ([GPUImageContext supportsFastTextureUpload])
    {
        runSynchronouslyOnVideoProcessingQueue(^{
            [GPUImageContext useImageProcessingContext];

            // Defaults; refined per-frame from each buffer's color attachments
            // in -processMovieFrame:withSampleTime:.
            _preferredConversion = kColorConversion709;
            isFullYUVRange = YES;
            yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageYUVFullRangeConversionForLAFragmentShaderString];

            if (!yuvConversionProgram.initialized)
            {
                // Attributes must be registered before the program is linked.
                [yuvConversionProgram addAttribute:@"position"];
                [yuvConversionProgram addAttribute:@"inputTextureCoordinate"];

                if (![yuvConversionProgram link])
                {
                    // Dump all three logs before asserting; a link failure here
                    // is a programmer error (bad shader source), not recoverable.
                    NSString *progLog = [yuvConversionProgram programLog];
                    NSLog(@"Program link log: %@", progLog);
                    NSString *fragLog = [yuvConversionProgram fragmentShaderLog];
                    NSLog(@"Fragment shader compile log: %@", fragLog);
                    NSString *vertLog = [yuvConversionProgram vertexShaderLog];
                    NSLog(@"Vertex shader compile log: %@", vertLog);
                    yuvConversionProgram = nil;
                    NSAssert(NO, @"Filter shader link failed");
                }
            }

            yuvConversionPositionAttribute = [yuvConversionProgram attributeIndex:@"position"];
            yuvConversionTextureCoordinateAttribute = [yuvConversionProgram attributeIndex:@"inputTextureCoordinate"];
            yuvConversionLuminanceTextureUniform = [yuvConversionProgram uniformIndex:@"luminanceTexture"];
            yuvConversionChrominanceTextureUniform = [yuvConversionProgram uniformIndex:@"chrominanceTexture"];
            yuvConversionMatrixUniform = [yuvConversionProgram uniformIndex:@"colorConversionMatrix"];

            [GPUImageContext setActiveShaderProgram:yuvConversionProgram];

            // Enabled once here; the draw in -convertYUVToRGBOutput relies on it.
            glEnableVertexAttribArray(yuvConversionPositionAttribute);
            glEnableVertexAttribArray(yuvConversionTextureCoordinateAttribute);
        });
    }
}
- (void)dealloc
{
    // Intentionally empty: display-link teardown moved into -endProcessing.
    // Historical cleanup kept for reference:
    //if (self.playerItem && (displayLink != nil))
    //{
    //    [displayLink invalidate]; // remove from all run loops
    //    displayLink = nil;
    //}
}
- #pragma mark -
- #pragma mark Movie processing
// Hands frame pacing over to the movie writer: instead of this class pushing
// frames in a tight loop, the writer pulls them through the ready callbacks
// installed in -processAsset.
- (void)enableSynchronizedEncodingUsingMovieWriter:(GPUImageMovieWriter *)movieWriter;
{
    movieWriter.encodingLiveVideo = NO;
    synchronizedMovieWriter = movieWriter;
}
// Begins playback/processing. Routes to the player-item or asset paths when
// those inputs were supplied directly; otherwise loads the URL's tracks
// asynchronously and then processes the resulting asset.
- (void)startProcessing
{
    if( self.playerItem ) {
        [self processPlayerItem];
        return;
    }

    if(self.url == nil)
    {
        [self processAsset];
        return;
    }

    if (_shouldRepeat) keepLooping = YES;

    previousFrameTime = kCMTimeZero;
    previousActualFrameTime = CFAbsoluteTimeGetCurrent();

    // Precise duration/timing so sample timestamps line up during re-encoding.
    NSDictionary *inputOptions = [NSDictionary dictionaryWithObject:[NSNumber numberWithBool:YES] forKey:AVURLAssetPreferPreciseDurationAndTimingKey];
    AVURLAsset *inputAsset = [[AVURLAsset alloc] initWithURL:self.url options:inputOptions];

    // __block so the handler can break the retain on self once processing starts.
    GPUImageMovie __block *blockSelf = self;

    [inputAsset loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"tracks"] completionHandler: ^{
        NSError *error = nil;
        AVKeyValueStatus tracksStatus = [inputAsset statusOfValueForKey:@"tracks" error:&error];
        if (tracksStatus != AVKeyValueStatusLoaded)
        {
            // Previously this failure was swallowed silently; surface it so
            // callers can diagnose unreadable/missing media.
            NSLog(@"GPUImageMovie: failed to load tracks for %@ (status %ld): %@", blockSelf.url, (long)tracksStatus, error);
            blockSelf = nil;
            return;
        }
        blockSelf.asset = inputAsset;
        [blockSelf processAsset];
        blockSelf = nil;
    }];
}
// Builds an AVAssetReader for self.asset with a video track output (YUV when
// fast texture upload is available, BGRA otherwise) and, when an audio
// encoding target is attached, an audio track output.
// Returns nil (with a logged error) if the reader cannot be created or the
// asset has no video track.
- (AVAssetReader*)createAssetReader
{
    NSError *error = nil;
    AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:self.asset error:&error];
    if (assetReader == nil)
    {
        // Previously unchecked; a nil reader made every later call a silent no-op.
        NSLog(@"GPUImageMovie: could not create asset reader: %@", error);
        return nil;
    }

    NSMutableDictionary *outputSettings = [NSMutableDictionary dictionary];
    if ([GPUImageContext supportsFastTextureUpload]) {
        [outputSettings setObject:@(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) forKey:(id)kCVPixelBufferPixelFormatTypeKey];
        isFullYUVRange = YES;
    }
    else {
        [outputSettings setObject:@(kCVPixelFormatType_32BGRA) forKey:(id)kCVPixelBufferPixelFormatTypeKey];
        isFullYUVRange = NO;
    }

    // Guard instead of objectAtIndex:0, which throws for audio-only assets.
    AVAssetTrack *videoTrack = [[self.asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    if (videoTrack == nil)
    {
        NSLog(@"GPUImageMovie: asset has no video track: %@", self.asset);
        return nil;
    }

    // Maybe set alwaysCopiesSampleData to NO on iOS 5.0 for faster video decoding
    AVAssetReaderTrackOutput *readerVideoTrackOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:videoTrack outputSettings:outputSettings];
    readerVideoTrackOutput.alwaysCopiesSampleData = NO;
    [assetReader addOutput:readerVideoTrackOutput];

    NSArray *audioTracks = [self.asset tracksWithMediaType:AVMediaTypeAudio];
    BOOL shouldRecordAudioTrack = (([audioTracks count] > 0) && (self.audioEncodingTarget != nil) );
    if (shouldRecordAudioTrack)
    {
        [self.audioEncodingTarget setShouldInvalidateAudioSampleWhenDone:YES];

        // This might need to be extended to handle movies with more than one audio track
        AVAssetTrack* audioTrack = [audioTracks objectAtIndex:0];
        AVAssetReaderTrackOutput *readerAudioTrackOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:nil];
        readerAudioTrackOutput.alwaysCopiesSampleData = NO;
        [assetReader addOutput:readerAudioTrackOutput];
    }

    return assetReader;
}
// Drains the asset reader, dispatching video and audio samples into the
// pipeline. Two modes: writer-paced (a synchronized movie writer pulls frames
// through ready callbacks) or self-paced (a tight loop on this thread).
- (void)processAsset
{
    reader = [self createAssetReader];

    AVAssetReaderOutput *readerVideoTrackOutput = nil;
    AVAssetReaderOutput *readerAudioTrackOutput = nil;

    // Audio is considered "finished" unless the reader actually has an audio output.
    audioEncodingIsFinished = YES;
    for( AVAssetReaderOutput *output in reader.outputs ) {
        if( [output.mediaType isEqualToString:AVMediaTypeAudio] ) {
            audioEncodingIsFinished = NO;
            readerAudioTrackOutput = output;
        }
        else if( [output.mediaType isEqualToString:AVMediaTypeVideo] ) {
            readerVideoTrackOutput = output;
        }
    }

    if ([reader startReading] == NO)
    {
        NSLog(@"Error reading from file at URL: %@", self.url);
        return;
    }

    // __unsafe_unretained avoids a retain cycle through the writer's callbacks.
    __unsafe_unretained GPUImageMovie *weakSelf = self;

    if (synchronizedMovieWriter != nil)
    {
        // Writer-paced: frames are pulled whenever the writer's inputs are ready.
        [synchronizedMovieWriter setVideoInputReadyCallback:^{
            return [weakSelf readNextVideoFrameFromOutput:readerVideoTrackOutput];
        }];

        [synchronizedMovieWriter setAudioInputReadyCallback:^{
            return [weakSelf readNextAudioSampleFromOutput:readerAudioTrackOutput];
        }];

        [synchronizedMovieWriter enableSynchronizationCallbacks];
    }
    else
    {
        // Self-paced: drain on this thread until the reader stops or looping ends.
        while (reader.status == AVAssetReaderStatusReading && (!_shouldRepeat || keepLooping))
        {
            [weakSelf readNextVideoFrameFromOutput:readerVideoTrackOutput];

            if ( (readerAudioTrackOutput) && (!audioEncodingIsFinished) )
            {
                [weakSelf readNextAudioSampleFromOutput:readerAudioTrackOutput];
            }
        }

        // Bug fix: compare against the AVAssetReader status constant, not the
        // AVAssetWriter one — they only worked before by sharing a raw value.
        if (reader.status == AVAssetReaderStatusCompleted) {

            [reader cancelReading];

            if (keepLooping) {
                reader = nil;
                dispatch_async(dispatch_get_main_queue(), ^{
                    [self startProcessing];
                });
            } else {
                [weakSelf endProcessing];
            }
        }
    }
}
// Sets up pull-based playback from an AVPlayerItem: a display link paces
// -displayLinkCallback:, and an AVPlayerItemVideoOutput supplies pixel buffers
// in whichever format matches the active upload path.
- (void)processPlayerItem
{
    runSynchronouslyOnVideoProcessingQueue(^{
        // NOTE(review): the display link is attached to the run loop of the
        // video-processing queue's current thread — dispatch-queue threads do
        // not normally service run loops, so confirm the callback actually
        // fires in this configuration.
        displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(displayLinkCallback:)];
        [displayLink addToRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
        // Paused until the output reports data (-outputMediaDataWillChange:).
        [displayLink setPaused:YES];

        dispatch_queue_t videoProcessingQueue = [GPUImageContext sharedContextQueue];
        // Request the same pixel format the reader-based path uses: biplanar
        // full-range YUV when fast texture upload works, BGRA otherwise.
        NSMutableDictionary *pixBuffAttributes = [NSMutableDictionary dictionary];
        if ([GPUImageContext supportsFastTextureUpload]) {
            [pixBuffAttributes setObject:@(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) forKey:(id)kCVPixelBufferPixelFormatTypeKey];
        }
        else {
            [pixBuffAttributes setObject:@(kCVPixelFormatType_32BGRA) forKey:(id)kCVPixelBufferPixelFormatTypeKey];
        }
        playerItemOutput = [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:pixBuffAttributes];
        [playerItemOutput setDelegate:self queue:videoProcessingQueue];

        [_playerItem addOutput:playerItemOutput];
        [playerItemOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0.1];
    });
}
// AVPlayerItemOutputPullDelegate: new media data is about to arrive — resume
// the display link so -displayLinkCallback: starts pulling buffers again.
- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender
{
	// Restart display link.
	[displayLink setPaused:NO];
}
// Invoked once per vsync. Projects the display link's timestamp one frame
// ahead to find the item time the screen will show next, then pulls and
// processes the pixel buffer for that time if one is available.
- (void)displayLinkCallback:(CADisplayLink *)sender
{
    CFTimeInterval upcomingVSync = [sender timestamp] + [sender duration];
    CMTime itemTime = [playerItemOutput itemTimeForHostTime:upcomingVSync];

    if (![playerItemOutput hasNewPixelBufferForItemTime:itemTime])
    {
        return;
    }

    CVPixelBufferRef frameBuffer = [playerItemOutput copyPixelBufferForItemTime:itemTime itemTimeForDisplay:NULL];
    if (frameBuffer == NULL)
    {
        return;
    }

    __unsafe_unretained GPUImageMovie *weakSelf = self;
    runSynchronouslyOnVideoProcessingQueue(^{
        [weakSelf processMovieFrame:frameBuffer withSampleTime:itemTime];
        CFRelease(frameBuffer); // balances the copyPixelBuffer... +1
    });
}
// Pulls one video sample from the reader output and dispatches it into the
// pipeline. Returns YES when a frame was consumed, NO otherwise. When
// playAtActualSpeed is set, sleeps so decoding tracks presentation time.
- (BOOL)readNextVideoFrameFromOutput:(AVAssetReaderOutput *)readerVideoTrackOutput;
{
    if (reader.status == AVAssetReaderStatusReading && ! videoEncodingIsFinished)
    {
        CMSampleBufferRef sampleBufferRef = [readerVideoTrackOutput copyNextSampleBuffer];
        if (sampleBufferRef)
        {
            if (_playAtActualSpeed)
            {
                // Do this outside of the video processing queue to not slow that down while waiting
                CMTime currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBufferRef);
                CMTime differenceFromLastFrame = CMTimeSubtract(currentSampleTime, previousFrameTime);
                CFAbsoluteTime currentActualTime = CFAbsoluteTimeGetCurrent();

                CGFloat frameTimeDifference = CMTimeGetSeconds(differenceFromLastFrame);
                CGFloat actualTimeDifference = currentActualTime - previousActualFrameTime;

                if (frameTimeDifference > actualTimeDifference)
                {
                    usleep(1000000.0 * (frameTimeDifference - actualTimeDifference));
                }

                previousFrameTime = currentSampleTime;
                previousActualFrameTime = CFAbsoluteTimeGetCurrent();
            }

            __unsafe_unretained GPUImageMovie *weakSelf = self;
            runSynchronouslyOnVideoProcessingQueue(^{
                [weakSelf processMovieFrame:sampleBufferRef];
                CMSampleBufferInvalidate(sampleBufferRef);
                CFRelease(sampleBufferRef);
            });

            return YES;
        }
        else
        {
            // Out of video samples; unless looping, mark the track done and
            // finish once audio has drained too.
            if (!keepLooping) {
                videoEncodingIsFinished = YES;
                if( videoEncodingIsFinished && audioEncodingIsFinished )
                    [self endProcessing];
            }
        }
    }
    else if (synchronizedMovieWriter != nil)
    {
        // Consistency fix: mirror the audio path by treating failure and
        // cancellation as terminal too, so a failed read can't stall the writer.
        if (reader.status == AVAssetReaderStatusCompleted || reader.status == AVAssetReaderStatusFailed ||
            reader.status == AVAssetReaderStatusCancelled)
        {
            [self endProcessing];
        }
    }

    return NO;
}
// Pulls one audio sample from the reader output and forwards it to the audio
// encoding target. Returns YES when a sample was consumed, NO otherwise.
- (BOOL)readNextAudioSampleFromOutput:(AVAssetReaderOutput *)readerAudioTrackOutput;
{
    if (reader.status != AVAssetReaderStatusReading || audioEncodingIsFinished)
    {
        // Not actively reading: under writer-paced encoding, any terminal
        // reader status means the whole session should wind down.
        if (synchronizedMovieWriter != nil &&
            (reader.status == AVAssetReaderStatusCompleted ||
             reader.status == AVAssetReaderStatusFailed ||
             reader.status == AVAssetReaderStatusCancelled))
        {
            [self endProcessing];
        }
        return NO;
    }

    CMSampleBufferRef sampleBuffer = [readerAudioTrackOutput copyNextSampleBuffer];
    if (sampleBuffer == NULL)
    {
        // Out of audio samples; unless looping, mark the track done and finish
        // once video has drained too.
        if (!keepLooping)
        {
            audioEncodingIsFinished = YES;
            if (videoEncodingIsFinished && audioEncodingIsFinished)
            {
                [self endProcessing];
            }
        }
        return NO;
    }

    [self.audioEncodingTarget processAudioBuffer:sampleBuffer];
    CFRelease(sampleBuffer);
    return YES;
}
// Unpacks a sample buffer into its pixel buffer and presentation timestamp,
// records the time for -progress, and forwards to the core frame routine.
- (void)processMovieFrame:(CMSampleBufferRef)movieSampleBuffer;
{
    CMTime frameTime = CMSampleBufferGetOutputPresentationTimeStamp(movieSampleBuffer);
    CVImageBufferRef frameBuffer = CMSampleBufferGetImageBuffer(movieSampleBuffer);

    processingFrameTime = frameTime;
    [self processMovieFrame:frameBuffer withSampleTime:frameTime];
}
// Fraction of the asset processed so far, in [0, 1]. Returns 0 before
// reading starts (or when timing is unknown) and 1 once the reader completes.
- (float)progress
{
    if ( AVAssetReaderStatusReading == reader.status )
    {
        // Guard both timescales: an invalid frame time or unknown asset
        // duration would otherwise yield NaN/Inf from the divisions below.
        if ((processingFrameTime.timescale == 0) || (self.asset.duration.timescale == 0))
        {
            return 0.f;
        }

        float current = processingFrameTime.value * 1.0f / processingFrameTime.timescale;
        float duration = self.asset.duration.value * 1.0f / self.asset.duration.timescale;
        if (duration <= 0.0f)
        {
            return 0.f;
        }
        return current / duration;
    }
    else if ( AVAssetReaderStatusCompleted == reader.status )
    {
        return 1.f;
    }
    else
    {
        return 0.f;
    }
}
// Core per-frame processing: uploads the decoded frame to GL and notifies all
// targets. Chooses the color-conversion matrix from the buffer's YCbCr
// attachment, then takes one of two paths:
//  - fast texture upload (biplanar YUV): maps the Y and CbCr planes into GL
//    textures via the texture cache and runs the YUV->RGB conversion shader;
//  - slow path (BGRA): copies pixel data into a framebuffer texture with
//    glTexImage2D.
- (void)processMovieFrame:(CVPixelBufferRef)movieFrame withSampleTime:(CMTime)currentSampleTime
{
    int bufferHeight = (int) CVPixelBufferGetHeight(movieFrame);
    int bufferWidth = (int) CVPixelBufferGetWidth(movieFrame);

    // 709 only when the buffer explicitly declares a non-601 matrix;
    // otherwise fall back to 601 (full- or video-range). Same decision table
    // as the original nested conditionals, condensed.
    CFTypeRef colorAttachments = CVBufferGetAttachment(movieFrame, kCVImageBufferYCbCrMatrixKey, NULL);
    if ((colorAttachments != NULL) &&
        (CFStringCompare(colorAttachments, kCVImageBufferYCbCrMatrix_ITU_R_601_4, 0) != kCFCompareEqualTo))
    {
        _preferredConversion = kColorConversion709;
    }
    else
    {
        _preferredConversion = isFullYUVRange ? kColorConversion601FullRange : kColorConversion601;
    }

    CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent();

    if ([GPUImageContext supportsFastTextureUpload])
    {
        if (CVPixelBufferGetPlaneCount(movieFrame) > 0) // Check for YUV planar inputs to do RGB conversion
        {
            CVOpenGLESTextureRef luminanceTextureRef = NULL;
            CVOpenGLESTextureRef chrominanceTextureRef = NULL;

            // Bug fix: update the cached size when EITHER dimension changes.
            // The old `&&` test skipped frames where only one dimension differed.
            if ( (imageBufferWidth != bufferWidth) || (imageBufferHeight != bufferHeight) )
            {
                imageBufferWidth = bufferWidth;
                imageBufferHeight = bufferHeight;
            }

            CVReturn err;

            // Y-plane. (The deviceSupportsRedTextures if/else branches were
            // byte-identical, so the check has been collapsed.)
            glActiveTexture(GL_TEXTURE4);
            err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], movieFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE, bufferWidth, bufferHeight, GL_LUMINANCE, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef);
            if (err)
            {
                NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
            }

            luminanceTexture = CVOpenGLESTextureGetName(luminanceTextureRef);
            glBindTexture(GL_TEXTURE_2D, luminanceTexture);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

            // UV-plane: half resolution, Cb/Cr packed as luminance+alpha.
            glActiveTexture(GL_TEXTURE5);
            err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], movieFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE_ALPHA, bufferWidth/2, bufferHeight/2, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef);
            if (err)
            {
                NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
            }

            chrominanceTexture = CVOpenGLESTextureGetName(chrominanceTextureRef);
            glBindTexture(GL_TEXTURE_2D, chrominanceTexture);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

            // Convert the two planes into an RGB framebuffer, then hand that
            // framebuffer to every target.
            [self convertYUVToRGBOutput];

            for (id<GPUImageInput> currentTarget in targets)
            {
                NSInteger indexOfObject = [targets indexOfObject:currentTarget];
                NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
                [currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:targetTextureIndex];
                [currentTarget setInputFramebuffer:outputFramebuffer atIndex:targetTextureIndex];
            }

            [outputFramebuffer unlock];

            for (id<GPUImageInput> currentTarget in targets)
            {
                NSInteger indexOfObject = [targets indexOfObject:currentTarget];
                NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
                [currentTarget newFrameReadyAtTime:currentSampleTime atIndex:targetTextureIndex];
            }

            CVPixelBufferUnlockBaseAddress(movieFrame, 0);
            CFRelease(luminanceTextureRef);
            CFRelease(chrominanceTextureRef);
        }
        else
        {
            // TODO: the non-planar (BGRA) fast-texture-upload path has never
            // been meshed with the framebuffer cache; such frames are
            // currently dropped here. (A disabled legacy implementation
            // existed at this spot — see version control history.)
        }
    }
    else
    {
        // Slow path: copy BGRA pixel data into a cached framebuffer texture.
        CVPixelBufferLockBaseAddress(movieFrame, 0);

        outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:CGSizeMake(bufferWidth, bufferHeight) textureOptions:self.outputTextureOptions onlyTexture:YES];
        glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]);

        // Using BGRA extension to pull in video frame data directly
        glTexImage2D(GL_TEXTURE_2D,
                     0,
                     self.outputTextureOptions.internalFormat,
                     bufferWidth,
                     bufferHeight,
                     0,
                     self.outputTextureOptions.format,
                     self.outputTextureOptions.type,
                     CVPixelBufferGetBaseAddress(movieFrame));

        for (id<GPUImageInput> currentTarget in targets)
        {
            NSInteger indexOfObject = [targets indexOfObject:currentTarget];
            NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
            [currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:targetTextureIndex];
            [currentTarget setInputFramebuffer:outputFramebuffer atIndex:targetTextureIndex];
        }

        [outputFramebuffer unlock];

        for (id<GPUImageInput> currentTarget in targets)
        {
            NSInteger indexOfObject = [targets indexOfObject:currentTarget];
            NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
            [currentTarget newFrameReadyAtTime:currentSampleTime atIndex:targetTextureIndex];
        }

        CVPixelBufferUnlockBaseAddress(movieFrame, 0);
    }

    if (_runBenchmark)
    {
        CFAbsoluteTime currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime);
        NSLog(@"Current frame time : %f ms", 1000.0 * currentFrameTime);
    }
}
// Tears down playback. Order matters: looping is stopped and the display link
// paused first, every target is told the stream ended, writer callbacks are
// replaced with no-ops so the writer stops pulling, the display link is fully
// invalidated, and the delegate is notified last (then dropped so the
// completion message is delivered exactly once).
- (void)endProcessing;
{
    keepLooping = NO;
    [displayLink setPaused:YES];

    for (id<GPUImageInput> currentTarget in targets)
    {
        [currentTarget endProcessing];
    }

    if (synchronizedMovieWriter != nil)
    {
        // No-op callbacks neutralize the writer's pull loop.
        [synchronizedMovieWriter setVideoInputReadyCallback:^{return NO;}];
        [synchronizedMovieWriter setAudioInputReadyCallback:^{return NO;}];
    }

    if (self.playerItem && (displayLink != nil))
    {
        [displayLink invalidate]; // remove from all run loops
        displayLink = nil;
    }

    if ([self.delegate respondsToSelector:@selector(didCompletePlayingMovie)]) {
        [self.delegate didCompletePlayingMovie];
    }
    self.delegate = nil;
}
// Aborts any in-flight asset read, then performs the standard teardown.
- (void)cancelProcessing
{
    // Messaging nil is a no-op, so no explicit reader check is needed.
    [reader cancelReading];
    [self endProcessing];
}
// Renders the previously created Y/CbCr plane textures through the conversion
// shader into a fresh outputFramebuffer, producing RGB for downstream targets.
// Assumes luminanceTexture, chrominanceTexture, imageBufferWidth/Height, and
// _preferredConversion were set by -processMovieFrame:withSampleTime:.
- (void)convertYUVToRGBOutput;
{
    [GPUImageContext setActiveShaderProgram:yuvConversionProgram];
    outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:CGSizeMake(imageBufferWidth, imageBufferHeight) onlyTexture:NO];
    [outputFramebuffer activateFramebuffer];

    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    // Full-screen quad in normalized device coordinates.
    static const GLfloat squareVertices[] = {
        -1.0f, -1.0f,
        1.0f, -1.0f,
        -1.0f, 1.0f,
        1.0f, 1.0f,
    };

    static const GLfloat textureCoordinates[] = {
        0.0f, 0.0f,
        1.0f, 0.0f,
        0.0f, 1.0f,
        1.0f, 1.0f,
    };

    // Units 4 and 5 match the glActiveTexture calls used when the plane
    // textures were created in -processMovieFrame:withSampleTime:.
    glActiveTexture(GL_TEXTURE4);
    glBindTexture(GL_TEXTURE_2D, luminanceTexture);
    glUniform1i(yuvConversionLuminanceTextureUniform, 4);

    glActiveTexture(GL_TEXTURE5);
    glBindTexture(GL_TEXTURE_2D, chrominanceTexture);
    glUniform1i(yuvConversionChrominanceTextureUniform, 5);

    glUniformMatrix3fv(yuvConversionMatrixUniform, 1, GL_FALSE, _preferredConversion);

    glVertexAttribPointer(yuvConversionPositionAttribute, 2, GL_FLOAT, 0, 0, squareVertices);
    glVertexAttribPointer(yuvConversionTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);

    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
}
// Exposes the underlying asset reader (nil during player-item playback).
- (AVAssetReader*)assetReader {
    return reader;
}

// YES once the audio track has been fully drained (or none exists).
- (BOOL)audioEncodingIsFinished {
    return audioEncodingIsFinished;
}

// YES once the video track has been fully drained.
- (BOOL)videoEncodingIsFinished {
    return videoEncodingIsFinished;
}
- @end
|