GPUImageMovie.m

#import "GPUImageMovie.h"
#import "GPUImageMovieWriter.h"
#import "GPUImageFilter.h"
#import "GPUImageVideoCamera.h"

@interface GPUImageMovie () <AVPlayerItemOutputPullDelegate>
{
    BOOL audioEncodingIsFinished, videoEncodingIsFinished;
    GPUImageMovieWriter *synchronizedMovieWriter;
    AVAssetReader *reader;
    AVPlayerItemVideoOutput *playerItemOutput;
    CADisplayLink *displayLink;
    CMTime previousFrameTime, processingFrameTime;
    CFAbsoluteTime previousActualFrameTime;
    BOOL keepLooping;

    GLuint luminanceTexture, chrominanceTexture;

    GLProgram *yuvConversionProgram;
    GLint yuvConversionPositionAttribute, yuvConversionTextureCoordinateAttribute;
    GLint yuvConversionLuminanceTextureUniform, yuvConversionChrominanceTextureUniform;
    GLint yuvConversionMatrixUniform;
    const GLfloat *_preferredConversion;

    BOOL isFullYUVRange;

    int imageBufferWidth, imageBufferHeight;
}

- (void)processAsset;

@end

@implementation GPUImageMovie

@synthesize url = _url;
@synthesize asset = _asset;
@synthesize runBenchmark = _runBenchmark;
@synthesize playAtActualSpeed = _playAtActualSpeed;
@synthesize delegate = _delegate;
@synthesize shouldRepeat = _shouldRepeat;

#pragma mark -
#pragma mark Initialization and teardown

- (id)initWithURL:(NSURL *)url;
{
    if (!(self = [super init]))
    {
        return nil;
    }

    [self yuvConversionSetup];

    self.url = url;
    self.asset = nil;

    return self;
}

- (id)initWithAsset:(AVAsset *)asset;
{
    if (!(self = [super init]))
    {
        return nil;
    }

    [self yuvConversionSetup];

    self.url = nil;
    self.asset = asset;

    return self;
}

- (id)initWithPlayerItem:(AVPlayerItem *)playerItem;
{
    if (!(self = [super init]))
    {
        return nil;
    }

    [self yuvConversionSetup];

    self.url = nil;
    self.asset = nil;
    self.playerItem = playerItem;

    return self;
}
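
// A minimal playback sketch for the initializers above (illustrative, not part
// of this file: movieURL and filterView are assumed to be supplied by the
// caller, and GPUImageSepiaFilter is just one example filter from GPUImage):
//
//     GPUImageMovie *movie = [[GPUImageMovie alloc] initWithURL:movieURL];
//     GPUImageSepiaFilter *filter = [[GPUImageSepiaFilter alloc] init];
//     [movie addTarget:filter];
//     [filter addTarget:filterView]; // a GPUImageView in the caller's view hierarchy
//     [movie startProcessing];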

- (void)yuvConversionSetup;
{
    if ([GPUImageContext supportsFastTextureUpload])
    {
        runSynchronouslyOnVideoProcessingQueue(^{
            [GPUImageContext useImageProcessingContext];

            _preferredConversion = kColorConversion709;
            isFullYUVRange = YES;
            yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageYUVFullRangeConversionForLAFragmentShaderString];

            if (!yuvConversionProgram.initialized)
            {
                [yuvConversionProgram addAttribute:@"position"];
                [yuvConversionProgram addAttribute:@"inputTextureCoordinate"];

                if (![yuvConversionProgram link])
                {
                    NSString *progLog = [yuvConversionProgram programLog];
                    NSLog(@"Program link log: %@", progLog);
                    NSString *fragLog = [yuvConversionProgram fragmentShaderLog];
                    NSLog(@"Fragment shader compile log: %@", fragLog);
                    NSString *vertLog = [yuvConversionProgram vertexShaderLog];
                    NSLog(@"Vertex shader compile log: %@", vertLog);
                    yuvConversionProgram = nil;
                    NSAssert(NO, @"Filter shader link failed");
                }
            }

            yuvConversionPositionAttribute = [yuvConversionProgram attributeIndex:@"position"];
            yuvConversionTextureCoordinateAttribute = [yuvConversionProgram attributeIndex:@"inputTextureCoordinate"];
            yuvConversionLuminanceTextureUniform = [yuvConversionProgram uniformIndex:@"luminanceTexture"];
            yuvConversionChrominanceTextureUniform = [yuvConversionProgram uniformIndex:@"chrominanceTexture"];
            yuvConversionMatrixUniform = [yuvConversionProgram uniformIndex:@"colorConversionMatrix"];

            [GPUImageContext setActiveShaderProgram:yuvConversionProgram];

            glEnableVertexAttribArray(yuvConversionPositionAttribute);
            glEnableVertexAttribArray(yuvConversionTextureCoordinateAttribute);
        });
    }
}

- (void)dealloc
{
    // Moved into endProcessing
    //if (self.playerItem && (displayLink != nil))
    //{
    //    [displayLink invalidate]; // remove from all run loops
    //    displayLink = nil;
    //}
}

#pragma mark -
#pragma mark Movie processing

- (void)enableSynchronizedEncodingUsingMovieWriter:(GPUImageMovieWriter *)movieWriter;
{
    synchronizedMovieWriter = movieWriter;
    movieWriter.encodingLiveVideo = NO;
}
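
// Synchronized transcoding sketch (illustrative; outputURL, movie, and filter
// are assumed to exist in the caller, and the output size is arbitrary):
//
//     GPUImageMovieWriter *writer = [[GPUImageMovieWriter alloc] initWithMovieURL:outputURL size:CGSizeMake(640.0, 480.0)];
//     [filter addTarget:writer];
//     [movie enableSynchronizedEncodingUsingMovieWriter:writer];
//     [writer startRecording];
//     [movie startProcessing];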

- (void)startProcessing
{
    if( self.playerItem ) {
        [self processPlayerItem];
        return;
    }
    if(self.url == nil)
    {
        [self processAsset];
        return;
    }

    if (_shouldRepeat) keepLooping = YES;

    previousFrameTime = kCMTimeZero;
    previousActualFrameTime = CFAbsoluteTimeGetCurrent();

    NSDictionary *inputOptions = [NSDictionary dictionaryWithObject:[NSNumber numberWithBool:YES] forKey:AVURLAssetPreferPreciseDurationAndTimingKey];
    AVURLAsset *inputAsset = [[AVURLAsset alloc] initWithURL:self.url options:inputOptions];

    GPUImageMovie __block *blockSelf = self;

    [inputAsset loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"tracks"] completionHandler: ^{
        NSError *error = nil;
        AVKeyValueStatus tracksStatus = [inputAsset statusOfValueForKey:@"tracks" error:&error];
        if (tracksStatus != AVKeyValueStatusLoaded)
        {
            return;
        }
        blockSelf.asset = inputAsset;
        [blockSelf processAsset];
        blockSelf = nil;
    }];
}

- (AVAssetReader*)createAssetReader
{
    NSError *error = nil;
    AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:self.asset error:&error];

    NSMutableDictionary *outputSettings = [NSMutableDictionary dictionary];
    if ([GPUImageContext supportsFastTextureUpload]) {
        [outputSettings setObject:@(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) forKey:(id)kCVPixelBufferPixelFormatTypeKey];
        isFullYUVRange = YES;
    }
    else {
        [outputSettings setObject:@(kCVPixelFormatType_32BGRA) forKey:(id)kCVPixelBufferPixelFormatTypeKey];
        isFullYUVRange = NO;
    }

    // Maybe set alwaysCopiesSampleData to NO on iOS 5.0 for faster video decoding
    AVAssetReaderTrackOutput *readerVideoTrackOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:[[self.asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] outputSettings:outputSettings];
    readerVideoTrackOutput.alwaysCopiesSampleData = NO;
    [assetReader addOutput:readerVideoTrackOutput];

    NSArray *audioTracks = [self.asset tracksWithMediaType:AVMediaTypeAudio];
    BOOL shouldRecordAudioTrack = (([audioTracks count] > 0) && (self.audioEncodingTarget != nil));
    AVAssetReaderTrackOutput *readerAudioTrackOutput = nil;

    if (shouldRecordAudioTrack)
    {
        [self.audioEncodingTarget setShouldInvalidateAudioSampleWhenDone:YES];

        // This might need to be extended to handle movies with more than one audio track
        AVAssetTrack* audioTrack = [audioTracks objectAtIndex:0];
        readerAudioTrackOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:nil];
        readerAudioTrackOutput.alwaysCopiesSampleData = NO;
        [assetReader addOutput:readerAudioTrackOutput];
    }

    return assetReader;
}

- (void)processAsset
{
    reader = [self createAssetReader];

    AVAssetReaderOutput *readerVideoTrackOutput = nil;
    AVAssetReaderOutput *readerAudioTrackOutput = nil;

    audioEncodingIsFinished = YES;
    for( AVAssetReaderOutput *output in reader.outputs ) {
        if( [output.mediaType isEqualToString:AVMediaTypeAudio] ) {
            audioEncodingIsFinished = NO;
            readerAudioTrackOutput = output;
        }
        else if( [output.mediaType isEqualToString:AVMediaTypeVideo] ) {
            readerVideoTrackOutput = output;
        }
    }

    if ([reader startReading] == NO)
    {
        NSLog(@"Error reading from file at URL: %@", self.url);
        return;
    }

    __unsafe_unretained GPUImageMovie *weakSelf = self;

    if (synchronizedMovieWriter != nil)
    {
        [synchronizedMovieWriter setVideoInputReadyCallback:^{
            return [weakSelf readNextVideoFrameFromOutput:readerVideoTrackOutput];
        }];

        [synchronizedMovieWriter setAudioInputReadyCallback:^{
            return [weakSelf readNextAudioSampleFromOutput:readerAudioTrackOutput];
        }];

        [synchronizedMovieWriter enableSynchronizationCallbacks];
    }
    else
    {
        while (reader.status == AVAssetReaderStatusReading && (!_shouldRepeat || keepLooping))
        {
            [weakSelf readNextVideoFrameFromOutput:readerVideoTrackOutput];

            if ( (readerAudioTrackOutput) && (!audioEncodingIsFinished) )
            {
                [weakSelf readNextAudioSampleFromOutput:readerAudioTrackOutput];
            }
        }

        if (reader.status == AVAssetReaderStatusCompleted) {
            [reader cancelReading];

            if (keepLooping) {
                reader = nil;
                dispatch_async(dispatch_get_main_queue(), ^{
                    [self startProcessing];
                });
            } else {
                [weakSelf endProcessing];
            }
        }
    }
}

- (void)processPlayerItem
{
    runSynchronouslyOnVideoProcessingQueue(^{
        displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(displayLinkCallback:)];
        [displayLink addToRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
        [displayLink setPaused:YES];

        dispatch_queue_t videoProcessingQueue = [GPUImageContext sharedContextQueue];
        NSMutableDictionary *pixBuffAttributes = [NSMutableDictionary dictionary];
        if ([GPUImageContext supportsFastTextureUpload]) {
            [pixBuffAttributes setObject:@(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) forKey:(id)kCVPixelBufferPixelFormatTypeKey];
        }
        else {
            [pixBuffAttributes setObject:@(kCVPixelFormatType_32BGRA) forKey:(id)kCVPixelBufferPixelFormatTypeKey];
        }
        playerItemOutput = [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:pixBuffAttributes];
        [playerItemOutput setDelegate:self queue:videoProcessingQueue];

        [_playerItem addOutput:playerItemOutput];
        [playerItemOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0.1];
    });
}
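
// Player-item playback sketch (illustrative; the caller is assumed to keep a
// strong reference to the AVPlayer so playback isn't deallocated mid-stream):
//
//     AVPlayerItem *item = [AVPlayerItem playerItemWithURL:movieURL];
//     GPUImageMovie *movie = [[GPUImageMovie alloc] initWithPlayerItem:item];
//     [movie addTarget:filterView];
//     [movie startProcessing];
//     AVPlayer *player = [AVPlayer playerWithPlayerItem:item];
//     [player play];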

- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender
{
    // Restart display link.
    [displayLink setPaused:NO];
}

- (void)displayLinkCallback:(CADisplayLink *)sender
{
    /*
     The callback gets called once every Vsync.
     Using the display link's timestamp and duration, we can compute the next time the screen will be refreshed, and copy the pixel buffer for that time.
     This pixel buffer can then be processed and later rendered on screen.
     */
    // Calculate the nextVsync time, which is when the screen will be refreshed next.
    CFTimeInterval nextVSync = ([sender timestamp] + [sender duration]);

    CMTime outputItemTime = [playerItemOutput itemTimeForHostTime:nextVSync];

    if ([playerItemOutput hasNewPixelBufferForItemTime:outputItemTime]) {
        __unsafe_unretained GPUImageMovie *weakSelf = self;
        CVPixelBufferRef pixelBuffer = [playerItemOutput copyPixelBufferForItemTime:outputItemTime itemTimeForDisplay:NULL];
        if( pixelBuffer )
        {
            runSynchronouslyOnVideoProcessingQueue(^{
                [weakSelf processMovieFrame:pixelBuffer withSampleTime:outputItemTime];
                CFRelease(pixelBuffer);
            });
        }
    }
}

- (BOOL)readNextVideoFrameFromOutput:(AVAssetReaderOutput *)readerVideoTrackOutput;
{
    if (reader.status == AVAssetReaderStatusReading && !videoEncodingIsFinished)
    {
        CMSampleBufferRef sampleBufferRef = [readerVideoTrackOutput copyNextSampleBuffer];
        if (sampleBufferRef)
        {
            //NSLog(@"read a video frame: %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, CMSampleBufferGetOutputPresentationTimeStamp(sampleBufferRef))));
            if (_playAtActualSpeed)
            {
                // Do this outside of the video processing queue to not slow that down while waiting
                CMTime currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBufferRef);
                CMTime differenceFromLastFrame = CMTimeSubtract(currentSampleTime, previousFrameTime);
                CFAbsoluteTime currentActualTime = CFAbsoluteTimeGetCurrent();

                CGFloat frameTimeDifference = CMTimeGetSeconds(differenceFromLastFrame);
                CGFloat actualTimeDifference = currentActualTime - previousActualFrameTime;

                // If the media time between frames exceeds the wall-clock time that has
                // elapsed, sleep for the difference so playback matches the asset's native rate.
                if (frameTimeDifference > actualTimeDifference)
                {
                    usleep(1000000.0 * (frameTimeDifference - actualTimeDifference));
                }

                previousFrameTime = currentSampleTime;
                previousActualFrameTime = CFAbsoluteTimeGetCurrent();
            }

            __unsafe_unretained GPUImageMovie *weakSelf = self;
            runSynchronouslyOnVideoProcessingQueue(^{
                [weakSelf processMovieFrame:sampleBufferRef];
                CMSampleBufferInvalidate(sampleBufferRef);
                CFRelease(sampleBufferRef);
            });

            return YES;
        }
        else
        {
            if (!keepLooping) {
                videoEncodingIsFinished = YES;
                if( videoEncodingIsFinished && audioEncodingIsFinished )
                {
                    [self endProcessing];
                }
            }
        }
    }
    else if (synchronizedMovieWriter != nil)
    {
        if (reader.status == AVAssetReaderStatusCompleted)
        {
            [self endProcessing];
        }
    }

    return NO;
}

- (BOOL)readNextAudioSampleFromOutput:(AVAssetReaderOutput *)readerAudioTrackOutput;
{
    if (reader.status == AVAssetReaderStatusReading && !audioEncodingIsFinished)
    {
        CMSampleBufferRef audioSampleBufferRef = [readerAudioTrackOutput copyNextSampleBuffer];
        if (audioSampleBufferRef)
        {
            //NSLog(@"read an audio frame: %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, CMSampleBufferGetOutputPresentationTimeStamp(audioSampleBufferRef))));
            [self.audioEncodingTarget processAudioBuffer:audioSampleBufferRef];
            CFRelease(audioSampleBufferRef);
            return YES;
        }
        else
        {
            if (!keepLooping) {
                audioEncodingIsFinished = YES;
                if( videoEncodingIsFinished && audioEncodingIsFinished )
                {
                    [self endProcessing];
                }
            }
        }
    }
    else if (synchronizedMovieWriter != nil)
    {
        if (reader.status == AVAssetReaderStatusCompleted || reader.status == AVAssetReaderStatusFailed ||
            reader.status == AVAssetReaderStatusCancelled)
        {
            [self endProcessing];
        }
    }

    return NO;
}

- (void)processMovieFrame:(CMSampleBufferRef)movieSampleBuffer;
{
    CMTime currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(movieSampleBuffer);
    CVImageBufferRef movieFrame = CMSampleBufferGetImageBuffer(movieSampleBuffer);

    processingFrameTime = currentSampleTime;
    [self processMovieFrame:movieFrame withSampleTime:currentSampleTime];
}

- (float)progress
{
    if ( AVAssetReaderStatusReading == reader.status )
    {
        float current = processingFrameTime.value * 1.0f / processingFrameTime.timescale;
        float duration = self.asset.duration.value * 1.0f / self.asset.duration.timescale;
        return current / duration;
    }
    else if ( AVAssetReaderStatusCompleted == reader.status )
    {
        return 1.f;
    }
    else
    {
        return 0.f;
    }
}
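
// Progress polling sketch (illustrative; assumes iOS 10+ for the block-based
// NSTimer API and a movie instance owned by the caller). progress returns a
// fraction in [0, 1] of the asset's duration that has been processed:
//
//     [NSTimer scheduledTimerWithTimeInterval:0.25 repeats:YES block:^(NSTimer *timer) {
//         NSLog(@"Movie processing progress: %.0f%%", movie.progress * 100.0);
//     }];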

- (void)processMovieFrame:(CVPixelBufferRef)movieFrame withSampleTime:(CMTime)currentSampleTime
{
    int bufferHeight = (int) CVPixelBufferGetHeight(movieFrame);
    int bufferWidth = (int) CVPixelBufferGetWidth(movieFrame);

    CFTypeRef colorAttachments = CVBufferGetAttachment(movieFrame, kCVImageBufferYCbCrMatrixKey, NULL);
    if (colorAttachments != NULL)
    {
        if (CFStringCompare(colorAttachments, kCVImageBufferYCbCrMatrix_ITU_R_601_4, 0) == kCFCompareEqualTo)
        {
            if (isFullYUVRange)
            {
                _preferredConversion = kColorConversion601FullRange;
            }
            else
            {
                _preferredConversion = kColorConversion601;
            }
        }
        else
        {
            _preferredConversion = kColorConversion709;
        }
    }
    else
    {
        // No color matrix attachment on the buffer; default to BT.601.
        if (isFullYUVRange)
        {
            _preferredConversion = kColorConversion601FullRange;
        }
        else
        {
            _preferredConversion = kColorConversion601;
        }
    }

    CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent();

    if ([GPUImageContext supportsFastTextureUpload])
    {
        CVOpenGLESTextureRef luminanceTextureRef = NULL;
        CVOpenGLESTextureRef chrominanceTextureRef = NULL;

        //        if (captureAsYUV && [GPUImageContext deviceSupportsRedTextures])
        if (CVPixelBufferGetPlaneCount(movieFrame) > 0) // Check for YUV planar inputs to do RGB conversion
        {
            if ( (imageBufferWidth != bufferWidth) || (imageBufferHeight != bufferHeight) )
            {
                imageBufferWidth = bufferWidth;
                imageBufferHeight = bufferHeight;
            }

            CVReturn err;

            // Y-plane
            glActiveTexture(GL_TEXTURE4);
            err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], movieFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE, bufferWidth, bufferHeight, GL_LUMINANCE, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef);
            if (err)
            {
                NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
            }

            luminanceTexture = CVOpenGLESTextureGetName(luminanceTextureRef);
            glBindTexture(GL_TEXTURE_2D, luminanceTexture);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

            // UV-plane (interleaved Cb/Cr at half resolution)
            glActiveTexture(GL_TEXTURE5);
            err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], movieFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE_ALPHA, bufferWidth/2, bufferHeight/2, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef);
            if (err)
            {
                NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
            }

            chrominanceTexture = CVOpenGLESTextureGetName(chrominanceTextureRef);
            glBindTexture(GL_TEXTURE_2D, chrominanceTexture);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

            //            if (!allTargetsWantMonochromeData)
            //            {
            [self convertYUVToRGBOutput];
            //            }

            for (id<GPUImageInput> currentTarget in targets)
            {
                NSInteger indexOfObject = [targets indexOfObject:currentTarget];
                NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
                [currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:targetTextureIndex];
                [currentTarget setInputFramebuffer:outputFramebuffer atIndex:targetTextureIndex];
            }

            [outputFramebuffer unlock];

            for (id<GPUImageInput> currentTarget in targets)
            {
                NSInteger indexOfObject = [targets indexOfObject:currentTarget];
                NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
                [currentTarget newFrameReadyAtTime:currentSampleTime atIndex:targetTextureIndex];
            }

            CVPixelBufferUnlockBaseAddress(movieFrame, 0);
            CFRelease(luminanceTextureRef);
            CFRelease(chrominanceTextureRef);
        }
        else
        {
            // TODO: Mesh this with the new framebuffer cache
            //            CVPixelBufferLockBaseAddress(movieFrame, 0);
            //
            //            CVReturn err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, coreVideoTextureCache, movieFrame, NULL, GL_TEXTURE_2D, GL_RGBA, bufferWidth, bufferHeight, GL_BGRA, GL_UNSIGNED_BYTE, 0, &texture);
            //
            //            if (!texture || err) {
            //                NSLog(@"Movie CVOpenGLESTextureCacheCreateTextureFromImage failed (error: %d)", err);
            //                NSAssert(NO, @"Camera failure");
            //                return;
            //            }
            //
            //            outputTexture = CVOpenGLESTextureGetName(texture);
            //            //        glBindTexture(CVOpenGLESTextureGetTarget(texture), outputTexture);
            //            glBindTexture(GL_TEXTURE_2D, outputTexture);
            //            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
            //            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
            //            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
            //            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
            //
            //            for (id<GPUImageInput> currentTarget in targets)
            //            {
            //                NSInteger indexOfObject = [targets indexOfObject:currentTarget];
            //                NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
            //
            //                [currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:targetTextureIndex];
            //                [currentTarget setInputTexture:outputTexture atIndex:targetTextureIndex];
            //
            //                [currentTarget newFrameReadyAtTime:currentSampleTime atIndex:targetTextureIndex];
            //            }
            //
            //            CVPixelBufferUnlockBaseAddress(movieFrame, 0);
            //            CVOpenGLESTextureCacheFlush(coreVideoTextureCache, 0);
            //            CFRelease(texture);
            //
            //            outputTexture = 0;
        }
    }
    else
    {
        // Upload to texture
        CVPixelBufferLockBaseAddress(movieFrame, 0);

        outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:CGSizeMake(bufferWidth, bufferHeight) textureOptions:self.outputTextureOptions onlyTexture:YES];

        glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]);

        // Using BGRA extension to pull in video frame data directly
        glTexImage2D(GL_TEXTURE_2D,
                     0,
                     self.outputTextureOptions.internalFormat,
                     bufferWidth,
                     bufferHeight,
                     0,
                     self.outputTextureOptions.format,
                     self.outputTextureOptions.type,
                     CVPixelBufferGetBaseAddress(movieFrame));

        for (id<GPUImageInput> currentTarget in targets)
        {
            NSInteger indexOfObject = [targets indexOfObject:currentTarget];
            NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
            [currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:targetTextureIndex];
            [currentTarget setInputFramebuffer:outputFramebuffer atIndex:targetTextureIndex];
        }

        [outputFramebuffer unlock];

        for (id<GPUImageInput> currentTarget in targets)
        {
            NSInteger indexOfObject = [targets indexOfObject:currentTarget];
            NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
            [currentTarget newFrameReadyAtTime:currentSampleTime atIndex:targetTextureIndex];
        }

        CVPixelBufferUnlockBaseAddress(movieFrame, 0);
    }

    if (_runBenchmark)
    {
        CFAbsoluteTime currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime);
        NSLog(@"Current frame time : %f ms", 1000.0 * currentFrameTime);
    }
}

- (void)endProcessing;
{
    keepLooping = NO;
    [displayLink setPaused:YES];

    for (id<GPUImageInput> currentTarget in targets)
    {
        [currentTarget endProcessing];
    }

    if (synchronizedMovieWriter != nil)
    {
        [synchronizedMovieWriter setVideoInputReadyCallback:^{return NO;}];
        [synchronizedMovieWriter setAudioInputReadyCallback:^{return NO;}];
    }

    if (self.playerItem && (displayLink != nil))
    {
        [displayLink invalidate]; // remove from all run loops
        displayLink = nil;
    }

    if ([self.delegate respondsToSelector:@selector(didCompletePlayingMovie)]) {
        [self.delegate didCompletePlayingMovie];
    }
    self.delegate = nil;
}

- (void)cancelProcessing
{
    if (reader) {
        [reader cancelReading];
    }
    [self endProcessing];
}

- (void)convertYUVToRGBOutput;
{
    [GPUImageContext setActiveShaderProgram:yuvConversionProgram];
    outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:CGSizeMake(imageBufferWidth, imageBufferHeight) onlyTexture:NO];
    [outputFramebuffer activateFramebuffer];

    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    static const GLfloat squareVertices[] = {
        -1.0f, -1.0f,
         1.0f, -1.0f,
        -1.0f,  1.0f,
         1.0f,  1.0f,
    };

    static const GLfloat textureCoordinates[] = {
        0.0f, 0.0f,
        1.0f, 0.0f,
        0.0f, 1.0f,
        1.0f, 1.0f,
    };

    glActiveTexture(GL_TEXTURE4);
    glBindTexture(GL_TEXTURE_2D, luminanceTexture);
    glUniform1i(yuvConversionLuminanceTextureUniform, 4);

    glActiveTexture(GL_TEXTURE5);
    glBindTexture(GL_TEXTURE_2D, chrominanceTexture);
    glUniform1i(yuvConversionChrominanceTextureUniform, 5);

    glUniformMatrix3fv(yuvConversionMatrixUniform, 1, GL_FALSE, _preferredConversion);

    glVertexAttribPointer(yuvConversionPositionAttribute, 2, GL_FLOAT, 0, 0, squareVertices);
    glVertexAttribPointer(yuvConversionTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);

    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
}
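
// For reference, the shader applies a 3x3 matrix multiply per pixel. In the
// standard full-range BT.601 formulation (with Y in [0,1] and Cb/Cr recentered
// from 0.5; the exact coefficients used here live in
// kColorConversion601FullRange and its siblings):
//
//     R = Y                      + 1.402 * (Cr - 0.5)
//     G = Y - 0.344 * (Cb - 0.5) - 0.714 * (Cr - 0.5)
//     B = Y + 1.772 * (Cb - 0.5)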

- (AVAssetReader*)assetReader {
    return reader;
}

- (BOOL)audioEncodingIsFinished {
    return audioEncodingIsFinished;
}

- (BOOL)videoEncodingIsFinished {
    return videoEncodingIsFinished;
}

@end