GPUImageMovieWriter.m

#import "GPUImageMovieWriter.h"
#import "GPUImageContext.h"
#import "GLProgram.h"
#import "GPUImageFilter.h"
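
// Typical push-driven recording setup (an illustrative sketch; `outputURL` and
// the upstream `filter` are assumptions, not part of this file):
//
//     GPUImageMovieWriter *movieWriter =
//         [[GPUImageMovieWriter alloc] initWithMovieURL:outputURL
//                                                  size:CGSizeMake(480.0, 640.0)];
//     [filter addTarget:movieWriter];
//     [movieWriter startRecording];
//     // ... frames arrive through the GPUImageInput protocol methods below ...
//     [movieWriter finishRecording];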

// Fragment shader that swaps the red and blue channels, so glReadPixels' RGBA
// output lines up with the BGRA pixel format the movie input expects.
NSString *const kGPUImageColorSwizzlingFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;

 uniform sampler2D inputImageTexture;

 void main()
 {
     gl_FragColor = texture2D(inputImageTexture, textureCoordinate).bgra;
 }
);

@interface GPUImageMovieWriter ()
{
    GLuint movieFramebuffer, movieRenderbuffer;

    GLProgram *colorSwizzlingProgram;
    GLint colorSwizzlingPositionAttribute, colorSwizzlingTextureCoordinateAttribute;
    GLint colorSwizzlingInputTextureUniform;

    GPUImageFramebuffer *firstInputFramebuffer;

    CMTime startTime, previousFrameTime, previousAudioTime;

    dispatch_queue_t audioQueue, videoQueue;
    BOOL audioEncodingIsFinished, videoEncodingIsFinished;

    BOOL isRecording;
}

// Movie recording
- (void)initializeMovieWithOutputSettings:(NSDictionary *)outputSettings;

// Frame rendering
- (void)createDataFBO;
- (void)destroyDataFBO;
- (void)setFilterFBO;
- (void)renderAtInternalSizeUsingFramebuffer:(GPUImageFramebuffer *)inputFramebufferToUse;

@end

@implementation GPUImageMovieWriter

@synthesize hasAudioTrack = _hasAudioTrack;
@synthesize encodingLiveVideo = _encodingLiveVideo;
@synthesize shouldPassthroughAudio = _shouldPassthroughAudio;
@synthesize completionBlock;
@synthesize failureBlock;
@synthesize videoInputReadyCallback;
@synthesize audioInputReadyCallback;
@synthesize enabled;
@synthesize shouldInvalidateAudioSampleWhenDone = _shouldInvalidateAudioSampleWhenDone;
@synthesize paused = _paused;
@synthesize movieWriterContext = _movieWriterContext;
@synthesize delegate = _delegate;

#pragma mark -
#pragma mark Initialization and teardown

- (id)initWithMovieURL:(NSURL *)newMovieURL size:(CGSize)newSize;
{
    return [self initWithMovieURL:newMovieURL size:newSize fileType:AVFileTypeQuickTimeMovie outputSettings:nil];
}

- (id)initWithMovieURL:(NSURL *)newMovieURL size:(CGSize)newSize fileType:(NSString *)newFileType outputSettings:(NSMutableDictionary *)outputSettings;
{
    if (!(self = [super init]))
    {
        return nil;
    }

    _shouldInvalidateAudioSampleWhenDone = NO;

    self.enabled = YES;
    alreadyFinishedRecording = NO;
    videoEncodingIsFinished = NO;
    audioEncodingIsFinished = NO;

    videoSize = newSize;
    movieURL = newMovieURL;
    fileType = newFileType;
    startTime = kCMTimeInvalid;
    _encodingLiveVideo = [[outputSettings objectForKey:@"EncodingLiveVideo"] isKindOfClass:[NSNumber class]] ? [[outputSettings objectForKey:@"EncodingLiveVideo"] boolValue] : YES;
    previousFrameTime = kCMTimeNegativeInfinity;
    previousAudioTime = kCMTimeNegativeInfinity;
    inputRotation = kGPUImageNoRotation;

    _movieWriterContext = [[GPUImageContext alloc] init];
    [_movieWriterContext useSharegroup:[[[GPUImageContext sharedImageProcessingContext] context] sharegroup]];

    runSynchronouslyOnContextQueue(_movieWriterContext, ^{
        [_movieWriterContext useAsCurrentContext];

        if ([GPUImageContext supportsFastTextureUpload])
        {
            colorSwizzlingProgram = [_movieWriterContext programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImagePassthroughFragmentShaderString];
        }
        else
        {
            colorSwizzlingProgram = [_movieWriterContext programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageColorSwizzlingFragmentShaderString];
        }

        if (!colorSwizzlingProgram.initialized)
        {
            [colorSwizzlingProgram addAttribute:@"position"];
            [colorSwizzlingProgram addAttribute:@"inputTextureCoordinate"];

            if (![colorSwizzlingProgram link])
            {
                NSString *progLog = [colorSwizzlingProgram programLog];
                NSLog(@"Program link log: %@", progLog);
                NSString *fragLog = [colorSwizzlingProgram fragmentShaderLog];
                NSLog(@"Fragment shader compile log: %@", fragLog);
                NSString *vertLog = [colorSwizzlingProgram vertexShaderLog];
                NSLog(@"Vertex shader compile log: %@", vertLog);
                colorSwizzlingProgram = nil;
                NSAssert(NO, @"Filter shader link failed");
            }
        }

        colorSwizzlingPositionAttribute = [colorSwizzlingProgram attributeIndex:@"position"];
        colorSwizzlingTextureCoordinateAttribute = [colorSwizzlingProgram attributeIndex:@"inputTextureCoordinate"];
        colorSwizzlingInputTextureUniform = [colorSwizzlingProgram uniformIndex:@"inputImageTexture"];

        [_movieWriterContext setContextShaderProgram:colorSwizzlingProgram];

        glEnableVertexAttribArray(colorSwizzlingPositionAttribute);
        glEnableVertexAttribArray(colorSwizzlingTextureCoordinateAttribute);
    });

    [self initializeMovieWithOutputSettings:outputSettings];

    return self;
}

- (void)dealloc;
{
    [self destroyDataFBO];

#if ( (__IPHONE_OS_VERSION_MIN_REQUIRED < __IPHONE_6_0) || (!defined(__IPHONE_6_0)) )
    if (audioQueue != NULL)
    {
        dispatch_release(audioQueue);
    }
    if (videoQueue != NULL)
    {
        dispatch_release(videoQueue);
    }
#endif
}

#pragma mark -
#pragma mark Movie recording
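
// Custom output settings can also be supplied (a sketch; the concrete values
// here are assumptions, not defaults of this class). The nonstandard
// @"EncodingLiveVideo" key is consumed by the initializer and stripped below:
//
//     NSMutableDictionary *settings = [NSMutableDictionary dictionary];
//     [settings setObject:AVVideoCodecH264 forKey:AVVideoCodecKey];
//     [settings setObject:[NSNumber numberWithInt:720] forKey:AVVideoWidthKey];
//     [settings setObject:[NSNumber numberWithInt:1280] forKey:AVVideoHeightKey];
//     [settings setObject:[NSNumber numberWithBool:NO] forKey:@"EncodingLiveVideo"];
//     GPUImageMovieWriter *writer =
//         [[GPUImageMovieWriter alloc] initWithMovieURL:outputURL
//                                                  size:CGSizeMake(720.0, 1280.0)
//                                              fileType:AVFileTypeQuickTimeMovie
//                                        outputSettings:settings];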

- (void)initializeMovieWithOutputSettings:(NSDictionary *)outputSettings;
{
    isRecording = NO;

    self.enabled = YES;
    NSError *error = nil;
    assetWriter = [[AVAssetWriter alloc] initWithURL:movieURL fileType:fileType error:&error];
    if (error != nil)
    {
        NSLog(@"Error: %@", error);
        if (failureBlock)
        {
            failureBlock(error);
        }
        else
        {
            if (self.delegate && [self.delegate respondsToSelector:@selector(movieRecordingFailedWithError:)])
            {
                [self.delegate movieRecordingFailedWithError:error];
            }
        }
    }

    // Set this to make sure that a functional movie is produced, even if the recording is cut off mid-stream. Only the last second should be lost in that case.
    assetWriter.movieFragmentInterval = CMTimeMakeWithSeconds(1.0, 1000);

    // use default output settings if none specified
    if (outputSettings == nil)
    {
        NSMutableDictionary *settings = [[NSMutableDictionary alloc] init];
        [settings setObject:AVVideoCodecH264 forKey:AVVideoCodecKey];
        [settings setObject:[NSNumber numberWithInt:videoSize.width] forKey:AVVideoWidthKey];
        [settings setObject:[NSNumber numberWithInt:videoSize.height] forKey:AVVideoHeightKey];
        outputSettings = settings;
    }
    // custom output settings specified
    else
    {
        NSString *videoCodec = [outputSettings objectForKey:AVVideoCodecKey];
        NSNumber *width = [outputSettings objectForKey:AVVideoWidthKey];
        NSNumber *height = [outputSettings objectForKey:AVVideoHeightKey];

        NSAssert(videoCodec && width && height, @"OutputSettings is missing required parameters.");

        if ([outputSettings objectForKey:@"EncodingLiveVideo"]) {
            NSMutableDictionary *tmp = [outputSettings mutableCopy];
            [tmp removeObjectForKey:@"EncodingLiveVideo"];
            outputSettings = tmp;
        }
    }

    /*
    NSDictionary *videoCleanApertureSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                                [NSNumber numberWithInt:videoSize.width], AVVideoCleanApertureWidthKey,
                                                [NSNumber numberWithInt:videoSize.height], AVVideoCleanApertureHeightKey,
                                                [NSNumber numberWithInt:0], AVVideoCleanApertureHorizontalOffsetKey,
                                                [NSNumber numberWithInt:0], AVVideoCleanApertureVerticalOffsetKey,
                                                nil];

    NSDictionary *videoAspectRatioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                              [NSNumber numberWithInt:3], AVVideoPixelAspectRatioHorizontalSpacingKey,
                                              [NSNumber numberWithInt:3], AVVideoPixelAspectRatioVerticalSpacingKey,
                                              nil];

    NSMutableDictionary *compressionProperties = [[NSMutableDictionary alloc] init];
    [compressionProperties setObject:videoCleanApertureSettings forKey:AVVideoCleanApertureKey];
    [compressionProperties setObject:videoAspectRatioSettings forKey:AVVideoPixelAspectRatioKey];
    [compressionProperties setObject:[NSNumber numberWithInt:2000000] forKey:AVVideoAverageBitRateKey];
    [compressionProperties setObject:[NSNumber numberWithInt:16] forKey:AVVideoMaxKeyFrameIntervalKey];
    [compressionProperties setObject:AVVideoProfileLevelH264Main31 forKey:AVVideoProfileLevelKey];

    [outputSettings setObject:compressionProperties forKey:AVVideoCompressionPropertiesKey];
    */

    assetWriterVideoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:outputSettings];
    assetWriterVideoInput.expectsMediaDataInRealTime = _encodingLiveVideo;

    // You need to use BGRA for the video in order to get realtime encoding. I use a color-swizzling shader to line up glReadPixels' normal RGBA output with the movie input's BGRA.
    NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys:
                                                           [NSNumber numberWithInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey,
                                                           [NSNumber numberWithInt:videoSize.width], kCVPixelBufferWidthKey,
                                                           [NSNumber numberWithInt:videoSize.height], kCVPixelBufferHeightKey,
                                                           nil];
//    NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys:
//                                                           [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey,
//                                                           nil];

    assetWriterPixelBufferInput = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:assetWriterVideoInput sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];

    [assetWriter addInput:assetWriterVideoInput];
}

- (void)startRecording;
{
    alreadyFinishedRecording = NO;
    startTime = kCMTimeInvalid;
    runSynchronouslyOnContextQueue(_movieWriterContext, ^{
        if (audioInputReadyCallback == NULL)
        {
            [assetWriter startWriting];
        }
    });
    isRecording = YES;
    //    [assetWriter startSessionAtSourceTime:kCMTimeZero];
}
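
// When the source delivers frames in a fixed orientation, a rotation transform
// can be baked into the video track at start (the quarter-turn here is an
// assumed example; use whatever matches your capture orientation):
//
//     [movieWriter startRecordingInOrientation:CGAffineTransformMakeRotation(M_PI_2)];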

- (void)startRecordingInOrientation:(CGAffineTransform)orientationTransform;
{
    assetWriterVideoInput.transform = orientationTransform;

    [self startRecording];
}

- (void)cancelRecording;
{
    if (assetWriter.status == AVAssetWriterStatusCompleted)
    {
        return;
    }

    isRecording = NO;
    runSynchronouslyOnContextQueue(_movieWriterContext, ^{
        alreadyFinishedRecording = YES;

        if (assetWriter.status == AVAssetWriterStatusWriting && !videoEncodingIsFinished)
        {
            videoEncodingIsFinished = YES;
            [assetWriterVideoInput markAsFinished];
        }
        if (assetWriter.status == AVAssetWriterStatusWriting && !audioEncodingIsFinished)
        {
            audioEncodingIsFinished = YES;
            [assetWriterAudioInput markAsFinished];
        }
        [assetWriter cancelWriting];
    });
}

- (void)finishRecording;
{
    [self finishRecordingWithCompletionHandler:NULL];
}
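
// A minimal completion sketch (what runs in the handler is up to the caller;
// the logging here is purely illustrative):
//
//     [movieWriter finishRecordingWithCompletionHandler:^{
//         NSLog(@"Finished writing movie to %@", outputURL);
//         // e.g. move the file, update UI on the main queue, etc.
//     }];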

- (void)finishRecordingWithCompletionHandler:(void (^)(void))handler;
{
    runSynchronouslyOnContextQueue(_movieWriterContext, ^{
        isRecording = NO;

        if (assetWriter.status == AVAssetWriterStatusCompleted || assetWriter.status == AVAssetWriterStatusCancelled || assetWriter.status == AVAssetWriterStatusUnknown)
        {
            if (handler)
                runAsynchronouslyOnContextQueue(_movieWriterContext, handler);
            return;
        }
        if (assetWriter.status == AVAssetWriterStatusWriting && !videoEncodingIsFinished)
        {
            videoEncodingIsFinished = YES;
            [assetWriterVideoInput markAsFinished];
        }
        if (assetWriter.status == AVAssetWriterStatusWriting && !audioEncodingIsFinished)
        {
            audioEncodingIsFinished = YES;
            [assetWriterAudioInput markAsFinished];
        }
#if (!defined(__IPHONE_6_0) || (__IPHONE_OS_VERSION_MAX_ALLOWED < __IPHONE_6_0))
        // Not iOS 6 SDK
        [assetWriter finishWriting];
        if (handler)
            runAsynchronouslyOnContextQueue(_movieWriterContext, handler);
#else
        // iOS 6 SDK
        if ([assetWriter respondsToSelector:@selector(finishWritingWithCompletionHandler:)]) {
            // Running iOS 6
            [assetWriter finishWritingWithCompletionHandler:(handler ?: ^{ })];
        }
        else {
            // Not running iOS 6
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
            [assetWriter finishWriting];
#pragma clang diagnostic pop
            if (handler)
                runAsynchronouslyOnContextQueue(_movieWriterContext, handler);
        }
#endif
    });
}
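
// If set, audioProcessingCallback is invoked below with a live pointer into
// each AudioBufferList buffer before samples are appended. A sketch, assuming
// the SInt16 block signature implied by the call site in processAudioBuffer:
//
//     movieWriter.audioProcessingCallback = ^(SInt16 **samplesRef, CMItemCount numSamples) {
//         SInt16 *samples = *samplesRef;
//         for (CMItemCount i = 0; i < numSamples; i++)
//         {
//             samples[i] /= 2; // e.g. attenuate in place
//         }
//     };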

- (void)processAudioBuffer:(CMSampleBufferRef)audioBuffer;
{
    if (!isRecording)
    {
        return;
    }

//    if (_hasAudioTrack && CMTIME_IS_VALID(startTime))
    if (_hasAudioTrack)
    {
        CFRetain(audioBuffer);

        CMTime currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(audioBuffer);

        if (CMTIME_IS_INVALID(startTime))
        {
            runSynchronouslyOnContextQueue(_movieWriterContext, ^{
                if ((audioInputReadyCallback == NULL) && (assetWriter.status != AVAssetWriterStatusWriting))
                {
                    [assetWriter startWriting];
                }
                [assetWriter startSessionAtSourceTime:currentSampleTime];
                startTime = currentSampleTime;
            });
        }

        if (!assetWriterAudioInput.readyForMoreMediaData && _encodingLiveVideo)
        {
            NSLog(@"1: Had to drop an audio frame: %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, currentSampleTime)));
            if (_shouldInvalidateAudioSampleWhenDone)
            {
                CMSampleBufferInvalidate(audioBuffer);
            }
            CFRelease(audioBuffer);
            return;
        }

        previousAudioTime = currentSampleTime;

        // If the consumer wants to do something with the audio samples before writing, let them.
        if (self.audioProcessingCallback) {
            // Need to introspect into the opaque CMBlockBuffer structure to find its raw sample buffers.
            CMBlockBufferRef buffer = CMSampleBufferGetDataBuffer(audioBuffer);
            CMItemCount numSamplesInBuffer = CMSampleBufferGetNumSamples(audioBuffer);
            AudioBufferList audioBufferList;

            CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(audioBuffer,
                                                                    NULL,
                                                                    &audioBufferList,
                                                                    sizeof(audioBufferList),
                                                                    NULL,
                                                                    NULL,
                                                                    kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment,
                                                                    &buffer
                                                                    );

            // This passes a live pointer to the audio buffers; process them in place, or we might have syncing issues.
            for (int bufferCount = 0; bufferCount < audioBufferList.mNumberBuffers; bufferCount++) {
                SInt16 *samples = (SInt16 *)audioBufferList.mBuffers[bufferCount].mData;
                self.audioProcessingCallback(&samples, numSamplesInBuffer);
            }
        }

//        NSLog(@"Recorded audio sample time: %lld, %d, %lld", currentSampleTime.value, currentSampleTime.timescale, currentSampleTime.epoch);
        void(^write)() = ^() {
            while (!assetWriterAudioInput.readyForMoreMediaData && !_encodingLiveVideo && !audioEncodingIsFinished) {
                NSDate *maxDate = [NSDate dateWithTimeIntervalSinceNow:0.5];
                //NSLog(@"audio waiting...");
                [[NSRunLoop currentRunLoop] runUntilDate:maxDate];
            }
            if (!assetWriterAudioInput.readyForMoreMediaData)
            {
                NSLog(@"2: Had to drop an audio frame %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, currentSampleTime)));
            }
            else if (assetWriter.status == AVAssetWriterStatusWriting)
            {
                if (![assetWriterAudioInput appendSampleBuffer:audioBuffer])
                    NSLog(@"Problem appending audio buffer at time: %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, currentSampleTime)));
            }
            else
            {
                //NSLog(@"Wrote an audio frame %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, currentSampleTime)));
            }

            if (_shouldInvalidateAudioSampleWhenDone)
            {
                CMSampleBufferInvalidate(audioBuffer);
            }
            CFRelease(audioBuffer);
        };
//        runAsynchronouslyOnContextQueue(_movieWriterContext, write);
        if (_encodingLiveVideo)
        {
            runAsynchronouslyOnContextQueue(_movieWriterContext, write);
        }
        else
        {
            write();
        }
    }
}
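
// Used for pull-driven (offline) encoding: when a source supplies
// videoInputReadyCallback/audioInputReadyCallback blocks, the asset writer
// inputs request data on their own dispatch queues instead of having frames
// pushed in live.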

- (void)enableSynchronizationCallbacks;
{
    if (videoInputReadyCallback != NULL)
    {
        if (assetWriter.status != AVAssetWriterStatusWriting)
        {
            [assetWriter startWriting];
        }
        videoQueue = dispatch_queue_create("com.sunsetlakesoftware.GPUImage.videoReadingQueue", NULL);
        [assetWriterVideoInput requestMediaDataWhenReadyOnQueue:videoQueue usingBlock:^{
            if (_paused)
            {
                //NSLog(@"video requestMediaDataWhenReadyOnQueue paused");
                // If we don't sleep, we'll get called back almost immediately, chewing up CPU
                usleep(10000);
                return;
            }
            //NSLog(@"video requestMediaDataWhenReadyOnQueue begin");
            while (assetWriterVideoInput.readyForMoreMediaData && !_paused)
            {
                if (videoInputReadyCallback && !videoInputReadyCallback() && !videoEncodingIsFinished)
                {
                    runAsynchronouslyOnContextQueue(_movieWriterContext, ^{
                        if (assetWriter.status == AVAssetWriterStatusWriting && !videoEncodingIsFinished)
                        {
                            videoEncodingIsFinished = YES;
                            [assetWriterVideoInput markAsFinished];
                        }
                    });
                }
            }
            //NSLog(@"video requestMediaDataWhenReadyOnQueue end");
        }];
    }

    if (audioInputReadyCallback != NULL)
    {
        audioQueue = dispatch_queue_create("com.sunsetlakesoftware.GPUImage.audioReadingQueue", NULL);
        [assetWriterAudioInput requestMediaDataWhenReadyOnQueue:audioQueue usingBlock:^{
            if (_paused)
            {
                //NSLog(@"audio requestMediaDataWhenReadyOnQueue paused");
                // If we don't sleep, we'll get called back almost immediately, chewing up CPU
                usleep(10000);
                return;
            }
            //NSLog(@"audio requestMediaDataWhenReadyOnQueue begin");
            while (assetWriterAudioInput.readyForMoreMediaData && !_paused)
            {
                if (audioInputReadyCallback && !audioInputReadyCallback() && !audioEncodingIsFinished)
                {
                    runAsynchronouslyOnContextQueue(_movieWriterContext, ^{
                        if (assetWriter.status == AVAssetWriterStatusWriting && !audioEncodingIsFinished)
                        {
                            audioEncodingIsFinished = YES;
                            [assetWriterAudioInput markAsFinished];
                        }
                    });
                }
            }
            //NSLog(@"audio requestMediaDataWhenReadyOnQueue end");
        }];
    }
}

#pragma mark -
#pragma mark Frame rendering
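
// Two render targets are possible here: with the fast texture-cache upload
// path, the FBO renders straight into a CVPixelBuffer-backed texture; on older
// devices it renders into a plain renderbuffer that newFrameReadyAtTime:atIndex:
// reads back with glReadPixels.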
- (void)createDataFBO;
{
    glActiveTexture(GL_TEXTURE1);
    glGenFramebuffers(1, &movieFramebuffer);
    glBindFramebuffer(GL_FRAMEBUFFER, movieFramebuffer);

    if ([GPUImageContext supportsFastTextureUpload])
    {
        // Code originally sourced from http://allmybrain.com/2011/12/08/rendering-to-a-texture-with-ios-5-texture-cache-api/
        CVPixelBufferPoolCreatePixelBuffer(NULL, [assetWriterPixelBufferInput pixelBufferPool], &renderTarget);

        /* AVAssetWriter will use BT.601 conversion matrix for RGB to YCbCr conversion
         * regardless of the kCVImageBufferYCbCrMatrixKey value.
         * Tagging the resulting video file as BT.601 is the best option right now.
         * Creating a proper BT.709 video is not possible at the moment.
         */
        CVBufferSetAttachment(renderTarget, kCVImageBufferColorPrimariesKey, kCVImageBufferColorPrimaries_ITU_R_709_2, kCVAttachmentMode_ShouldPropagate);
        CVBufferSetAttachment(renderTarget, kCVImageBufferYCbCrMatrixKey, kCVImageBufferYCbCrMatrix_ITU_R_601_4, kCVAttachmentMode_ShouldPropagate);
        CVBufferSetAttachment(renderTarget, kCVImageBufferTransferFunctionKey, kCVImageBufferTransferFunction_ITU_R_709_2, kCVAttachmentMode_ShouldPropagate);

        CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [_movieWriterContext coreVideoTextureCache], renderTarget,
                                                     NULL, // texture attributes
                                                     GL_TEXTURE_2D,
                                                     GL_RGBA, // opengl format
                                                     (int)videoSize.width,
                                                     (int)videoSize.height,
                                                     GL_BGRA, // native iOS format
                                                     GL_UNSIGNED_BYTE,
                                                     0,
                                                     &renderTexture);

        glBindTexture(CVOpenGLESTextureGetTarget(renderTexture), CVOpenGLESTextureGetName(renderTexture));
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

        glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, CVOpenGLESTextureGetName(renderTexture), 0);
    }
    else
    {
        glGenRenderbuffers(1, &movieRenderbuffer);
        glBindRenderbuffer(GL_RENDERBUFFER, movieRenderbuffer);
        glRenderbufferStorage(GL_RENDERBUFFER, GL_RGBA8_OES, (int)videoSize.width, (int)videoSize.height);
        glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, movieRenderbuffer);
    }

    GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
    NSAssert(status == GL_FRAMEBUFFER_COMPLETE, @"Incomplete filter FBO: %d", status);
}

- (void)destroyDataFBO;
{
    runSynchronouslyOnContextQueue(_movieWriterContext, ^{
        [_movieWriterContext useAsCurrentContext];

        if (movieFramebuffer)
        {
            glDeleteFramebuffers(1, &movieFramebuffer);
            movieFramebuffer = 0;
        }

        if (movieRenderbuffer)
        {
            glDeleteRenderbuffers(1, &movieRenderbuffer);
            movieRenderbuffer = 0;
        }

        if ([GPUImageContext supportsFastTextureUpload])
        {
            if (renderTexture)
            {
                CFRelease(renderTexture);
            }
            if (renderTarget)
            {
                CVPixelBufferRelease(renderTarget);
            }
        }
    });
}

- (void)setFilterFBO;
{
    if (!movieFramebuffer)
    {
        [self createDataFBO];
    }

    glBindFramebuffer(GL_FRAMEBUFFER, movieFramebuffer);

    glViewport(0, 0, (int)videoSize.width, (int)videoSize.height);
}

- (void)renderAtInternalSizeUsingFramebuffer:(GPUImageFramebuffer *)inputFramebufferToUse;
{
    [_movieWriterContext useAsCurrentContext];
    [self setFilterFBO];

    [_movieWriterContext setContextShaderProgram:colorSwizzlingProgram];

    glClearColor(1.0f, 0.0f, 0.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    // This needs to be flipped to write out to video correctly
    static const GLfloat squareVertices[] = {
        -1.0f, -1.0f,
         1.0f, -1.0f,
        -1.0f,  1.0f,
         1.0f,  1.0f,
    };

    const GLfloat *textureCoordinates = [GPUImageFilter textureCoordinatesForRotation:inputRotation];

    glActiveTexture(GL_TEXTURE4);
    glBindTexture(GL_TEXTURE_2D, [inputFramebufferToUse texture]);
    glUniform1i(colorSwizzlingInputTextureUniform, 4);

    //    NSLog(@"Movie writer framebuffer: %@", inputFramebufferToUse);

    glVertexAttribPointer(colorSwizzlingPositionAttribute, 2, GL_FLOAT, 0, 0, squareVertices);
    glVertexAttribPointer(colorSwizzlingTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);

    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);

    glFinish();
}

#pragma mark -
#pragma mark GPUImageInput protocol

- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
{
    if (!isRecording)
    {
        [firstInputFramebuffer unlock];
        return;
    }

    // Drop frames forced by images and other things with no time constants
    // Also, if two consecutive times with the same value are added to the movie, it aborts recording, so I bail on that case
    if ((CMTIME_IS_INVALID(frameTime)) || (CMTIME_COMPARE_INLINE(frameTime, ==, previousFrameTime)) || (CMTIME_IS_INDEFINITE(frameTime)))
    {
        [firstInputFramebuffer unlock];
        return;
    }

    if (CMTIME_IS_INVALID(startTime))
    {
        runSynchronouslyOnContextQueue(_movieWriterContext, ^{
            if ((videoInputReadyCallback == NULL) && (assetWriter.status != AVAssetWriterStatusWriting))
            {
                [assetWriter startWriting];
            }

            [assetWriter startSessionAtSourceTime:frameTime];
            startTime = frameTime;
        });
    }

    GPUImageFramebuffer *inputFramebufferForBlock = firstInputFramebuffer;
    glFinish();

    runAsynchronouslyOnContextQueue(_movieWriterContext, ^{
        if (!assetWriterVideoInput.readyForMoreMediaData && _encodingLiveVideo)
        {
            [inputFramebufferForBlock unlock];
            NSLog(@"1: Had to drop a video frame: %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, frameTime)));
            return;
        }

        // Render the frame with swizzled colors, so that they can be uploaded quickly as BGRA frames
        [_movieWriterContext useAsCurrentContext];
        [self renderAtInternalSizeUsingFramebuffer:inputFramebufferForBlock];

        CVPixelBufferRef pixel_buffer = NULL;

        if ([GPUImageContext supportsFastTextureUpload])
        {
            pixel_buffer = renderTarget;
            CVPixelBufferLockBaseAddress(pixel_buffer, 0);
        }
        else
        {
            CVReturn status = CVPixelBufferPoolCreatePixelBuffer(NULL, [assetWriterPixelBufferInput pixelBufferPool], &pixel_buffer);
            if ((pixel_buffer == NULL) || (status != kCVReturnSuccess))
            {
                CVPixelBufferRelease(pixel_buffer);
                [inputFramebufferForBlock unlock]; // Don't leak the input framebuffer lock if buffer creation fails
                return;
            }
            else
            {
                CVPixelBufferLockBaseAddress(pixel_buffer, 0);

                GLubyte *pixelBufferData = (GLubyte *)CVPixelBufferGetBaseAddress(pixel_buffer);
                glReadPixels(0, 0, videoSize.width, videoSize.height, GL_RGBA, GL_UNSIGNED_BYTE, pixelBufferData);
            }
        }

        void(^write)() = ^() {
            while (!assetWriterVideoInput.readyForMoreMediaData && !_encodingLiveVideo && !videoEncodingIsFinished) {
                NSDate *maxDate = [NSDate dateWithTimeIntervalSinceNow:0.1];
                //            NSLog(@"video waiting...");
                [[NSRunLoop currentRunLoop] runUntilDate:maxDate];
            }
            if (!assetWriterVideoInput.readyForMoreMediaData)
            {
                NSLog(@"2: Had to drop a video frame: %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, frameTime)));
            }
            else if (self.assetWriter.status == AVAssetWriterStatusWriting)
            {
                if (![assetWriterPixelBufferInput appendPixelBuffer:pixel_buffer withPresentationTime:frameTime])
                    NSLog(@"Problem appending pixel buffer at time: %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, frameTime)));
            }
            else
            {
                NSLog(@"Couldn't write a frame");
                //NSLog(@"Wrote a video frame: %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, frameTime)));
            }
            CVPixelBufferUnlockBaseAddress(pixel_buffer, 0);

            previousFrameTime = frameTime;

            if (![GPUImageContext supportsFastTextureUpload])
            {
                CVPixelBufferRelease(pixel_buffer);
            }
        };

        write();

        [inputFramebufferForBlock unlock];
    });
}

- (NSInteger)nextAvailableTextureIndex;
{
    return 0;
}

- (void)setInputFramebuffer:(GPUImageFramebuffer *)newInputFramebuffer atIndex:(NSInteger)textureIndex;
{
    [newInputFramebuffer lock];
//    runSynchronouslyOnContextQueue(_movieWriterContext, ^{
        firstInputFramebuffer = newInputFramebuffer;
//    });
}

- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
{
    inputRotation = newInputRotation;
}

- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
{
}

- (CGSize)maximumOutputSize;
{
    return videoSize;
}

- (void)endProcessing
{
    if (completionBlock)
    {
        if (!alreadyFinishedRecording)
        {
            alreadyFinishedRecording = YES;
            completionBlock();
        }
    }
    else
    {
        if (_delegate && [_delegate respondsToSelector:@selector(movieRecordingCompleted)])
        {
            [_delegate movieRecordingCompleted];
        }
    }
}

- (BOOL)shouldIgnoreUpdatesToThisTarget;
{
    return NO;
}

- (BOOL)wantsMonochromeInput;
{
    return NO;
}

- (void)setCurrentlyReceivingMonochromeInput:(BOOL)newValue;
{
}

#pragma mark -
#pragma mark Accessors
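
// Enabling audio must happen before recording starts (a sketch; routing a
// camera's audio to this writer, e.g. via GPUImageVideoCamera's
// audioEncodingTarget, is assumed and lives outside this file):
//
//     [movieWriter setHasAudioTrack:YES audioSettings:nil]; // nil selects the AAC defaults below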
- (void)setHasAudioTrack:(BOOL)newValue
{
    [self setHasAudioTrack:newValue audioSettings:nil];
}

- (void)setHasAudioTrack:(BOOL)newValue audioSettings:(NSDictionary *)audioOutputSettings;
{
    _hasAudioTrack = newValue;

    if (_hasAudioTrack)
    {
        if (_shouldPassthroughAudio)
        {
            // Do not set any settings so audio will be the same as passthrough
            audioOutputSettings = nil;
        }
        else if (audioOutputSettings == nil)
        {
            AVAudioSession *sharedAudioSession = [AVAudioSession sharedInstance];
            double preferredHardwareSampleRate;

            if ([sharedAudioSession respondsToSelector:@selector(sampleRate)])
            {
                preferredHardwareSampleRate = [sharedAudioSession sampleRate];
            }
            else
            {
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
                preferredHardwareSampleRate = [[AVAudioSession sharedInstance] currentHardwareSampleRate];
#pragma clang diagnostic pop
            }

            AudioChannelLayout acl;
            bzero(&acl, sizeof(acl));
            acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;

            audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   [NSNumber numberWithInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
                                   [NSNumber numberWithInt:1], AVNumberOfChannelsKey,
                                   [NSNumber numberWithFloat:preferredHardwareSampleRate], AVSampleRateKey,
                                   [NSData dataWithBytes:&acl length:sizeof(acl)], AVChannelLayoutKey,
                                   //[NSNumber numberWithInt:AVAudioQualityLow], AVEncoderAudioQualityKey,
                                   [NSNumber numberWithInt:64000], AVEncoderBitRateKey,
                                   nil];
/*
            AudioChannelLayout acl;
            bzero(&acl, sizeof(acl));
            acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;

            audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   [NSNumber numberWithInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
                                   [NSNumber numberWithInt:1], AVNumberOfChannelsKey,
                                   [NSNumber numberWithFloat:44100.0], AVSampleRateKey,
                                   [NSNumber numberWithInt:64000], AVEncoderBitRateKey,
                                   [NSData dataWithBytes:&acl length:sizeof(acl)], AVChannelLayoutKey,
                                   nil];*/
        }

        assetWriterAudioInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioOutputSettings];
        [assetWriter addInput:assetWriterAudioInput];
        assetWriterAudioInput.expectsMediaDataInRealTime = _encodingLiveVideo;
    }
    else
    {
        // Remove the audio track if it exists
    }
}

- (NSArray *)metaData {
    return assetWriter.metadata;
}

- (void)setMetaData:(NSArray *)metaData {
    assetWriter.metadata = metaData;
}

- (CMTime)duration {
    if (!CMTIME_IS_VALID(startTime))
        return kCMTimeZero;
    if (!CMTIME_IS_NEGATIVE_INFINITY(previousFrameTime))
        return CMTimeSubtract(previousFrameTime, startTime);
    if (!CMTIME_IS_NEGATIVE_INFINITY(previousAudioTime))
        return CMTimeSubtract(previousAudioTime, startTime);
    return kCMTimeZero;
}

- (CGAffineTransform)transform {
    return assetWriterVideoInput.transform;
}

- (void)setTransform:(CGAffineTransform)transform {
    assetWriterVideoInput.transform = transform;
}

- (AVAssetWriter *)assetWriter {
    return assetWriter;
}

@end