GPUImageMovieWriter.m

#import "GPUImageMovieWriter.h"
#import "GPUImageContext.h"
#import "GLProgram.h"
#import "GPUImageFilter.h"

#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
NSString *const kGPUImageColorSwizzlingFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;
 
 uniform sampler2D inputImageTexture;
 
 void main()
 {
     gl_FragColor = texture2D(inputImageTexture, textureCoordinate).bgra;
 }
);
#else
NSString *const kGPUImageColorSwizzlingFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;
 
 uniform sampler2D inputImageTexture;
 
 void main()
 {
     gl_FragColor = texture2D(inputImageTexture, textureCoordinate).bgra;
 }
);
#endif
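
// Both shader variants perform the same .bgra swizzle, which reorders the RGBA
// output of the render pass into the BGRA byte order that the asset writer's
// pixel buffers expect; only the OpenGL ES variant needs the highp qualifier,
// since precision qualifiers don't exist in desktop GLSL.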

@interface GPUImageMovieWriter ()
{
    GPUImageFramebuffer *firstInputFramebuffer;
    
    GLuint movieFramebuffer, movieRenderbuffer;
    
    GLProgram *colorSwizzlingProgram;
    GLint colorSwizzlingPositionAttribute, colorSwizzlingTextureCoordinateAttribute;
    GLint colorSwizzlingInputTextureUniform;
    
    GLubyte *frameData;
    
    CMTime startTime, previousFrameTime;
    
    BOOL isRecording;
}

// Movie recording
- (void)initializeMovieWithOutputSettings:(NSMutableDictionary *)outputSettings;

// Frame rendering
- (void)createDataFBO;
- (void)destroyDataFBO;
- (void)setFilterFBO;
- (void)renderAtInternalSize;

@end

@implementation GPUImageMovieWriter

@synthesize hasAudioTrack = _hasAudioTrack;
@synthesize encodingLiveVideo = _encodingLiveVideo;
@synthesize shouldPassthroughAudio = _shouldPassthroughAudio;
@synthesize completionBlock;
@synthesize failureBlock;
@synthesize videoInputReadyCallback;
@synthesize audioInputReadyCallback;
@synthesize enabled;
@synthesize delegate = _delegate;

#pragma mark -
#pragma mark Initialization and teardown

- (id)initWithMovieURL:(NSURL *)newMovieURL size:(CGSize)newSize;
{
    return [self initWithMovieURL:newMovieURL size:newSize fileType:AVFileTypeQuickTimeMovie outputSettings:nil];
}

- (id)initWithMovieURL:(NSURL *)newMovieURL size:(CGSize)newSize fileType:(NSString *)newFileType outputSettings:(NSMutableDictionary *)outputSettings;
{
    if (!(self = [super init]))
    {
        return nil;
    }

    self.enabled = YES;
    
    videoSize = newSize;
    movieURL = newMovieURL;
    fileType = newFileType;
    startTime = kCMTimeInvalid;
    _encodingLiveVideo = YES;
    previousFrameTime = kCMTimeNegativeInfinity;
    inputRotation = kGPUImageNoRotation;

    runSynchronouslyOnVideoProcessingQueue(^{
        [GPUImageContext useImageProcessingContext];
        
        if ([GPUImageContext supportsFastTextureUpload])
        {
            colorSwizzlingProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImagePassthroughFragmentShaderString];
        }
        else
        {
            colorSwizzlingProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageColorSwizzlingFragmentShaderString];
        }
        
        if (!colorSwizzlingProgram.initialized)
        {
            [colorSwizzlingProgram addAttribute:@"position"];
            [colorSwizzlingProgram addAttribute:@"inputTextureCoordinate"];
            
            if (![colorSwizzlingProgram link])
            {
                NSString *progLog = [colorSwizzlingProgram programLog];
                NSLog(@"Program link log: %@", progLog);
                NSString *fragLog = [colorSwizzlingProgram fragmentShaderLog];
                NSLog(@"Fragment shader compile log: %@", fragLog);
                NSString *vertLog = [colorSwizzlingProgram vertexShaderLog];
                NSLog(@"Vertex shader compile log: %@", vertLog);
                colorSwizzlingProgram = nil;
                NSAssert(NO, @"Filter shader link failed");
            }
        }
        
        colorSwizzlingPositionAttribute = [colorSwizzlingProgram attributeIndex:@"position"];
        colorSwizzlingTextureCoordinateAttribute = [colorSwizzlingProgram attributeIndex:@"inputTextureCoordinate"];
        colorSwizzlingInputTextureUniform = [colorSwizzlingProgram uniformIndex:@"inputImageTexture"];
        
        [GPUImageContext setActiveShaderProgram:colorSwizzlingProgram];
        
        glEnableVertexAttribArray(colorSwizzlingPositionAttribute);
        glEnableVertexAttribArray(colorSwizzlingTextureCoordinateAttribute);
    });
    
    [self initializeMovieWithOutputSettings:outputSettings];
    
    return self;
}

- (void)dealloc;
{
    [self destroyDataFBO];

    if (frameData != NULL)
    {
        free(frameData);
    }
}

#pragma mark -
#pragma mark Movie recording

- (void)initializeMovieWithOutputSettings:(NSMutableDictionary *)outputSettings;
{
    isRecording = NO;
    self.enabled = YES;
    frameData = (GLubyte *) malloc((int)videoSize.width * (int)videoSize.height * 4);
//    frameData = (GLubyte *) calloc(videoSize.width * videoSize.height * 4, sizeof(GLubyte));

    NSError *error = nil;
    assetWriter = [[AVAssetWriter alloc] initWithURL:movieURL fileType:fileType error:&error];
    if (error != nil)
    {
        NSLog(@"Error: %@", error);
        if (failureBlock)
        {
            failureBlock(error);
        }
        else
        {
            if (self.delegate && [self.delegate respondsToSelector:@selector(movieRecordingFailedWithError:)])
            {
                [self.delegate movieRecordingFailedWithError:error];
            }
        }
    }
    
    // Set this to make sure that a functional movie is produced, even if the recording is cut off mid-stream. Only the last second should be lost in that case.
    assetWriter.movieFragmentInterval = CMTimeMakeWithSeconds(1.0, 1000);
    
    // Use default output settings if none are specified
    if (outputSettings == nil)
    {
        outputSettings = [[NSMutableDictionary alloc] init];
        [outputSettings setObject:AVVideoCodecH264 forKey:AVVideoCodecKey];
        [outputSettings setObject:[NSNumber numberWithInt:videoSize.width] forKey:AVVideoWidthKey];
        [outputSettings setObject:[NSNumber numberWithInt:videoSize.height] forKey:AVVideoHeightKey];
    }
    // Custom output settings were specified; validate the required keys
    else
    {
#ifndef NS_BLOCK_ASSERTIONS
        NSString *videoCodec = [outputSettings objectForKey:AVVideoCodecKey];
        NSNumber *width = [outputSettings objectForKey:AVVideoWidthKey];
        NSNumber *height = [outputSettings objectForKey:AVVideoHeightKey];
        
        NSAssert(videoCodec && width && height, @"OutputSettings is missing required parameters.");
#endif
    }
    
    /*
    NSDictionary *videoCleanApertureSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                                [NSNumber numberWithInt:videoSize.width], AVVideoCleanApertureWidthKey,
                                                [NSNumber numberWithInt:videoSize.height], AVVideoCleanApertureHeightKey,
                                                [NSNumber numberWithInt:0], AVVideoCleanApertureHorizontalOffsetKey,
                                                [NSNumber numberWithInt:0], AVVideoCleanApertureVerticalOffsetKey,
                                                nil];

    NSDictionary *videoAspectRatioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                              [NSNumber numberWithInt:3], AVVideoPixelAspectRatioHorizontalSpacingKey,
                                              [NSNumber numberWithInt:3], AVVideoPixelAspectRatioVerticalSpacingKey,
                                              nil];

    NSMutableDictionary *compressionProperties = [[NSMutableDictionary alloc] init];
    [compressionProperties setObject:videoCleanApertureSettings forKey:AVVideoCleanApertureKey];
    [compressionProperties setObject:videoAspectRatioSettings forKey:AVVideoPixelAspectRatioKey];
    [compressionProperties setObject:[NSNumber numberWithInt:2000000] forKey:AVVideoAverageBitRateKey];
    [compressionProperties setObject:[NSNumber numberWithInt:16] forKey:AVVideoMaxKeyFrameIntervalKey];
    [compressionProperties setObject:AVVideoProfileLevelH264Main31 forKey:AVVideoProfileLevelKey];
    
    [outputSettings setObject:compressionProperties forKey:AVVideoCompressionPropertiesKey];
    */
    
    assetWriterVideoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:outputSettings];
    assetWriterVideoInput.expectsMediaDataInRealTime = _encodingLiveVideo;
    
    // You need to use BGRA for the video in order to get realtime encoding. I use a color-swizzling shader to line up glReadPixels' normal RGBA output with the movie input's BGRA.
    NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys: [NSNumber numberWithInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey,
                                                           [NSNumber numberWithInt:videoSize.width], kCVPixelBufferWidthKey,
                                                           [NSNumber numberWithInt:videoSize.height], kCVPixelBufferHeightKey,
                                                           nil];
//    NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys: [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey,
//                                                           nil];

    assetWriterPixelBufferInput = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:assetWriterVideoInput sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];

    [assetWriter addInput:assetWriterVideoInput];
}
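
// Illustrative only: how a caller might supply the custom settings validated
// above. The outputURL variable, dimensions, and file type are assumptions for
// this sketch, not defaults of the class.
/*
 NSMutableDictionary *settings = [[NSMutableDictionary alloc] init];
 [settings setObject:AVVideoCodecH264 forKey:AVVideoCodecKey];
 [settings setObject:[NSNumber numberWithInt:640] forKey:AVVideoWidthKey];
 [settings setObject:[NSNumber numberWithInt:480] forKey:AVVideoHeightKey];
 
 GPUImageMovieWriter *writer = [[GPUImageMovieWriter alloc] initWithMovieURL:outputURL
                                                                        size:CGSizeMake(640.0, 480.0)
                                                                    fileType:AVFileTypeQuickTimeMovie
                                                              outputSettings:settings];
 */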

- (void)startRecording;
{
    isRecording = YES;
    startTime = kCMTimeInvalid;
//    [assetWriter startWriting];
    
//    [assetWriter startSessionAtSourceTime:kCMTimeZero];
}

- (void)startRecordingInOrientation:(CGAffineTransform)orientationTransform;
{
    assetWriterVideoInput.transform = orientationTransform;
    
    [self startRecording];
}

- (void)cancelRecording;
{
    if (assetWriter.status == AVAssetWriterStatusCompleted)
    {
        return;
    }
    
    isRecording = NO;
    runOnMainQueueWithoutDeadlocking(^{
        [assetWriterVideoInput markAsFinished];
        [assetWriterAudioInput markAsFinished];
        [assetWriter cancelWriting];
    });
}

- (void)finishRecording;
{
    [self finishRecordingWithCompletionHandler:nil];
}

- (void)finishRecordingWithCompletionHandler:(void (^)(void))handler;
{
    if (assetWriter.status == AVAssetWriterStatusCompleted)
    {
        return;
    }
    
    isRecording = NO;
    runOnMainQueueWithoutDeadlocking(^{
        [assetWriterVideoInput markAsFinished];
        [assetWriterAudioInput markAsFinished];
#if (!defined(__IPHONE_6_0) || (__IPHONE_OS_VERSION_MAX_ALLOWED < __IPHONE_6_0))
        // Not iOS 6 SDK
        [assetWriter finishWriting];
        if (handler) handler();
#else
        // iOS 6 SDK
        if ([assetWriter respondsToSelector:@selector(finishWritingWithCompletionHandler:)]) {
            // Running iOS 6
            [assetWriter finishWritingWithCompletionHandler:(handler ?: ^{ })];
        }
        else {
            // Not running iOS 6
            [assetWriter finishWriting];
            if (handler) handler();
        }
#endif
    });
}
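
// Audio samples can arrive before the first video frame; whichever shows up
// first starts the writer session, so both tracks share a common timeline.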
- (void)processAudioBuffer:(CMSampleBufferRef)audioBuffer;
{
    if (!isRecording)
    {
        return;
    }
    
    if (_hasAudioTrack)
    {
        CMTime currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(audioBuffer);
        
        if (CMTIME_IS_INVALID(startTime))
        {
            if (audioInputReadyCallback == NULL)
            {
                [assetWriter startWriting];
            }
            
            [assetWriter startSessionAtSourceTime:currentSampleTime];
            startTime = currentSampleTime;
        }

        if (!assetWriterAudioInput.readyForMoreMediaData)
        {
            NSLog(@"Had to drop an audio frame");
            return;
        }
        
//        NSLog(@"Recorded audio sample time: %lld, %d, %lld", currentSampleTime.value, currentSampleTime.timescale, currentSampleTime.epoch);
        [assetWriterAudioInput appendSampleBuffer:audioBuffer];
    }
}
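
// When ready callbacks have been supplied (typically for offline movie
// processing rather than live capture), the writer pulls data: each input
// invokes its callback on the shared context queue whenever it can accept
// more media, instead of having frames pushed to it in real time.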
- (void)enableSynchronizationCallbacks;
{
    if (videoInputReadyCallback != NULL)
    {
        [assetWriter startWriting];
        [assetWriterVideoInput requestMediaDataWhenReadyOnQueue:[GPUImageContext sharedContextQueue] usingBlock:videoInputReadyCallback];
    }
    
    if (audioInputReadyCallback != NULL)
    {
        [assetWriterAudioInput requestMediaDataWhenReadyOnQueue:[GPUImageContext sharedContextQueue] usingBlock:audioInputReadyCallback];
    }
}

#pragma mark -
#pragma mark Frame rendering
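
// Frames are rendered into an offscreen FBO backed by a renderbuffer at the
// movie's pixel dimensions, then read back with glReadPixels into the asset
// writer's pixel buffers.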

- (void)createDataFBO;
{
    glActiveTexture(GL_TEXTURE1);
    glGenFramebuffers(1, &movieFramebuffer);
    glBindFramebuffer(GL_FRAMEBUFFER, movieFramebuffer);
    
    glGenRenderbuffers(1, &movieRenderbuffer);
    glBindRenderbuffer(GL_RENDERBUFFER, movieRenderbuffer);
    glRenderbufferStorage(GL_RENDERBUFFER, GL_RGBA8, (int)videoSize.width, (int)videoSize.height);
    glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, movieRenderbuffer);
    
#ifndef NS_BLOCK_ASSERTIONS
    GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
    NSAssert(status == GL_FRAMEBUFFER_COMPLETE, @"Incomplete filter FBO: %d", status);
#endif
}

- (void)destroyDataFBO;
{
    [GPUImageContext useImageProcessingContext];

    if (movieFramebuffer)
    {
        glDeleteFramebuffers(1, &movieFramebuffer);
        movieFramebuffer = 0;
    }
    
    if (movieRenderbuffer)
    {
        glDeleteRenderbuffers(1, &movieRenderbuffer);
        movieRenderbuffer = 0;
    }
}

- (void)setFilterFBO;
{
    if (!movieFramebuffer)
    {
        [self createDataFBO];
    }
    
    glBindFramebuffer(GL_FRAMEBUFFER, movieFramebuffer);
    
    glViewport(0, 0, (int)videoSize.width, (int)videoSize.height);
}

- (void)renderAtInternalSize;
{
    [GPUImageContext useImageProcessingContext];
    [self setFilterFBO];
    
    [GPUImageContext setActiveShaderProgram:colorSwizzlingProgram];
    
    glClearColor(1.0f, 0.0f, 0.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    
    // This needs to be flipped to write out to video correctly
    static const GLfloat squareVertices[] = {
        -1.0f, -1.0f,
         1.0f, -1.0f,
        -1.0f,  1.0f,
         1.0f,  1.0f,
    };
    
    static const GLfloat textureCoordinates[] = {
        0.0f, 0.0f,
        1.0f, 0.0f,
        0.0f, 1.0f,
        1.0f, 1.0f,
    };
    
    glActiveTexture(GL_TEXTURE4);
    glBindTexture(GL_TEXTURE_2D, [firstInputFramebuffer texture]);
    glUniform1i(colorSwizzlingInputTextureUniform, 4);
    
    glVertexAttribPointer(colorSwizzlingPositionAttribute, 2, GL_FLOAT, 0, 0, squareVertices);
    glVertexAttribPointer(colorSwizzlingTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);
    
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
    
    glFinish();
    [firstInputFramebuffer unlock];
}

#pragma mark -
#pragma mark GPUImageInput protocol

- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
{
    if (!isRecording)
    {
        [firstInputFramebuffer unlock];
        return;
    }

    // Drop frames forced by images and other things with no time constants
    // Also, if two consecutive times with the same value are added to the movie, it aborts recording, so I bail on that case
    if ( (CMTIME_IS_INVALID(frameTime)) || (CMTIME_COMPARE_INLINE(frameTime, ==, previousFrameTime)) || (CMTIME_IS_INDEFINITE(frameTime)) )
    {
        [firstInputFramebuffer unlock];
        return;
    }

    if (CMTIME_IS_INVALID(startTime))
    {
        if (videoInputReadyCallback == NULL)
        {
            [assetWriter startWriting];
        }
        
        [assetWriter startSessionAtSourceTime:frameTime];
        startTime = frameTime;
    }

    if (!assetWriterVideoInput.readyForMoreMediaData)
    {
        [firstInputFramebuffer unlock];
        NSLog(@"Had to drop a video frame");
        return;
    }
    
    // Render the frame with swizzled colors, so that they can be uploaded quickly as BGRA frames
    [GPUImageContext useImageProcessingContext];
    [self renderAtInternalSize];
    
    CVPixelBufferRef pixel_buffer = NULL;
    CVReturn status = CVPixelBufferPoolCreatePixelBuffer(NULL, [assetWriterPixelBufferInput pixelBufferPool], &pixel_buffer);
    if ((pixel_buffer == NULL) || (status != kCVReturnSuccess))
    {
        return;
    }
    else
    {
        CVPixelBufferLockBaseAddress(pixel_buffer, 0);
        
        GLubyte *pixelBufferData = (GLubyte *)CVPixelBufferGetBaseAddress(pixel_buffer);
        glReadPixels(0, 0, videoSize.width, videoSize.height, GL_RGBA, GL_UNSIGNED_BYTE, pixelBufferData);
    }
    
//    if (![assetWriterPixelBufferInput appendPixelBuffer:pixel_buffer withPresentationTime:CMTimeSubtract(frameTime, startTime)])
    if (![assetWriterPixelBufferInput appendPixelBuffer:pixel_buffer withPresentationTime:frameTime])
    {
        NSLog(@"Problem appending pixel buffer at time: %lld", frameTime.value);
    }
    else
    {
//        NSLog(@"Recorded video sample time: %lld, %d, %lld", frameTime.value, frameTime.timescale, frameTime.epoch);
    }
    CVPixelBufferUnlockBaseAddress(pixel_buffer, 0);
    
    previousFrameTime = frameTime;
    
    // In this code path every pixel buffer comes from the adaptor's pool above,
    // so it is released unconditionally; releasing only when fast texture upload
    // is unsupported would leak one buffer per frame on supporting devices.
    CVPixelBufferRelease(pixel_buffer);
}

- (NSInteger)nextAvailableTextureIndex;
{
    return 0;
}

- (void)setInputFramebuffer:(GPUImageFramebuffer *)newInputFramebuffer atIndex:(NSInteger)textureIndex;
{
    firstInputFramebuffer = newInputFramebuffer;
    [firstInputFramebuffer lock];
}

- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
{
    inputRotation = newInputRotation;
}

- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
{
}

- (CGSize)maximumOutputSize;
{
    return videoSize;
}

- (void)endProcessing
{
    if (completionBlock)
    {
        completionBlock();
    }
    else
    {
        if (_delegate && [_delegate respondsToSelector:@selector(movieRecordingCompleted)])
        {
            [_delegate movieRecordingCompleted];
        }
    }
}

- (BOOL)shouldIgnoreUpdatesToThisTarget;
{
    return NO;
}

- (void)conserveMemoryForNextFrame;
{
}

- (BOOL)wantsMonochromeInput;
{
    return NO;
}

- (void)setCurrentlyReceivingMonochromeInput:(BOOL)newValue;
{
}

#pragma mark -
#pragma mark Accessors

- (void)setHasAudioTrack:(BOOL)newValue
{
    [self setHasAudioTrack:newValue audioSettings:nil];
}

- (void)setHasAudioTrack:(BOOL)newValue audioSettings:(NSDictionary *)audioOutputSettings;
{
    _hasAudioTrack = newValue;
    
    if (_hasAudioTrack)
    {
        if (_shouldPassthroughAudio)
        {
            // Do not set any settings so audio will be the same as passthrough
            audioOutputSettings = nil;
        }
        else if (audioOutputSettings == nil)
        {
//            double preferredHardwareSampleRate = [[AVAudioSession sharedInstance] currentHardwareSampleRate];
            double preferredHardwareSampleRate = 48000; // ? - TODO: Fix this, because it's probably broken
            
            AudioChannelLayout acl;
            bzero(&acl, sizeof(acl));
            acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
            
            audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   [NSNumber numberWithInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
                                   [NSNumber numberWithInt:1], AVNumberOfChannelsKey,
                                   [NSNumber numberWithFloat:preferredHardwareSampleRate], AVSampleRateKey,
                                   [NSData dataWithBytes:&acl length:sizeof(acl)], AVChannelLayoutKey,
//                                 [NSNumber numberWithInt:AVAudioQualityLow], AVEncoderAudioQualityKey,
                                   [NSNumber numberWithInt:64000], AVEncoderBitRateKey,
                                   nil];
/*
            AudioChannelLayout acl;
            bzero(&acl, sizeof(acl));
            acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
            
            audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   [NSNumber numberWithInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
                                   [NSNumber numberWithInt:1], AVNumberOfChannelsKey,
                                   [NSNumber numberWithFloat:44100.0], AVSampleRateKey,
                                   [NSNumber numberWithInt:64000], AVEncoderBitRateKey,
                                   [NSData dataWithBytes:&acl length:sizeof(acl)], AVChannelLayoutKey,
                                   nil];
*/
        }
        
        assetWriterAudioInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioOutputSettings];
        [assetWriter addInput:assetWriterAudioInput];
        assetWriterAudioInput.expectsMediaDataInRealTime = _encodingLiveVideo;
    }
    else
    {
        // Remove audio track if it exists
    }
}

@end
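
// A minimal usage sketch, not part of the original file: recording the output of
// a filter chain to disk. videoCamera and filter are assumed to be configured
// elsewhere, and the path and size here are illustrative.
/*
 NSString *moviePath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"movie.m4v"];
 NSURL *movieURL = [NSURL fileURLWithPath:moviePath];
 
 GPUImageMovieWriter *movieWriter = [[GPUImageMovieWriter alloc] initWithMovieURL:movieURL size:CGSizeMake(480.0, 640.0)];
 [filter addTarget:movieWriter];
 
 [videoCamera startCameraCapture];
 [movieWriter startRecording];
 
 // ... some time later ...
 [filter removeTarget:movieWriter];
 [movieWriter finishRecording];
 */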