#import "GPUImageAVCamera.h"
#import "GPUImageMovieWriter.h"
#import "GPUImageFilter.h"
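
// GPUImageAVCamera pulls live frames from an AVFoundation capture device (a Mac
// webcam or other AVCaptureDevice), uploads them to OpenGL textures, and feeds
// them into the GPUImage filter chain, optionally converting YUV input to RGB
// on the GPU using the shaders below.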
NSString *const kGPUImageYUVVideoRangeConversionForRGFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;
 
 uniform sampler2D luminanceTexture;
 uniform sampler2D chrominanceTexture;
 
 void main()
 {
     vec3 yuv;
     vec3 rgb;
     
     yuv.x = texture2D(luminanceTexture, textureCoordinate).r;
     yuv.yz = texture2D(chrominanceTexture, textureCoordinate).rg - vec2(0.5, 0.5);
     
     // BT.601, the standard for SDTV, is provided here as a reference:
     /*
     rgb = mat3(      1,       1,       1,
                      0, -.39465, 2.03211,
                1.13983, -.58060,       0) * yuv;
     */
     
     // BT.709, the standard for HDTV:
     rgb = mat3(      1,       1,       1,
                      0, -.21482, 2.12798,
                1.28033, -.38059,       0) * yuv;
     
     gl_FragColor = vec4(rgb, 1);
 }
);
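
// The variant below is the fallback for hardware without red/red-green texture
// support: the chrominance plane is uploaded as a luminance/alpha texture, so
// Cb and Cr are sampled from the .r and .a channels instead of .r and .g.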
NSString *const kGPUImageYUVVideoRangeConversionForLAFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;
 
 uniform sampler2D luminanceTexture;
 uniform sampler2D chrominanceTexture;
 
 void main()
 {
     vec3 yuv;
     vec3 rgb;
     
     yuv.x = texture2D(luminanceTexture, textureCoordinate).r;
     yuv.yz = texture2D(chrominanceTexture, textureCoordinate).ra - vec2(0.5, 0.5);
     
     // BT.601, the standard for SDTV, is provided here as a reference:
     /*
     rgb = mat3(      1,       1,       1,
                      0, -.39465, 2.03211,
                1.13983, -.58060,       0) * yuv;
     */
     
     // BT.709, the standard for HDTV:
     rgb = mat3(      1,       1,       1,
                      0, -.21482, 2.12798,
                1.28033, -.38059,       0) * yuv;
     
     gl_FragColor = vec4(rgb, 1);
 }
);

#pragma mark -
#pragma mark Private methods and instance variables

@interface GPUImageAVCamera ()
{
    AVCaptureDeviceInput *audioInput;
    AVCaptureAudioDataOutput *audioOutput;
    NSDate *startingCaptureTime;
    
    NSInteger _frameRate;
    
    dispatch_queue_t cameraProcessingQueue, audioProcessingQueue;
    
    GLProgram *yuvConversionProgram;
    GLint yuvConversionPositionAttribute, yuvConversionTextureCoordinateAttribute;
    GLint yuvConversionLuminanceTextureUniform, yuvConversionChrominanceTextureUniform;
    
    int imageBufferWidth, imageBufferHeight;
}

- (void)updateOrientationSendToTargets;
- (void)convertYUVToRGBOutput;
@end

@implementation GPUImageAVCamera

@synthesize captureSessionPreset = _captureSessionPreset;
@synthesize captureSession = _captureSession;
@synthesize inputCamera = _inputCamera;
@synthesize runBenchmark = _runBenchmark;
@synthesize delegate = _delegate;
@synthesize horizontallyMirrorFrontFacingCamera = _horizontallyMirrorFrontFacingCamera, horizontallyMirrorRearFacingCamera = _horizontallyMirrorRearFacingCamera;

#pragma mark -
#pragma mark Initialization and teardown

+ (NSArray *)connectedCameraDevices;
{
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    return devices;
}
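
// Illustrative usage (not part of this file): enumerate the attached cameras and
// create a capture instance for the first one. Passing a nil device to
// -initWithSessionPreset:cameraDevice: selects the system default camera.
//
//    NSArray *cameras = [GPUImageAVCamera connectedCameraDevices];
//    GPUImageAVCamera *camera = [[GPUImageAVCamera alloc] initWithSessionPreset:AVCaptureSessionPreset640x480
//                                                                  cameraDevice:[cameras firstObject]];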

- (id)init;
{
    if (!(self = [self initWithSessionPreset:AVCaptureSessionPreset640x480 cameraDevice:nil]))
    {
        return nil;
    }
    
    return self;
}

- (id)initWithDeviceUniqueID:(NSString *)deviceUniqueID;
{
    if (!(self = [self initWithSessionPreset:AVCaptureSessionPreset640x480 deviceUniqueID:deviceUniqueID]))
    {
        return nil;
    }
    
    return self;
}

- (id)initWithSessionPreset:(NSString *)sessionPreset deviceUniqueID:(NSString *)deviceUniqueID;
{
    if (!(self = [self initWithSessionPreset:sessionPreset cameraDevice:[AVCaptureDevice deviceWithUniqueID:deviceUniqueID]]))
    {
        return nil;
    }
    
    return self;
}

- (id)initWithSessionPreset:(NSString *)sessionPreset cameraDevice:(AVCaptureDevice *)cameraDevice;
{
    if (!(self = [super init]))
    {
        return nil;
    }
    
    cameraProcessingQueue = dispatch_queue_create("com.sunsetlakesoftware.GPUImage.cameraProcessingQueue", NULL);
    audioProcessingQueue = dispatch_queue_create("com.sunsetlakesoftware.GPUImage.audioProcessingQueue", NULL);
    frameRenderingSemaphore = dispatch_semaphore_create(1);
    
    _frameRate = 0; // A value of 0 leaves the device's default frame rate in place; set to 1 or above to override it
    _runBenchmark = NO;
    capturePaused = NO;
    outputRotation = kGPUImageNoRotation;
//    captureAsYUV = YES;
    captureAsYUV = NO;
    
    runSynchronouslyOnVideoProcessingQueue(^{
        
        if (captureAsYUV)
        {
            [GPUImageContext useImageProcessingContext];
//            if ([GPUImageContext deviceSupportsRedTextures])
//            {
//                yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageYUVVideoRangeConversionForRGFragmentShaderString];
//            }
//            else
//            {
                yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageYUVVideoRangeConversionForLAFragmentShaderString];
//            }
            
            if (!yuvConversionProgram.initialized)
            {
                [yuvConversionProgram addAttribute:@"position"];
                [yuvConversionProgram addAttribute:@"inputTextureCoordinate"];
                
                if (![yuvConversionProgram link])
                {
                    NSString *progLog = [yuvConversionProgram programLog];
                    NSLog(@"Program link log: %@", progLog);
                    NSString *fragLog = [yuvConversionProgram fragmentShaderLog];
                    NSLog(@"Fragment shader compile log: %@", fragLog);
                    NSString *vertLog = [yuvConversionProgram vertexShaderLog];
                    NSLog(@"Vertex shader compile log: %@", vertLog);
                    yuvConversionProgram = nil;
                    NSAssert(NO, @"Filter shader link failed");
                }
            }
            
            yuvConversionPositionAttribute = [yuvConversionProgram attributeIndex:@"position"];
            yuvConversionTextureCoordinateAttribute = [yuvConversionProgram attributeIndex:@"inputTextureCoordinate"];
            yuvConversionLuminanceTextureUniform = [yuvConversionProgram uniformIndex:@"luminanceTexture"];
            yuvConversionChrominanceTextureUniform = [yuvConversionProgram uniformIndex:@"chrominanceTexture"];
            
            [GPUImageContext setActiveShaderProgram:yuvConversionProgram];
            
            glEnableVertexAttribArray(yuvConversionPositionAttribute);
            glEnableVertexAttribArray(yuvConversionTextureCoordinateAttribute);
        }
    });
    
    // Grab the back-facing or front-facing camera
    _inputCamera = nil;
    
    if (cameraDevice == nil)
    {
        _inputCamera = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    }
    else
    {
        _inputCamera = cameraDevice;
    }
    
    if (!_inputCamera)
    {
        return nil;
    }
    
    // Create the capture session
    _captureSession = [[AVCaptureSession alloc] init];
    
    [_captureSession beginConfiguration];
    
    // Add the video input
    NSError *error = nil;
    videoInput = [[AVCaptureDeviceInput alloc] initWithDevice:_inputCamera error:&error];
    if ([_captureSession canAddInput:videoInput])
    {
        [_captureSession addInput:videoInput];
    }
    
    // Add the video frame output
    videoOutput = [[AVCaptureVideoDataOutput alloc] init];
    [videoOutput setAlwaysDiscardsLateVideoFrames:NO];
    
//    NSLog(@"Camera: %@", _inputCamera);
//    [self printSupportedPixelFormats];
    
//    if (captureAsYUV && [GPUImageContext deviceSupportsRedTextures])
    if (captureAsYUV && [GPUImageContext supportsFastTextureUpload])
    {
        BOOL supportsFullYUVRange = NO;
        NSArray *supportedPixelFormats = videoOutput.availableVideoCVPixelFormatTypes;
        for (NSNumber *currentPixelFormat in supportedPixelFormats)
        {
            if ([currentPixelFormat intValue] == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
            {
                supportsFullYUVRange = YES;
            }
        }
        
        if (supportsFullYUVRange)
        {
            [videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
        }
        else
        {
            [videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
        }
    }
    else
    {
        // Despite the longer list of supported pixel formats the device reports, only RGB, RGBA, BGRA, and the YUV 4:2:2 variants seem to come back cleanly
        [videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
//        [videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_422YpCbCr8_yuvs] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
    }
    
    [videoOutput setSampleBufferDelegate:self queue:cameraProcessingQueue];
//    [videoOutput setSampleBufferDelegate:self queue:[GPUImageContext sharedContextQueue]];
    if ([_captureSession canAddOutput:videoOutput])
    {
        [_captureSession addOutput:videoOutput];
    }
    else
    {
        NSLog(@"Couldn't add video output");
        return nil;
    }
    
    _captureSessionPreset = sessionPreset;
    [_captureSession setSessionPreset:_captureSessionPreset];
    
// This will let you get 60 FPS video from the 720p preset on an iPhone 4S, but only on that device with that preset
//    AVCaptureConnection *conn = [videoOutput connectionWithMediaType:AVMediaTypeVideo];
//    
//    if (conn.supportsVideoMinFrameDuration)
//        conn.videoMinFrameDuration = CMTimeMake(1,60);
//    if (conn.supportsVideoMaxFrameDuration)
//        conn.videoMaxFrameDuration = CMTimeMake(1,60);
    
    [_captureSession commitConfiguration];
    
    return self;
}

- (void)dealloc
{
    [self stopCameraCapture];
    [videoOutput setSampleBufferDelegate:nil queue:dispatch_get_main_queue()];
    [audioOutput setSampleBufferDelegate:nil queue:dispatch_get_main_queue()];
    
    [self removeInputsAndOutputs];
    
// ARC forbids explicit message sends of 'release'; since iOS 6 this extends to dispatch_release() calls, so they must be compiled out when targeting iOS 6 and later.
#if ( (__IPHONE_OS_VERSION_MIN_REQUIRED < __IPHONE_6_0) || (!defined(__IPHONE_6_0)) )
    if (cameraProcessingQueue != NULL)
    {
        dispatch_release(cameraProcessingQueue);
    }

    if (audioProcessingQueue != NULL)
    {
        dispatch_release(audioProcessingQueue);
    }

    if (frameRenderingSemaphore != NULL)
    {
        dispatch_release(frameRenderingSemaphore);
    }
#endif
}

- (void)removeInputsAndOutputs;
{
    [_captureSession removeInput:videoInput];
    [_captureSession removeOutput:videoOutput];
    if (_microphone != nil)
    {
        [_captureSession removeInput:audioInput];
        [_captureSession removeOutput:audioOutput];
    }
}

#pragma mark -
#pragma mark Managing targets
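
// New targets immediately receive the camera's current output rotation so their
// first frame arrives correctly oriented.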
- (void)addTarget:(id<GPUImageInput>)newTarget atTextureLocation:(NSInteger)textureLocation;
{
    [super addTarget:newTarget atTextureLocation:textureLocation];
    
    [newTarget setInputRotation:outputRotation atIndex:textureLocation];
}

#pragma mark -
#pragma mark Manage the camera video stream

- (void)startCameraCapture;
{
    if (![_captureSession isRunning])
    {
        startingCaptureTime = [NSDate date];
        [_captureSession startRunning];
    }
}

- (void)stopCameraCapture;
{
    if ([_captureSession isRunning])
    {
        [_captureSession stopRunning];
    }
}
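
// Illustrative lifecycle sketch (identifiers other than this class are assumed):
// start the session once targets are attached, and stop it when the preview or
// recording ends.
//
//    [camera addTarget:filter];
//    [camera startCameraCapture];
//    // ... process frames ...
//    [camera stopCameraCapture];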

- (void)pauseCameraCapture;
{
    capturePaused = YES;
}

- (void)resumeCameraCapture;
{
    capturePaused = NO;
}
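
// Swaps the session's video input to the camera on the opposite side (front vs.
// back), keeping the existing input if the new device can't be added.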
- (void)rotateCamera
{
    if (self.frontFacingCameraPresent == NO)
        return;
    
    NSError *error;
    AVCaptureDeviceInput *newVideoInput;
    AVCaptureDevicePosition currentCameraPosition = [[videoInput device] position];
    
    if (currentCameraPosition == AVCaptureDevicePositionBack)
    {
        currentCameraPosition = AVCaptureDevicePositionFront;
    }
    else
    {
        currentCameraPosition = AVCaptureDevicePositionBack;
    }
    
    AVCaptureDevice *newCamera = nil;
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *device in devices)
    {
        if ([device position] == currentCameraPosition)
        {
            newCamera = device;
        }
    }
    newVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:newCamera error:&error];
    
    if (newVideoInput != nil)
    {
        [_captureSession beginConfiguration];
        
        [_captureSession removeInput:videoInput];
        if ([_captureSession canAddInput:newVideoInput])
        {
            [_captureSession addInput:newVideoInput];
            videoInput = newVideoInput;
        }
        else
        {
            [_captureSession addInput:videoInput];
        }
//        captureSession.sessionPreset = oriPreset;
        [_captureSession commitConfiguration];
    }
    
    _inputCamera = newCamera;
}

- (AVCaptureDevicePosition)cameraPosition
{
    return [[videoInput device] position];
}

- (BOOL)isFrontFacingCameraPresent;
{
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    
    for (AVCaptureDevice *device in devices)
    {
        if ([device position] == AVCaptureDevicePositionFront)
            return YES;
    }
    
    return NO;
}

- (void)setCaptureSessionPreset:(NSString *)captureSessionPreset;
{
    [_captureSession beginConfiguration];
    
    _captureSessionPreset = captureSessionPreset;
    [_captureSession setSessionPreset:_captureSessionPreset];
    
    [_captureSession commitConfiguration];
}

- (void)setFrameRate:(NSInteger)frameRate;
{
    _frameRate = frameRate;
    
    if (_frameRate > 0)
    {
        for (AVCaptureConnection *connection in videoOutput.connections)
        {
            if ([connection respondsToSelector:@selector(setVideoMinFrameDuration:)])
                connection.videoMinFrameDuration = CMTimeMake(1, (int32_t)_frameRate);
        }
    }
    else
    {
        for (AVCaptureConnection *connection in videoOutput.connections)
        {
            if ([connection respondsToSelector:@selector(setVideoMinFrameDuration:)])
                connection.videoMinFrameDuration = kCMTimeInvalid; // This sets videoMinFrameDuration back to the default
        }
    }
}
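
// Illustrative: camera.frameRate = 30 sets a minimum frame duration of 1/30 s on
// every video connection that supports it; setting it back to 0 restores the
// device default.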
- (NSInteger)frameRate;
{
    return _frameRate;
}
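
// Returns the first capture connection that carries video, or nil if none exists.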
- (AVCaptureConnection *)videoCaptureConnection
{
    for (AVCaptureConnection *connection in [videoOutput connections])
    {
        for (AVCaptureInputPort *port in [connection inputPorts])
        {
            if ([[port mediaType] isEqual:AVMediaTypeVideo])
            {
                return connection;
            }
        }
    }
    
    return nil;
}

#define INITIALFRAMESTOIGNOREFORBENCHMARK 5
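
// Distributes a freshly captured framebuffer to all enabled targets in two passes:
// rotations, sizes, and framebuffers are assigned first, and newFrameReadyAtTime:
// fires only afterward, so every target is fully configured before any of them renders.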
- (void)updateTargetsForVideoCameraUsingCacheTextureAtWidth:(int)bufferWidth height:(int)bufferHeight time:(CMTime)currentTime;
{
    // First, update all the framebuffers in the targets
    for (id<GPUImageInput> currentTarget in targets)
    {
        if ([currentTarget enabled])
        {
            NSInteger indexOfObject = [targets indexOfObject:currentTarget];
            NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
            
            if (currentTarget != self.targetToIgnoreForUpdates)
            {
                [currentTarget setInputRotation:outputRotation atIndex:textureIndexOfTarget];
                [currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:textureIndexOfTarget];
                
                if ([currentTarget wantsMonochromeInput] && captureAsYUV)
                {
                    [currentTarget setCurrentlyReceivingMonochromeInput:YES];
                    // TODO: Replace optimization for monochrome output
                    [currentTarget setInputFramebuffer:outputFramebuffer atIndex:textureIndexOfTarget];
                }
                else
                {
                    [currentTarget setCurrentlyReceivingMonochromeInput:NO];
                    [currentTarget setInputFramebuffer:outputFramebuffer atIndex:textureIndexOfTarget];
                }
            }
            else
            {
                [currentTarget setInputRotation:outputRotation atIndex:textureIndexOfTarget];
                [currentTarget setInputFramebuffer:outputFramebuffer atIndex:textureIndexOfTarget];
            }
        }
    }
    
    // Then release our hold on the local framebuffer to send it back to the cache as soon as it's no longer needed
    [outputFramebuffer unlock];
    
    // Finally, trigger rendering as needed
    for (id<GPUImageInput> currentTarget in targets)
    {
        if ([currentTarget enabled])
        {
            NSInteger indexOfObject = [targets indexOfObject:currentTarget];
            NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
            
            if (currentTarget != self.targetToIgnoreForUpdates)
            {
                [currentTarget newFrameReadyAtTime:currentTime atIndex:textureIndexOfTarget];
            }
        }
    }
}
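
// Uploads one captured BGRA frame into a cached texture via glTexImage2D and
// notifies targets. Note that this path assumes the pixel buffer's rows are
// tightly packed (bytes-per-row == width * 4); padded buffers would need
// CVPixelBufferGetBytesPerRow handling.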
- (void)processVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer;
{
    if (capturePaused)
    {
        return;
    }
    
    CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent();
    CVImageBufferRef cameraFrame = CMSampleBufferGetImageBuffer(sampleBuffer);
    GLsizei bufferWidth = (GLsizei)CVPixelBufferGetWidth(cameraFrame);
    GLsizei bufferHeight = (GLsizei)CVPixelBufferGetHeight(cameraFrame);
    
    CMTime currentTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    
    [GPUImageContext useImageProcessingContext];
    CVPixelBufferLockBaseAddress(cameraFrame, 0);
    
    outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:CGSizeMake(bufferWidth, bufferHeight) onlyTexture:YES];
    
    glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]);
    
//    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, bufferWidth, bufferHeight, 0, GL_BGRA, GL_UNSIGNED_BYTE, CVPixelBufferGetBaseAddress(cameraFrame));
    
    // Using the BGRA extension to pull in video frame data directly
//    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, bytesPerRow / 3, bufferHeight, 0, GL_RGB, GL_UNSIGNED_BYTE, CVPixelBufferGetBaseAddress(cameraFrame));
//    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, bufferWidth, bufferHeight, 0, GL_YCBCR_422_APPLE, GL_UNSIGNED_SHORT_8_8_REV_APPLE, CVPixelBufferGetBaseAddress(cameraFrame));
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, bufferWidth, bufferHeight, 0, GL_BGRA, GL_UNSIGNED_BYTE, CVPixelBufferGetBaseAddress(cameraFrame));
    
    [self updateTargetsForVideoCameraUsingCacheTextureAtWidth:bufferWidth height:bufferHeight time:currentTime];
    
//    for (id<GPUImageInput> currentTarget in targets)
//    {
//        if ([currentTarget enabled])
//        {
//            if (currentTarget != self.targetToIgnoreForUpdates)
//            {
//                NSInteger indexOfObject = [targets indexOfObject:currentTarget];
//                NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
//                
//                [currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:textureIndexOfTarget];
//                [currentTarget newFrameReadyAtTime:currentTime atIndex:textureIndexOfTarget];
//            }
//        }
//    }
    
    CVPixelBufferUnlockBaseAddress(cameraFrame, 0);
    
    if (_runBenchmark)
    {
        numberOfFramesCaptured++;
        if (numberOfFramesCaptured > INITIALFRAMESTOIGNOREFORBENCHMARK)
        {
            CFAbsoluteTime currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime);
            totalFrameTimeDuringCapture += currentFrameTime;
            NSLog(@"Average frame time : %f ms", [self averageFrameDurationDuringCapture]);
            NSLog(@"Current frame time : %f ms", 1000.0 * currentFrameTime);
        }
    }
}

- (void)processAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer;
{
    [self.audioEncodingTarget processAudioBuffer:sampleBuffer];
}
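
// Renders the luminance and chrominance textures (bound to texture units 4 and 5)
// through the YUV conversion program into an RGB framebuffer from the cache.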
- (void)convertYUVToRGBOutput;
{
    [GPUImageContext setActiveShaderProgram:yuvConversionProgram];
    outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:CGSizeMake(imageBufferWidth, imageBufferHeight) textureOptions:self.outputTextureOptions onlyTexture:NO];
    [outputFramebuffer activateFramebuffer];
    
    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    
    static const GLfloat squareVertices[] = {
        -1.0f, -1.0f,
         1.0f, -1.0f,
        -1.0f,  1.0f,
         1.0f,  1.0f,
    };
    
    static const GLfloat textureCoordinates[] = {
        0.0f, 0.0f,
        1.0f, 0.0f,
        0.0f, 1.0f,
        1.0f, 1.0f,
    };
    
    glActiveTexture(GL_TEXTURE4);
    glBindTexture(GL_TEXTURE_2D, luminanceTexture);
    glUniform1i(yuvConversionLuminanceTextureUniform, 4);
    
    glActiveTexture(GL_TEXTURE5);
    glBindTexture(GL_TEXTURE_2D, chrominanceTexture);
    glUniform1i(yuvConversionChrominanceTextureUniform, 5);
    
    glVertexAttribPointer(yuvConversionPositionAttribute, 2, GL_FLOAT, 0, 0, squareVertices);
    glVertexAttribPointer(yuvConversionTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);
    
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
}

#pragma mark -
#pragma mark Benchmarking

- (CGFloat)averageFrameDurationDuringCapture;
{
    return (totalFrameTimeDuringCapture / (CGFloat)(numberOfFramesCaptured - INITIALFRAMESTOIGNOREFORBENCHMARK)) * 1000.0;
}

#pragma mark -
#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    if (captureOutput == audioOutput)
    {
//        if (dispatch_semaphore_wait(frameRenderingSemaphore, DISPATCH_TIME_NOW) != 0)
//        {
//            return;
//        }
        
        CFRetain(sampleBuffer);
        runAsynchronouslyOnVideoProcessingQueue(^{
            [self processAudioSampleBuffer:sampleBuffer];
            CFRelease(sampleBuffer);
//            dispatch_semaphore_signal(frameRenderingSemaphore);
        });
    }
    else
    {
        if (dispatch_semaphore_wait(frameRenderingSemaphore, DISPATCH_TIME_NOW) != 0)
        {
            return;
        }
        
        CFRetain(sampleBuffer);
        runAsynchronouslyOnVideoProcessingQueue(^{
            // Feature detection hook
            if (self.delegate && [self.delegate respondsToSelector:@selector(willOutputSampleBuffer:)])
            {
                [self.delegate willOutputSampleBuffer:sampleBuffer];
            }
            
            [self processVideoSampleBuffer:sampleBuffer];
            
            CFRelease(sampleBuffer);
            dispatch_semaphore_signal(frameRenderingSemaphore);
        });
    }
}

#pragma mark -
#pragma mark Accessors
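
// Attaching a movie writer as the audio encoding target lazily wires up the
// default microphone input and an audio data output; passing nil tears both down.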
- (void)setAudioEncodingTarget:(GPUImageMovieWriter *)newValue;
{
    runSynchronouslyOnVideoProcessingQueue(^{
        [_captureSession beginConfiguration];
        
        if (newValue == nil)
        {
            if (audioOutput)
            {
                [_captureSession removeInput:audioInput];
                [_captureSession removeOutput:audioOutput];
                audioInput = nil;
                audioOutput = nil;
                _microphone = nil;
            }
        }
        else
        {
            _microphone = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
            audioInput = [AVCaptureDeviceInput deviceInputWithDevice:_microphone error:nil];
            if ([_captureSession canAddInput:audioInput])
            {
                [_captureSession addInput:audioInput];
            }
            
            audioOutput = [[AVCaptureAudioDataOutput alloc] init];
            if ([_captureSession canAddOutput:audioOutput])
            {
                [_captureSession addOutput:audioOutput];
            }
            else
            {
                NSLog(@"Couldn't add audio output");
            }
            [audioOutput setSampleBufferDelegate:self queue:audioProcessingQueue];
        }
        
        [_captureSession commitConfiguration];
        
        [super setAudioEncodingTarget:newValue];
    });
}
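
// On the Mac there is no device rotation to compensate for, so this currently
// just resets every target to kGPUImageNoRotation.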
- (void)updateOrientationSendToTargets;
{
    runSynchronouslyOnVideoProcessingQueue(^{
        
        // From the iOS 5.0 release notes:
        // In previous iOS versions, the front-facing camera would always deliver buffers in
        // AVCaptureVideoOrientationLandscapeLeft and the back-facing camera would always
        // deliver buffers in AVCaptureVideoOrientationLandscapeRight.
        
        outputRotation = kGPUImageNoRotation;
        for (id<GPUImageInput> currentTarget in targets)
        {
            NSInteger indexOfObject = [targets indexOfObject:currentTarget];
            [currentTarget setInputRotation:outputRotation atIndex:[[targetTextureIndices objectAtIndex:indexOfObject] integerValue]];
        }
    });
}

- (void)setHorizontallyMirrorFrontFacingCamera:(BOOL)newValue
{
    _horizontallyMirrorFrontFacingCamera = newValue;
    [self updateOrientationSendToTargets];
}

- (void)setHorizontallyMirrorRearFacingCamera:(BOOL)newValue
{
    _horizontallyMirrorRearFacingCamera = newValue;
    [self updateOrientationSendToTargets];
}

- (void)printSupportedPixelFormats;
{
    NSArray *supportedPixelFormats = videoOutput.availableVideoCVPixelFormatTypes;
    for (NSNumber *currentPixelFormat in supportedPixelFormats)
    {
        NSString *pixelFormatName = nil;
        
        switch ([currentPixelFormat intValue])
        {
            case kCVPixelFormatType_1Monochrome: pixelFormatName = @"kCVPixelFormatType_1Monochrome"; break;
            case kCVPixelFormatType_2Indexed: pixelFormatName = @"kCVPixelFormatType_2Indexed"; break;
            case kCVPixelFormatType_4Indexed: pixelFormatName = @"kCVPixelFormatType_4Indexed"; break;
            case kCVPixelFormatType_8Indexed: pixelFormatName = @"kCVPixelFormatType_8Indexed"; break;
            case kCVPixelFormatType_1IndexedGray_WhiteIsZero: pixelFormatName = @"kCVPixelFormatType_1IndexedGray_WhiteIsZero"; break;
            case kCVPixelFormatType_2IndexedGray_WhiteIsZero: pixelFormatName = @"kCVPixelFormatType_2IndexedGray_WhiteIsZero"; break;
            case kCVPixelFormatType_4IndexedGray_WhiteIsZero: pixelFormatName = @"kCVPixelFormatType_4IndexedGray_WhiteIsZero"; break;
            case kCVPixelFormatType_8IndexedGray_WhiteIsZero: pixelFormatName = @"kCVPixelFormatType_8IndexedGray_WhiteIsZero"; break;
            case kCVPixelFormatType_16BE555: pixelFormatName = @"kCVPixelFormatType_16BE555"; break;
            case kCVPixelFormatType_16LE555: pixelFormatName = @"kCVPixelFormatType_16LE555"; break;
            case kCVPixelFormatType_16LE5551: pixelFormatName = @"kCVPixelFormatType_16LE5551"; break;
            case kCVPixelFormatType_16BE565: pixelFormatName = @"kCVPixelFormatType_16BE565"; break;
            case kCVPixelFormatType_16LE565: pixelFormatName = @"kCVPixelFormatType_16LE565"; break;
            case kCVPixelFormatType_24RGB: pixelFormatName = @"kCVPixelFormatType_24RGB"; break;
            case kCVPixelFormatType_24BGR: pixelFormatName = @"kCVPixelFormatType_24BGR"; break;
            case kCVPixelFormatType_32ARGB: pixelFormatName = @"kCVPixelFormatType_32ARGB"; break;
            case kCVPixelFormatType_32BGRA: pixelFormatName = @"kCVPixelFormatType_32BGRA"; break;
            case kCVPixelFormatType_32ABGR: pixelFormatName = @"kCVPixelFormatType_32ABGR"; break;
            case kCVPixelFormatType_32RGBA: pixelFormatName = @"kCVPixelFormatType_32RGBA"; break;
            case kCVPixelFormatType_64ARGB: pixelFormatName = @"kCVPixelFormatType_64ARGB"; break;
            case kCVPixelFormatType_48RGB: pixelFormatName = @"kCVPixelFormatType_48RGB"; break;
            case kCVPixelFormatType_32AlphaGray: pixelFormatName = @"kCVPixelFormatType_32AlphaGray"; break;
            case kCVPixelFormatType_16Gray: pixelFormatName = @"kCVPixelFormatType_16Gray"; break;
            case kCVPixelFormatType_30RGB: pixelFormatName = @"kCVPixelFormatType_30RGB"; break;
            case kCVPixelFormatType_422YpCbCr8: pixelFormatName = @"kCVPixelFormatType_422YpCbCr8"; break;
            case kCVPixelFormatType_4444YpCbCrA8: pixelFormatName = @"kCVPixelFormatType_4444YpCbCrA8"; break;
            case kCVPixelFormatType_4444YpCbCrA8R: pixelFormatName = @"kCVPixelFormatType_4444YpCbCrA8R"; break;
            case kCVPixelFormatType_4444AYpCbCr8: pixelFormatName = @"kCVPixelFormatType_4444AYpCbCr8"; break;
            case kCVPixelFormatType_4444AYpCbCr16: pixelFormatName = @"kCVPixelFormatType_4444AYpCbCr16"; break;
            case kCVPixelFormatType_444YpCbCr8: pixelFormatName = @"kCVPixelFormatType_444YpCbCr8"; break;
            case kCVPixelFormatType_422YpCbCr16: pixelFormatName = @"kCVPixelFormatType_422YpCbCr16"; break;
            case kCVPixelFormatType_422YpCbCr10: pixelFormatName = @"kCVPixelFormatType_422YpCbCr10"; break;
            case kCVPixelFormatType_444YpCbCr10: pixelFormatName = @"kCVPixelFormatType_444YpCbCr10"; break;
            case kCVPixelFormatType_420YpCbCr8Planar: pixelFormatName = @"kCVPixelFormatType_420YpCbCr8Planar"; break;
            case kCVPixelFormatType_420YpCbCr8PlanarFullRange: pixelFormatName = @"kCVPixelFormatType_420YpCbCr8PlanarFullRange"; break;
            case kCVPixelFormatType_422YpCbCr_4A_8BiPlanar: pixelFormatName = @"kCVPixelFormatType_422YpCbCr_4A_8BiPlanar"; break;
            case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange: pixelFormatName = @"kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange"; break;
            case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange: pixelFormatName = @"kCVPixelFormatType_420YpCbCr8BiPlanarFullRange"; break;
            case kCVPixelFormatType_422YpCbCr8_yuvs: pixelFormatName = @"kCVPixelFormatType_422YpCbCr8_yuvs"; break;
            case kCVPixelFormatType_422YpCbCr8FullRange: pixelFormatName = @"kCVPixelFormatType_422YpCbCr8FullRange"; break;
            case kCVPixelFormatType_OneComponent8: pixelFormatName = @"kCVPixelFormatType_OneComponent8"; break;
            case kCVPixelFormatType_TwoComponent8: pixelFormatName = @"kCVPixelFormatType_TwoComponent8"; break;
            default: pixelFormatName = [NSString stringWithFormat:@"Unknown pixel format: %d", [currentPixelFormat intValue]]; break; // Avoids logging "(null)" for unrecognized formats
        }
        NSLog(@"Supported pixel format: %@", pixelFormatName);
    }
}

@end