CameraCapture.mm 26 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770
  1. #if !PLATFORM_TVOS && UNITY_USES_WEBCAM
  2. #include "CameraCapture.h"
  3. #include "AVCapture.h"
  4. #include "CMVideoSampling.h"
  5. #include "CVTextureCache.h"
  6. #import <CoreVideo/CoreVideo.h>
  7. #include <cmath>
#if UNITY_HAS_COLORANDDEPTH_CAMERA
// Registry of live controllers sharing the color+depth camera; lazily created
// via +getActiveColorAndDepthCameraControllers.
static NSMutableArray<CameraCaptureController*> *activeColorAndDepthCameraControllers = nil;
#endif
@implementation CameraCaptureController
{
    AVCaptureDevice* _captureDevice;            // camera this controller captures from
    AVCaptureSession* _captureSession;
    AVCaptureDeviceInput* _captureInput;
    AVCaptureVideoDataOutput* _captureOutput;
#if UNITY_HAS_COLORANDDEPTH_CAMERA
    AVCaptureDepthDataOutput* _captureDepthOutput;
    AVCaptureDataOutputSynchronizer* _captureSynchronizer; // delivers time-aligned color+depth pairs
    @public bool _isDepth;                      // true when this controller represents the depth stream
#endif
    uint8_t* _pixelBufferCopy;                  // scratch buffer used by capturePixelBufferToMemBuffer:
    CMVideoSampling _cmVideoSampling;
    NSString* _preset;                          // session preset, kept so the session can be rebuilt
    CGPoint _focusPoint;
    AVCaptureFocusMode _focusMode;
    @public void* _userData;                    // opaque pointer handed back to UnityDidCaptureVideoFrame
    @public size_t _width, _height;             // dimensions of the last sampled frame
}
// Creates the capture input/output pair for the given device and resets
// per-capture state. Returns false when camera permission was denied or the
// AVFoundation objects could not be created. Does NOT create the session;
// callers (initCapture:preset:fps: / initColorAndDepthCameraCapture:...) do that.
- (bool)initCapture:(AVCaptureDevice*)device
{
    if (UnityGetAVCapturePermission(avVideoCapture) == avCapturePermissionDenied)
        return false;
    self.captureDevice = device;
    self.captureInput = [AVCaptureDeviceInput deviceInputWithDevice: device error: nil];
    self.captureOutput = [[AVCaptureVideoDataOutput alloc] init];
    if (self.captureOutput == nil || self.captureInput == nil)
        return false;
    self.captureOutput.alwaysDiscardsLateVideoFrames = YES;
    // frames are requested as 32-bit BGRA pixel buffers
    NSDictionary* options = @{ (NSString*)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) };
    [self.captureOutput setVideoSettings: options];
    CMVideoSampling_Initialize(&self->_cmVideoSampling);
    _width = _height = 0;
    _focusPoint = CGPointMake(0.5, 0.5); // default focus point is center
    _focusMode = AVCaptureFocusModeContinuousAutoFocus;
    _pixelBufferCopy = nullptr;
    return true;
}
  49. - (void)setCaptureFPS:(float)fps
  50. {
  51. if ([self.captureDevice lockForConfiguration: nil])
  52. {
  53. if (self.captureDevice.activeFormat)
  54. {
  55. fps = [self pickAvailableFrameRate: fps];
  56. self.captureDevice.activeVideoMinFrameDuration = CMTimeMake(1, fps);
  57. self.captureDevice.activeVideoMaxFrameDuration = CMTimeMake(1, fps);
  58. }
  59. else
  60. {
  61. // In some corner cases (seeing this on iPod iOS 6.1.5) activeFormat is null.
  62. #pragma clang diagnostic push
  63. #pragma clang diagnostic ignored "-Wdeprecated-declarations"
  64. self.captureOutput.minFrameDuration = CMTimeMake(1, fps);
  65. #pragma clang diagnostic pop
  66. }
  67. [self.captureDevice unlockForConfiguration];
  68. }
  69. }
  70. - (bool)initCapture:(AVCaptureDevice*)device preset:(NSString*)preset fps:(float)fps
  71. {
  72. if (![self initCapture: device])
  73. return false;
  74. self.captureSession = [[AVCaptureSession alloc] init];
  75. [self.captureSession addInput: self.captureInput];
  76. [self.captureSession addOutput: self.captureOutput];
  77. // queue on main thread to simplify gles life
  78. [self.captureOutput setSampleBufferDelegate: self queue: dispatch_get_main_queue()];
  79. self->_preset = preset;
  80. [self.captureSession setSessionPreset: preset];
  81. [self setCaptureFPS: fps];
  82. return true;
  83. }
// AVCaptureVideoDataOutputSampleBufferDelegate: invoked on the main queue for
// every captured color frame. Converts the sample buffer into a texture
// (updating _width/_height) and forwards it to Unity.
- (void)captureOutput:(AVCaptureOutput*)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection*)connection
{
    intptr_t tex = (intptr_t)CMVideoSampling_SampleBuffer(&self->_cmVideoSampling, sampleBuffer, &_width, &_height);
    UnityDidCaptureVideoFrame(tex, self->_userData);
}
  89. - (void)capturePixelBufferToMemBuffer:(uint8_t*)dst
  90. {
  91. CVPixelBufferRef pbuf = (CVPixelBufferRef)self->_cmVideoSampling.cvImageBuffer;
  92. const size_t srcRowSize = CVPixelBufferGetBytesPerRow(pbuf);
  93. const size_t bufSize = srcRowSize * self->_height;
  94. if (self->_pixelBufferCopy == nullptr)
  95. {
  96. self->_pixelBufferCopy = (uint8_t*)::malloc(bufSize);
  97. }
  98. // while not the best way memory-wise, we want to minimize stalling
  99. CVPixelBufferLockBaseAddress(pbuf, kCVPixelBufferLock_ReadOnly);
  100. {
  101. ::memcpy(self->_pixelBufferCopy, CVPixelBufferGetBaseAddress(pbuf), bufSize);
  102. }
  103. CVPixelBufferUnlockBaseAddress(pbuf, kCVPixelBufferLock_ReadOnly);
  104. OSType pixelFormat = CVPixelBufferGetPixelFormatType(pbuf);
  105. size_t bpp = 0;
  106. switch (pixelFormat)
  107. {
  108. case kCVPixelFormatType_32BGRA:
  109. bpp = 4;
  110. break;
  111. #if UNITY_HAS_IOSSDK_11_0
  112. case kCVPixelFormatType_DepthFloat16:
  113. bpp = 2;
  114. break;
  115. #endif
  116. default:
  117. assert(false);
  118. break;
  119. }
  120. const size_t dstRowSize = self->_width * bpp;
  121. uint8_t* src = self->_pixelBufferCopy + (self->_height - 1) * srcRowSize;
  122. for (size_t i = 0; i < self->_height; ++i)
  123. {
  124. ::memcpy(dst, src, dstRowSize);
  125. dst += dstRowSize;
  126. src -= srcRowSize;
  127. }
  128. }
// Returns nonzero when the cached capture texture is vertically flipped,
// as reported by the CVTextureCache helper.
- (int)isCVTextureFlipped
{
    return IsCVTextureFlipped(self->_cmVideoSampling.cvTextureCacheTexture);
}
  133. + (BOOL)focusPointSupported:(AVCaptureDevice*)captureDevice withFocusMode:(AVCaptureFocusMode)focusMode
  134. {
  135. return captureDevice.focusPointOfInterestSupported && [captureDevice isFocusModeSupported: focusMode];
  136. }
  137. - (int)setFocusPointWithX:(float)x Y:(float)y
  138. {
  139. if (x < 0 || x > 1 || y < 0 || y > 1)
  140. {
  141. _focusPoint = CGPointMake(0.5, 0.5); // default value for iOS
  142. _focusMode = AVCaptureFocusModeContinuousAutoFocus;
  143. }
  144. else
  145. {
  146. _focusPoint = CGPointMake(x, 1.0 - y);
  147. _focusMode = AVCaptureFocusModeAutoFocus;
  148. }
  149. return [self setFocusPoint];
  150. }
  151. - (int)setFocusPoint
  152. {
  153. if (self.captureDevice != nil && [CameraCaptureController focusPointSupported: self.captureDevice withFocusMode: _focusMode])
  154. {
  155. if ([self.captureDevice lockForConfiguration: nil])
  156. {
  157. self.captureDevice.focusPointOfInterest = _focusPoint;
  158. self.captureDevice.focusMode = _focusMode;
  159. [self.captureDevice unlockForConfiguration];
  160. return 1;
  161. }
  162. }
  163. return 0;
  164. }
  165. #if UNITY_HAS_COLORANDDEPTH_CAMERA
  166. + (NSMutableArray<CameraCaptureController*>*)getActiveColorAndDepthCameraControllers
  167. {
  168. if (activeColorAndDepthCameraControllers == nil)
  169. {
  170. activeColorAndDepthCameraControllers = [[NSMutableArray alloc] init];
  171. }
  172. return activeColorAndDepthCameraControllers;
  173. }
// Registers a controller for the shared color+depth camera. A previous
// controller for the same device+stream is paused, and the controller for the
// companion stream (color vs depth) has its session torn down — it is rebuilt
// later by removeColorAndDepthCameraController:.
+ (void)addColorAndDepthCameraController:(CameraCaptureController*)controller
{
    CameraCaptureController* prevController = [self findColorAndDepthCameraController: controller.captureDevice isDepth: controller->_isDepth];
    if (prevController != nil)
        [prevController pause];
    CameraCaptureController* otherController = [self findColorAndDepthCameraController: controller.captureDevice isDepth: !controller->_isDepth];
    if (otherController != nil)
    {
        [otherController.captureSession stopRunning];
        [otherController clearColorAndDepthCameraCaptureSession];
    }
    [[self getActiveColorAndDepthCameraControllers] addObject: controller];
}
// Unregisters a controller. If the companion stream's controller is still
// registered, its session is rebuilt and restarted so it regains the camera.
+ (void)removeColorAndDepthCameraController:(CameraCaptureController*)controller
{
    [[self getActiveColorAndDepthCameraControllers] removeObject: controller];
    CameraCaptureController* otherController = [self findColorAndDepthCameraController: controller.captureDevice isDepth: !controller->_isDepth];
    if (otherController != nil)
    {
        [otherController initColorAndDepthCameraCaptureSession];
        [otherController.captureSession startRunning];
    }
}
  197. + (void)clearColorAndDepthCameraControllers
  198. {
  199. NSMutableArray<CameraCaptureController*>* activeColorAndDepthCameraControllers = [self getActiveColorAndDepthCameraControllers];
  200. for (CameraCaptureController *controller in activeColorAndDepthCameraControllers)
  201. {
  202. if (controller.captureSession != nil)
  203. {
  204. [controller.captureSession stopRunning];
  205. [controller clearColorAndDepthCameraCaptureSession];
  206. }
  207. }
  208. [activeColorAndDepthCameraControllers removeAllObjects];
  209. }
  210. + (CameraCaptureController*)findColorAndDepthCameraController:(AVCaptureDevice*)device isDepth:(bool)isDepth
  211. {
  212. for (CameraCaptureController *controller in [self getActiveColorAndDepthCameraControllers])
  213. {
  214. if (controller.captureDevice == device && controller->_isDepth == isDepth)
  215. return controller;
  216. }
  217. return nil;
  218. }
// Sets up synchronized color+depth capture (iOS 11+). A single controller
// drives both outputs through an AVCaptureDataOutputSynchronizer; isDepth
// records which stream this controller represents for frame routing.
- (bool)initColorAndDepthCameraCapture:(AVCaptureDevice*)device preset:(NSString*)preset fps:(float)fps isDepth:(bool)isDepth
{
    if (!UnityiOS110orNewer())
        return false;
    if (![self initCapture: device])
        return false;
    self.captureDepthOutput = [[AVCaptureDepthDataOutput alloc] init];
    if (self.captureDepthOutput == nil)
        return false;
    self.captureDepthOutput.filteringEnabled = YES; // getting filtered depth data to avoid invalid values
    self.captureDepthOutput.alwaysDiscardsLateDepthData = YES;
    self->_preset = preset;
    [self initColorAndDepthCameraCaptureSession];
    [self setCaptureFPS: fps];
    // deliver color+depth pairs through the synchronizer rather than
    // per-output delegates
    NSArray<AVCaptureOutput*> *outputs = [NSArray arrayWithObjects: self.captureOutput, self.captureDepthOutput, nil];
    self.captureSynchronizer = [[AVCaptureDataOutputSynchronizer alloc] initWithDataOutputs: outputs];
    // queue on main thread to simplify gles life
    [self.captureSynchronizer setDelegate: self queue: dispatch_get_main_queue()];
    _isDepth = isDepth;
    return true;
}
// (Re)creates the capture session for color+depth capture using the stored
// preset. The session is torn down and rebuilt as controllers for the shared
// camera come and go (see add/removeColorAndDepthCameraController:).
- (void)initColorAndDepthCameraCaptureSession
{
    if (!UnityiOS110orNewer())
        return;
    self.captureSession = [[AVCaptureSession alloc] init];
    [self.captureSession setSessionPreset: self->_preset];
    [self.captureSession addInput: self.captureInput];
    [self.captureSession addOutput: self.captureOutput];
    [self.captureSession addOutput: self.captureDepthOutput];
}
// Detaches input/outputs and drops the session. The input/output objects are
// kept alive so initColorAndDepthCameraCaptureSession can rebuild it later.
- (void)clearColorAndDepthCameraCaptureSession
{
    if (!UnityiOS110orNewer())
        return;
    [self.captureSession removeInput: self.captureInput];
    [self.captureSession removeOutput: self.captureOutput];
    [self.captureSession removeOutput: self.captureDepthOutput];
    self.captureSession = nil;
}
// AVCaptureDataOutputSynchronizerDelegate: receives time-aligned color+depth
// data on the main queue. Each stream is forwarded to the controller
// registered for it — self may be either the color or the depth controller,
// so the other one is looked up in the registry.
- (void)dataOutputSynchronizer:(AVCaptureDataOutputSynchronizer *)synchronizer didOutputSynchronizedDataCollection:(AVCaptureSynchronizedDataCollection *)synchronizedDataCollection
{
    // color frame: route to the color controller (if any)
    AVCaptureSynchronizedSampleBufferData *sampleData = (AVCaptureSynchronizedSampleBufferData*)[synchronizedDataCollection synchronizedDataForCaptureOutput: self.captureOutput];
    if (CMSampleBufferGetImageBuffer(sampleData.sampleBuffer) != nil)
    {
        CameraCaptureController* colorController = !self->_isDepth ? self : [CameraCaptureController findColorAndDepthCameraController: self.captureDevice isDepth: false];
        if (colorController != nil)
        {
            intptr_t tex = (intptr_t)CMVideoSampling_SampleBuffer(&colorController->_cmVideoSampling, sampleData.sampleBuffer, &(colorController->_width), &(colorController->_height));
            UnityDidCaptureVideoFrame(tex, colorController->_userData);
        }
    }
    // depth frame: converted to 16-bit float depth before upload
    AVCaptureSynchronizedDepthData *depthData = (AVCaptureSynchronizedDepthData*)[synchronizedDataCollection synchronizedDataForCaptureOutput: self.captureDepthOutput];
    if (depthData.depthData.depthDataMap != nil)
    {
        CameraCaptureController* depthController = self->_isDepth ? self : [CameraCaptureController findColorAndDepthCameraController: self.captureDevice isDepth: true];
        if (depthController != nil)
        {
            intptr_t tex = (intptr_t)CMVideoSampling_ImageBuffer(&depthController->_cmVideoSampling, [depthData.depthData depthDataByConvertingToDepthDataType: kCVPixelFormatType_DepthFloat16].depthDataMap, &(depthController->_width), &(depthController->_height));
            UnityDidCaptureVideoFrame(tex, depthController->_userData);
        }
    }
}
  282. #endif
// Starts (or resumes) capture. Color+depth controllers first register in the
// shared registry (pausing/tearing down conflicting controllers); plain
// controllers tear down any active color+depth sessions before running.
- (void)start
{
#if UNITY_HAS_COLORANDDEPTH_CAMERA
    if (self.captureDepthOutput != nil)
    {
        [CameraCaptureController addColorAndDepthCameraController: self];
    }
    else
    {
        [CameraCaptureController clearColorAndDepthCameraControllers];
    }
#endif
    [self.captureSession startRunning];
}
// Stops the session without releasing any resources, so capture can be
// resumed with -start. Color+depth controllers are also unregistered, which
// may hand the camera back to the companion stream's controller.
- (void)pause
{
    [self.captureSession stopRunning];
#if UNITY_HAS_COLORANDDEPTH_CAMERA
    if (self.captureDepthOutput != nil)
    {
        [CameraCaptureController removeColorAndDepthCameraController: self];
    }
#endif
}
// Full teardown: stops the session, detaches all inputs/outputs, releases the
// frame-copy buffer and the video sampling state. The controller is not
// usable afterwards.
- (void)stop
{
    [self.captureSession stopRunning];
    [self.captureSession removeInput: self.captureInput];
    [self.captureSession removeOutput: self.captureOutput];
    self.captureInput = nil;
    self.captureOutput = nil;
#if UNITY_HAS_COLORANDDEPTH_CAMERA
    if (self.captureDepthOutput != nil)
    {
        self.captureSynchronizer = nil;
        [self.captureSession removeOutput: self.captureDepthOutput];
        self.captureDepthOutput = nil;
        // unregistering may restart the companion color/depth controller
        [CameraCaptureController removeColorAndDepthCameraController: self];
    }
#endif
    self.captureDevice = nil;
    self.captureSession = nil;
    if (self->_pixelBufferCopy != nullptr)
    {
        ::free(self->_pixelBufferCopy);
        self->_pixelBufferCopy = nullptr;
    }
    CMVideoSampling_Uninitialize(&self->_cmVideoSampling);
}
  332. - (float)pickAvailableFrameRate:(float)fps
  333. {
  334. AVFrameRateRange* bestRange = nil;
  335. float minDiff = INFINITY;
  336. float epsilon = 0.1;
  337. fps = fps > epsilon ? fps : 24;
  338. for (AVFrameRateRange* rate in self.captureDevice.activeFormat.videoSupportedFrameRateRanges)
  339. {
  340. if (fps + epsilon > rate.minFrameRate && fps - epsilon < rate.maxFrameRate)
  341. return fps;
  342. else
  343. {
  344. float diff = ::fmin(::fabs(fps - rate.minFrameRate), ::fabs(fps - rate.maxFrameRate));
  345. if (diff < minDiff)
  346. {
  347. minDiff = diff;
  348. bestRange = rate;
  349. }
  350. }
  351. }
  352. return fps > bestRange.maxFrameRate ? bestRange.maxFrameRate : bestRange.minFrameRate;
  353. }
// explicit property-to-ivar backing (ivars are declared in the implementation block above)
@synthesize captureDevice = _captureDevice;
@synthesize captureSession = _captureSession;
@synthesize captureOutput = _captureOutput;
@synthesize captureInput = _captureInput;
#if UNITY_HAS_COLORANDDEPTH_CAMERA
@synthesize captureDepthOutput = _captureDepthOutput;
@synthesize captureSynchronizer = _captureSynchronizer;
#endif
@end
#if UNITY_HAS_COLORANDDEPTH_CAMERA
// Preset for getting depth data with max resolution available
static NSString* const depthCaptureSessionPreset = AVCaptureSessionPresetPhoto;
#endif
// Cached wrapper objects for every enumerated camera; built lazily in
// UnityEnumVideoCaptureDevices.
static NSMutableArray<CameraCaptureDevice*> *videoCaptureDevices = nil;
@implementation CameraCaptureDevice
{
    @public AVCaptureDevice* _device;
    @public int _frontFacing;              // 1 when the camera faces the user
    @public int _autoFocusPointSupported;  // 1 when point-of-interest autofocus is available
    @public WebCamKind _kind;
    @public NSMutableArray<NSValue*>* _resolutions; // CGSize per supported preset
    NSMutableArray<NSString*>* _resPresets;         // preset names, parallel to _resolutions
}
  377. - (bool)isColorAndDepthCaptureDevice
  378. {
  379. #if UNITY_HAS_COLORANDDEPTH_CAMERA
  380. if (UnityiOS110orNewer())
  381. {
  382. for (AVCaptureDeviceFormat *format in [self->_device formats])
  383. {
  384. if ([format supportedDepthDataFormats].count > 0)
  385. return true;
  386. }
  387. }
  388. #endif
  389. return false;
  390. }
  391. - (WebCamKind)getKind
  392. {
  393. if ([self->_device.localizedName containsString: @"Telephoto"])
  394. return kWebCamTelephoto;
  395. if ([self->_device.localizedName containsString: @"Dual"] && [self isColorAndDepthCaptureDevice])
  396. return kWebCamColorAndDepth;
  397. if ([self->_device.localizedName containsString: @"TrueDepth"] && [self isColorAndDepthCaptureDevice])
  398. return kWebCamColorAndDepth;
  399. return kWebCamWideAngle;
  400. }
  401. - (void)fillCaptureDeviceResolutions
  402. {
  403. static NSString* preset[] =
  404. {
  405. AVCaptureSessionPresetLow, // usually 192x144
  406. AVCaptureSessionPreset352x288,
  407. AVCaptureSessionPresetMedium, // usually 480x320
  408. AVCaptureSessionPreset640x480,
  409. AVCaptureSessionPreset1280x720,
  410. AVCaptureSessionPreset1920x1080, // usually the same as AVCaptureSessionPresetHigh
  411. AVCaptureSessionPreset3840x2160,
  412. };
  413. const int count = sizeof(preset) / sizeof(preset[0]);
  414. self->_resolutions = [NSMutableArray arrayWithCapacity: count];
  415. self->_resPresets = [NSMutableArray arrayWithCapacity: count];
  416. AVCaptureInput* captureInput = [AVCaptureDeviceInput deviceInputWithDevice: self->_device error: nil];
  417. //Don't attempt to setup an AVCaptureSession if the user has explicitly denied permission to use the camera.
  418. if (captureInput != nil)
  419. {
  420. AVCaptureSession* captureSession = [[AVCaptureSession alloc] init];
  421. [captureSession addInput: captureInput];
  422. #if UNITY_HAS_COLORANDDEPTH_CAMERA
  423. if (self->_kind == kWebCamColorAndDepth)
  424. {
  425. AVCaptureDepthDataOutput* captureDepthOutput = [[AVCaptureDepthDataOutput alloc] init];
  426. if ([captureSession canSetSessionPreset: depthCaptureSessionPreset])
  427. {
  428. [captureSession setSessionPreset: AVCaptureSessionPresetPhoto];
  429. [captureSession addOutput: captureDepthOutput];
  430. CMVideoDimensions dim = CMVideoFormatDescriptionGetDimensions(self->_device.activeDepthDataFormat.formatDescription); // for ColorAndDepth camera return depth buffer resolution
  431. [self->_resolutions addObject: [NSValue valueWithCGSize: CGSizeMake(dim.width, dim.height)]];
  432. [self->_resPresets addObject: AVCaptureSessionPresetPhoto];
  433. }
  434. }
  435. else
  436. #endif
  437. {
  438. for (int i = 0; i < count; ++i)
  439. {
  440. if ([captureSession canSetSessionPreset: preset[i]])
  441. {
  442. [captureSession setSessionPreset: preset[i]];
  443. CMVideoDimensions dim = CMVideoFormatDescriptionGetDimensions(self->_device.activeFormat.formatDescription);
  444. [self->_resolutions addObject: [NSValue valueWithCGSize: CGSizeMake(dim.width, dim.height)]];
  445. [self->_resPresets addObject: preset[i]];
  446. }
  447. }
  448. }
  449. }
  450. }
  451. - (NSString*)pickPresetFromWidth:(int)w height:(int)h
  452. {
  453. #if UNITY_HAS_COLORANDDEPTH_CAMERA
  454. if (self->_kind == kWebCamColorAndDepth)
  455. {
  456. return depthCaptureSessionPreset;
  457. }
  458. #endif
  459. int requestedWidth = w > 0 ? w : 640;
  460. int requestedHeight = h > 0 ? h : 480;
  461. if (requestedHeight > requestedWidth) // hardware camera frame is landscape oriented
  462. std::swap(requestedWidth, requestedHeight);
  463. NSInteger ret = -1;
  464. double bestMatch = std::numeric_limits<double>::max();
  465. for (NSInteger i = 0, n = [_resolutions count]; i < n; ++i)
  466. {
  467. double width = [self->_resolutions[i] CGSizeValue].width;
  468. double height = [self->_resolutions[i] CGSizeValue].height;
  469. double match = std::abs(std::log(requestedWidth / width)) + std::abs(std::log(requestedHeight / height));
  470. if (match < bestMatch)
  471. {
  472. ret = i;
  473. bestMatch = match;
  474. }
  475. }
  476. NSAssert(ret != -1, @"Cannot pick capture preset");
  477. return ret != -1 ? self->_resPresets[ret] : AVCaptureSessionPresetHigh;
  478. }
  479. - (CameraCaptureDevice*)initWithDevice:(AVCaptureDevice*)device
  480. {
  481. self->_device = device;
  482. self->_frontFacing = device.position == AVCaptureDevicePositionFront ? 1 : 0;
  483. self->_autoFocusPointSupported = [CameraCaptureController focusPointSupported: device withFocusMode: AVCaptureFocusModeAutoFocus] ? 1 : 0;
  484. self->_kind = [self getKind];
  485. [self fillCaptureDeviceResolutions];
  486. return self;
  487. }
// Initializes the given controller for this device: synchronized color+depth
// capture when the device supports depth (iOS 11+), plain video capture
// otherwise. Returns false when setup fails.
- (bool)initCaptureForController:(CameraCaptureController*)controller width:(int)w height:(int)h fps:(float)fps isDepth:(bool)isDepth
{
    bool initResult = false;
    NSString *preset = [self pickPresetFromWidth: w height: h];
#if UNITY_HAS_COLORANDDEPTH_CAMERA
    if (UnityiOS110orNewer() && [self isColorAndDepthCaptureDevice])
    {
        initResult = [controller initColorAndDepthCameraCapture: self->_device preset: preset fps: fps isDepth: isDepth];
    }
    else
#endif
    {
        assert(!isDepth); // depth requested on a device without depth support
        initResult = [controller initCapture: self->_device preset: preset fps: fps];
    }
    return initResult;
}
// True once the device wrapper array has been built
// (see UnityEnumVideoCaptureDevices).
+ (bool)initialized
{
    return videoCaptureDevices != nil;
}
  509. + (void)createCameraCaptureDevicesArray
  510. {
  511. videoCaptureDevices = [NSMutableArray arrayWithCapacity: 2];
  512. }
  513. + (void)addCameraCaptureDevice:(AVCaptureDevice*)device
  514. {
  515. [videoCaptureDevices addObject: [[CameraCaptureDevice alloc] initWithDevice: device]];
  516. }
  517. @end
// Enumerates all cameras (building the cached wrapper array on first call)
// and reports each device's name/facing/autofocus/kind plus its supported
// resolutions, flattened as width,height pairs, through the callback.
extern "C" void UnityEnumVideoCaptureDevices(void* udata, void(*callback)(void* udata, const char* name, int frontFacing, int autoFocusPointSupported, int kind, const int* resolutions, int resCount))
{
    AVCaptureDevice* device;
    if (![CameraCaptureDevice initialized])
    {
        [CameraCaptureDevice createCameraCaptureDevicesArray];
        // NOTE(review): devicesWithMediaType: is deprecated since iOS 10 in
        // favor of AVCaptureDeviceDiscoverySession; kept for older OS support.
        for (device in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo])
        {
            [CameraCaptureDevice addCameraCaptureDevice: device];
        }
        // devices not covered by the enumeration above are queried explicitly
        device = [AVCaptureDevice defaultDeviceWithDeviceType: AVCaptureDeviceTypeBuiltInTelephotoCamera mediaType: AVMediaTypeVideo position: AVCaptureDevicePositionBack];
        if (device != nil)
            [CameraCaptureDevice addCameraCaptureDevice: device];
        if (UnityiOS102orNewer())
        {
            device = [AVCaptureDevice defaultDeviceWithDeviceType: AVCaptureDeviceTypeBuiltInDualCamera mediaType: AVMediaTypeVideo position: AVCaptureDevicePositionBack];
            if (device != nil)
                [CameraCaptureDevice addCameraCaptureDevice: device];
        }
        if (UnityiOS111orNewer())
        {
            device = [AVCaptureDevice defaultDeviceWithDeviceType: AVCaptureDeviceTypeBuiltInTrueDepthCamera mediaType: AVMediaTypeVideo position: AVCaptureDevicePositionFront];
            if (device != nil)
                [CameraCaptureDevice addCameraCaptureDevice: device];
        }
    }
    for (CameraCaptureDevice *cameraCaptureDevice in videoCaptureDevices)
    {
        int resCount = [cameraCaptureDevice->_resolutions count];
        // temporary flat buffer: [w0, h0, w1, h1, ...]
        int *resolutions = new int[resCount * 2];
        for (int i = 0; i < resCount; ++i)
        {
            resolutions[i * 2] = [cameraCaptureDevice->_resolutions[i] CGSizeValue].width;
            resolutions[i * 2 + 1] = [cameraCaptureDevice->_resolutions[i] CGSizeValue].height;
        }
        callback(udata, [cameraCaptureDevice->_device.localizedName UTF8String], cameraCaptureDevice->_frontFacing, cameraCaptureDevice->_autoFocusPointSupported, cameraCaptureDevice->_kind, resolutions, resCount);
        delete[] resolutions;
    }
}
// Creates a capture controller for the enumerated device at deviceIndex.
// Returns an opaque retained pointer (released by UnityStopCameraCapture),
// or 0 on failure / invalid index.
extern "C" void* UnityInitCameraCapture(int deviceIndex, int w, int h, int fps, int isDepth, void* udata)
{
    if (videoCaptureDevices != nil && deviceIndex < videoCaptureDevices.count)
    {
        // alloc without -init: the initCapture... methods act as the initializers here
        CameraCaptureController* controller = [CameraCaptureController alloc];
        bool initResult = [videoCaptureDevices[deviceIndex] initCaptureForController: controller width: w height: h fps: (float)fps isDepth: (isDepth != 0)];
        if (initResult)
        {
            controller->_userData = udata;
            // ownership transferred to the caller; balanced by the
            // __bridge_transfer in UnityStopCameraCapture
            return (__bridge_retained void*)controller;
        }
        controller = nil;
    }
    return 0;
}
  572. extern "C" void UnityStartCameraCapture(void* capture)
  573. {
  574. [(__bridge CameraCaptureController*)capture start];
  575. }
  576. extern "C" void UnityPauseCameraCapture(void* capture)
  577. {
  578. [(__bridge CameraCaptureController*)capture pause];
  579. }
// Stops capture and releases the ownership taken by UnityInitCameraCapture
// (__bridge_transfer balances the earlier __bridge_retained).
extern "C" void UnityStopCameraCapture(void* capture)
{
    CameraCaptureController* controller = (__bridge_transfer CameraCaptureController*)capture;
    [controller stop];
    controller = nil;
}
  586. extern "C" void UnityCameraCaptureExtents(void* capture, int* w, int* h)
  587. {
  588. CameraCaptureController* controller = (__bridge CameraCaptureController*)capture;
  589. if (controller == nil)
  590. return;
  591. *w = (int)controller->_width;
  592. *h = (int)controller->_height;
  593. }
// Copies the last captured frame into dst_; w and h must match the
// controller's current extents. No-op for a null capture pointer.
extern "C" void UnityCameraCaptureReadToMemory(void* capture, void* dst_, int w, int h)
{
    CameraCaptureController* controller = (__bridge CameraCaptureController*)capture;
    if (controller == nil)
        return;
    assert(w == controller->_width && h == controller->_height);
    [controller capturePixelBufferToMemBuffer: (uint8_t*)dst_];
}
// Degrees the captured frame must be rotated to match the current UI
// orientation. Front-facing cameras use swapped values for the landscape
// cases. Returns 0 for a null capture pointer.
extern "C" int UnityCameraCaptureVideoRotationDeg(void* capture)
{
    CameraCaptureController* controller = (__bridge CameraCaptureController*)capture;
    if (controller == nil)
        return 0;
    // all cams are landscape.
    switch (UnityCurrentOrientation())
    {
        case portrait: return 90;
        case portraitUpsideDown: return 270;
        case landscapeLeft: return controller.captureDevice.position == AVCaptureDevicePositionFront ? 180 : 0;
        case landscapeRight: return controller.captureDevice.position == AVCaptureDevicePositionFront ? 0 : 180;
        default: assert(false && "bad orientation returned from UnityCurrentOrientation()"); break;
    }
    return 0;
}
  618. extern "C" int UnityCameraCaptureVerticallyMirrored(void* capture)
  619. {
  620. CameraCaptureController* controller = (__bridge CameraCaptureController*)capture;
  621. if (controller == nil)
  622. return 0;
  623. return [controller isCVTextureFlipped];
  624. }
  625. extern "C" int UnityCameraCaptureSetAutoFocusPoint(void* capture, float x, float y)
  626. {
  627. CameraCaptureController* controller = (__bridge CameraCaptureController*)capture;
  628. if (controller == nil)
  629. return 0;
  630. return [controller setFocusPointWithX: x Y: y];
  631. }
  632. #else
// Camera capture is not available on tvOS: every entry point below is a no-op
// stub so the exported C interface stays identical across platforms.
// (Stubbed out until a suitable camera solution exists for Apple TV.)
extern "C" void UnityEnumVideoCaptureDevices(void* udata, void(*callback)(void* udata, const char* name, int frontFacing, int autoFocusPointSupported, int kind, const int* resolutions, int resCount))
{
}

extern "C" void* UnityInitCameraCapture(int deviceIndex, int w, int h, int fps, int isDepth, void* udata)
{
    return 0;
}

extern "C" void UnityStartCameraCapture(void* capture)
{
}

extern "C" void UnityPauseCameraCapture(void* capture)
{
}

extern "C" void UnityStopCameraCapture(void* capture)
{
}

extern "C" void UnityCameraCaptureExtents(void* capture, int* w, int* h)
{
}

extern "C" void UnityCameraCaptureReadToMemory(void* capture, void* dst_, int w, int h)
{
}

extern "C" int UnityCameraCaptureVideoRotationDeg(void* capture)
{
    return 0;
}

extern "C" int UnityCameraCaptureVerticallyMirrored(void* capture)
{
    return 0;
}

extern "C" int UnityCameraCaptureSetAutoFocusPoint(void* capture, float x, float y)
{
    return 0;
}
  668. #endif