// CameraCapture.mm — Unity iOS/tvOS webcam capture bridge (AVFoundation).
  1. #if !PLATFORM_TVOS && UNITY_USES_WEBCAM
  2. #include "CameraCapture.h"
  3. #include "AVCapture.h"
  4. #include "CMVideoSampling.h"
  5. #include "CVTextureCache.h"
  6. #import <CoreVideo/CoreVideo.h>
  7. #include <cmath>
  8. #if UNITY_HAS_COLORANDDEPTH_CAMERA
  9. static NSMutableArray<CameraCaptureController*> *activeColorAndDepthCameraControllers = nil;
  10. #endif
@implementation CameraCaptureController
{
    AVCaptureDevice* _captureDevice;            // hardware camera driving this controller
    AVCaptureSession* _captureSession;          // capture graph; created on init/start, dropped in -stop
    AVCaptureDeviceInput* _captureInput;        // wraps _captureDevice for attachment to the session
    AVCaptureVideoDataOutput* _captureOutput;   // delivers BGRA frames (configured in -initCapture:)
#if UNITY_HAS_COLORANDDEPTH_CAMERA
    AVCaptureDepthDataOutput* _captureDepthOutput;          // depth frames (dual/TrueDepth cameras)
    AVCaptureDataOutputSynchronizer* _captureSynchronizer;  // pairs color + depth delivery callbacks
    @public bool _isDepth;  // true when this controller surfaces the depth stream of a color+depth device
#endif
    uint8_t* _pixelBufferCopy;          // scratch buffer for -capturePixelBufferToMemBuffer:; freed in -stop
    CMVideoSampling _cmVideoSampling;   // pixel buffer -> GPU texture bridge state
    NSString* _preset;                  // session preset, kept so the session can be re-created
    CGPoint _focusPoint;                // normalized focus point-of-interest (AVFoundation coords, top-left origin)
    AVCaptureFocusMode _focusMode;      // focus mode applied together with _focusPoint
    @public void* _userData;            // opaque pointer handed back to Unity on every captured frame
    @public size_t _width, _height;     // dimensions of the most recently sampled frame
}
// Common capture setup shared by the color-only and color+depth paths: creates
// the device input and a BGRA video data output, and resets per-capture state.
// Returns false when camera permission was denied or either object could not be
// created. Does NOT create the session (the preset/fps variants do that).
- (bool)initCapture:(AVCaptureDevice*)device
{
    if (UnityGetAVCapturePermission(avVideoCapture) == avCapturePermissionDenied)
        return false;

    self.captureDevice = device;
    // error is intentionally ignored; a nil input is handled right below
    self.captureInput = [AVCaptureDeviceInput deviceInputWithDevice: device error: nil];
    self.captureOutput = [[AVCaptureVideoDataOutput alloc] init];
    if (self.captureOutput == nil || self.captureInput == nil)
        return false;
    self.captureOutput.alwaysDiscardsLateVideoFrames = YES;

    // Unity consumes the frames as 32-bit BGRA textures.
    NSDictionary* options = @{ (NSString*)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) };
    [self.captureOutput setVideoSettings: options];

    CMVideoSampling_Initialize(&self->_cmVideoSampling);
    _width = _height = 0;
    _focusPoint = CGPointMake(0.5, 0.5); // default focus point is center
    _focusMode = AVCaptureFocusModeContinuousAutoFocus;
    _pixelBufferCopy = nullptr;
    return true;
}
  49. - (void)setCaptureFPS:(float)fps
  50. {
  51. if ([self.captureDevice lockForConfiguration: nil])
  52. {
  53. if (self.captureDevice.activeFormat)
  54. {
  55. fps = [self pickAvailableFrameRate: fps];
  56. self.captureDevice.activeVideoMinFrameDuration = CMTimeMake(1, fps);
  57. self.captureDevice.activeVideoMaxFrameDuration = CMTimeMake(1, fps);
  58. }
  59. else
  60. {
  61. // In some corner cases (seeing this on iPod iOS 6.1.5) activeFormat is null.
  62. #pragma clang diagnostic push
  63. #pragma clang diagnostic ignored "-Wdeprecated-declarations"
  64. self.captureOutput.minFrameDuration = CMTimeMake(1, fps);
  65. #pragma clang diagnostic pop
  66. }
  67. [self.captureDevice unlockForConfiguration];
  68. }
  69. }
  70. - (bool)initCapture:(AVCaptureDevice*)device preset:(NSString*)preset fps:(float)fps
  71. {
  72. if (![self initCapture: device])
  73. return false;
  74. self.captureSession = [[AVCaptureSession alloc] init];
  75. [self.captureSession addInput: self.captureInput];
  76. [self.captureSession addOutput: self.captureOutput];
  77. // queue on main thread to simplify gles life
  78. [self.captureOutput setSampleBufferDelegate: self queue: dispatch_get_main_queue()];
  79. self->_preset = preset;
  80. [self.captureSession setSessionPreset: preset];
  81. [self setCaptureFPS: fps];
  82. return true;
  83. }
  84. - (void)captureOutput:(AVCaptureOutput*)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection*)connection
  85. {
  86. intptr_t tex = (intptr_t)CMVideoSampling_SampleBuffer(&self->_cmVideoSampling, sampleBuffer, &_width, &_height);
  87. UnityDidCaptureVideoFrame(tex, self->_userData);
  88. }
  89. - (void)capturePixelBufferToMemBuffer:(uint8_t*)dst
  90. {
  91. CVPixelBufferRef pbuf = (CVPixelBufferRef)self->_cmVideoSampling.cvImageBuffer;
  92. const size_t srcRowSize = CVPixelBufferGetBytesPerRow(pbuf);
  93. const size_t bufSize = srcRowSize * self->_height;
  94. if (self->_pixelBufferCopy == nullptr)
  95. {
  96. self->_pixelBufferCopy = (uint8_t*)::malloc(bufSize);
  97. }
  98. // while not the best way memory-wise, we want to minimize stalling
  99. CVPixelBufferLockBaseAddress(pbuf, kCVPixelBufferLock_ReadOnly);
  100. {
  101. ::memcpy(self->_pixelBufferCopy, CVPixelBufferGetBaseAddress(pbuf), bufSize);
  102. }
  103. CVPixelBufferUnlockBaseAddress(pbuf, kCVPixelBufferLock_ReadOnly);
  104. OSType pixelFormat = CVPixelBufferGetPixelFormatType(pbuf);
  105. size_t bpp = 0;
  106. switch (pixelFormat)
  107. {
  108. case kCVPixelFormatType_32BGRA:
  109. bpp = 4;
  110. break;
  111. #if UNITY_HAS_IOSSDK_11_0
  112. case kCVPixelFormatType_DepthFloat16:
  113. bpp = 2;
  114. break;
  115. #endif
  116. default:
  117. assert(false);
  118. break;
  119. }
  120. const size_t dstRowSize = self->_width * bpp;
  121. uint8_t* src = self->_pixelBufferCopy + (self->_height - 1) * srcRowSize;
  122. for (size_t i = 0; i < self->_height; ++i)
  123. {
  124. ::memcpy(dst, src, dstRowSize);
  125. dst += dstRowSize;
  126. src -= srcRowSize;
  127. }
  128. }
  129. - (int)isCVTextureFlipped
  130. {
  131. return IsCVTextureFlipped(self->_cmVideoSampling.cvTextureCacheTexture);
  132. }
  133. + (BOOL)focusPointSupported:(AVCaptureDevice*)captureDevice withFocusMode:(AVCaptureFocusMode)focusMode
  134. {
  135. return captureDevice.focusPointOfInterestSupported && [captureDevice isFocusModeSupported: focusMode];
  136. }
  137. - (int)setFocusPointWithX:(float)x Y:(float)y
  138. {
  139. if (x < 0 || x > 1 || y < 0 || y > 1)
  140. {
  141. _focusPoint = CGPointMake(0.5, 0.5); // default value for iOS
  142. _focusMode = AVCaptureFocusModeContinuousAutoFocus;
  143. }
  144. else
  145. {
  146. _focusPoint = CGPointMake(x, 1.0 - y);
  147. _focusMode = AVCaptureFocusModeAutoFocus;
  148. }
  149. return [self setFocusPoint];
  150. }
  151. - (int)setFocusPoint
  152. {
  153. if (self.captureDevice != nil && [CameraCaptureController focusPointSupported: self.captureDevice withFocusMode: _focusMode])
  154. {
  155. if ([self.captureDevice lockForConfiguration: nil])
  156. {
  157. self.captureDevice.focusPointOfInterest = _focusPoint;
  158. self.captureDevice.focusMode = _focusMode;
  159. [self.captureDevice unlockForConfiguration];
  160. return 1;
  161. }
  162. }
  163. return 0;
  164. }
  165. #if UNITY_HAS_COLORANDDEPTH_CAMERA
// ---- color+depth controller registry ---------------------------------------
// At most one color and one depth controller can be live per device; the two
// roles share the underlying session, so activating one side tears down or
// pauses the other and rebuilds it on removal.

// Lazily creates and returns the global registry of live color/depth controllers.
+ (NSMutableArray<CameraCaptureController*>*)getActiveColorAndDepthCameraControllers
{
    if (activeColorAndDepthCameraControllers == nil)
    {
        activeColorAndDepthCameraControllers = [[NSMutableArray alloc] init];
    }
    return activeColorAndDepthCameraControllers;
}

// Registers controller: any existing controller serving the same role for the
// same device is paused, and the opposite-role controller (if any) has its
// session stopped and torn down; it is rebuilt later by
// +removeColorAndDepthCameraController:.
+ (void)addColorAndDepthCameraController:(CameraCaptureController*)controller
{
    CameraCaptureController* prevController = [self findColorAndDepthCameraController: controller.captureDevice isDepth: controller->_isDepth];
    if (prevController != nil)
        [prevController pause];
    CameraCaptureController* otherController = [self findColorAndDepthCameraController: controller.captureDevice isDepth: !controller->_isDepth];
    if (otherController != nil)
    {
        [otherController.captureSession stopRunning];
        [otherController clearColorAndDepthCameraCaptureSession];
    }
    [[self getActiveColorAndDepthCameraControllers] addObject: controller];
}

// Unregisters controller and, if the opposite-role controller for the same
// device is still registered, rebuilds and restarts its session (it was torn
// down while this controller was active).
+ (void)removeColorAndDepthCameraController:(CameraCaptureController*)controller
{
    [[self getActiveColorAndDepthCameraControllers] removeObject: controller];
    CameraCaptureController* otherController = [self findColorAndDepthCameraController: controller.captureDevice isDepth: !controller->_isDepth];
    if (otherController != nil)
    {
        [otherController initColorAndDepthCameraCaptureSession];
        [otherController.captureSession startRunning];
    }
}

// Stops and tears down every registered controller, then empties the registry.
// Invoked when a plain color-only capture starts (see -start).
+ (void)clearColorAndDepthCameraControllers
{
    NSMutableArray<CameraCaptureController*>* activeColorAndDepthCameraControllers = [self getActiveColorAndDepthCameraControllers];
    for (CameraCaptureController *controller in activeColorAndDepthCameraControllers)
    {
        if (controller.captureSession != nil)
        {
            [controller.captureSession stopRunning];
            [controller clearColorAndDepthCameraCaptureSession];
        }
    }
    [activeColorAndDepthCameraControllers removeAllObjects];
}

// Returns the registered controller matching device and role, or nil.
+ (CameraCaptureController*)findColorAndDepthCameraController:(AVCaptureDevice*)device isDepth:(bool)isDepth
{
    for (CameraCaptureController *controller in [self getActiveColorAndDepthCameraControllers])
    {
        if (controller.captureDevice == device && controller->_isDepth == isDepth)
            return controller;
    }
    return nil;
}
// Sets up synchronized color+depth capture on device (iOS 11+ only). A single
// controller instance serves either the color or the depth stream (isDepth);
// both streams arrive through the AVCaptureDataOutputSynchronizer callback
// below, which routes each to the controller registered for that role.
- (bool)initColorAndDepthCameraCapture:(AVCaptureDevice*)device preset:(NSString*)preset fps:(float)fps isDepth:(bool)isDepth
{
    if (!UnityiOS110orNewer())
        return false;
    if (![self initCapture: device])
        return false;

    self.captureDepthOutput = [[AVCaptureDepthDataOutput alloc] init];
    if (self.captureDepthOutput == nil)
        return false;
    self.captureDepthOutput.filteringEnabled = YES; // getting filtered depth data to avoid invalid values
    self.captureDepthOutput.alwaysDiscardsLateDepthData = YES;

    self->_preset = preset;
    [self initColorAndDepthCameraCaptureSession];
    [self setCaptureFPS: fps];

    NSArray<AVCaptureOutput*> *outputs = [NSArray arrayWithObjects: self.captureOutput, self.captureDepthOutput, nil];
    self.captureSynchronizer = [[AVCaptureDataOutputSynchronizer alloc] initWithDataOutputs: outputs];
    // queue on main thread to simplify gles life
    [self.captureSynchronizer setDelegate: self queue: dispatch_get_main_queue()];

    _isDepth = isDepth;
    return true;
}

// (Re)creates the shared session wiring input + color output + depth output.
// Also called from +removeColorAndDepthCameraController: when the opposite-role
// controller resumes after this one is removed.
- (void)initColorAndDepthCameraCaptureSession
{
    if (!UnityiOS110orNewer())
        return;
    self.captureSession = [[AVCaptureSession alloc] init];
    [self.captureSession setSessionPreset: self->_preset];
    [self.captureSession addInput: self.captureInput];
    [self.captureSession addOutput: self.captureOutput];
    [self.captureSession addOutput: self.captureDepthOutput];
}

// Detaches the input/outputs and drops the session (counterpart of the above).
- (void)clearColorAndDepthCameraCaptureSession
{
    if (!UnityiOS110orNewer())
        return;
    [self.captureSession removeInput: self.captureInput];
    [self.captureSession removeOutput: self.captureOutput];
    [self.captureSession removeOutput: self.captureDepthOutput];
    self.captureSession = nil;
}

// Synchronizer delegate: dispatches the color frame to the registered color
// controller and the depth frame (converted to DepthFloat16) to the registered
// depth controller. Either datum may be missing in a given callback.
- (void)dataOutputSynchronizer:(AVCaptureDataOutputSynchronizer *)synchronizer didOutputSynchronizedDataCollection:(AVCaptureSynchronizedDataCollection *)synchronizedDataCollection
{
    // color frame: sample into the color controller's texture, if one is active
    AVCaptureSynchronizedSampleBufferData *sampleData = (AVCaptureSynchronizedSampleBufferData*)[synchronizedDataCollection synchronizedDataForCaptureOutput: self.captureOutput];
    if (CMSampleBufferGetImageBuffer(sampleData.sampleBuffer) != nil)
    {
        CameraCaptureController* colorController = !self->_isDepth ? self : [CameraCaptureController findColorAndDepthCameraController: self.captureDevice isDepth: false];
        if (colorController != nil)
        {
            intptr_t tex = (intptr_t)CMVideoSampling_SampleBuffer(&colorController->_cmVideoSampling, sampleData.sampleBuffer, &(colorController->_width), &(colorController->_height));
            UnityDidCaptureVideoFrame(tex, colorController->_userData);
        }
    }
    // depth frame: convert to half-float depth and sample into the depth controller
    AVCaptureSynchronizedDepthData *depthData = (AVCaptureSynchronizedDepthData*)[synchronizedDataCollection synchronizedDataForCaptureOutput: self.captureDepthOutput];
    if (depthData.depthData.depthDataMap != nil)
    {
        CameraCaptureController* depthController = self->_isDepth ? self : [CameraCaptureController findColorAndDepthCameraController: self.captureDevice isDepth: true];
        if (depthController != nil)
        {
            intptr_t tex = (intptr_t)CMVideoSampling_ImageBuffer(&depthController->_cmVideoSampling, [depthData.depthData depthDataByConvertingToDepthDataType: kCVPixelFormatType_DepthFloat16].depthDataMap, &(depthController->_width), &(depthController->_height));
            UnityDidCaptureVideoFrame(tex, depthController->_userData);
        }
    }
}
  282. #endif
// Begins capture. A color+depth controller registers with the global registry
// (which pauses/tears down conflicting controllers on the same device); a plain
// color capture instead clears all color+depth controllers, since their sessions
// cannot run alongside it.
- (void)start
{
#if UNITY_HAS_COLORANDDEPTH_CAMERA
    if (self.captureDepthOutput != nil)
    {
        [CameraCaptureController addColorAndDepthCameraController: self];
    }
    else
    {
        [CameraCaptureController clearColorAndDepthCameraControllers];
    }
#endif
    [self.captureSession startRunning];
}

// Stops the session but keeps inputs/outputs attached so -start can resume.
// Color+depth controllers also unregister, letting a sibling controller resume.
- (void)pause
{
    [self.captureSession stopRunning];
#if UNITY_HAS_COLORANDDEPTH_CAMERA
    if (self.captureDepthOutput != nil)
    {
        [CameraCaptureController removeColorAndDepthCameraController: self];
    }
#endif
}

// Full teardown: stops the session, detaches and releases all capture objects,
// frees the pixel-buffer scratch copy and the video-sampling state. The
// controller cannot be restarted afterwards.
- (void)stop
{
    [self.captureSession stopRunning];
    [self.captureSession removeInput: self.captureInput];
    [self.captureSession removeOutput: self.captureOutput];
    self.captureInput = nil;
    self.captureOutput = nil;
#if UNITY_HAS_COLORANDDEPTH_CAMERA
    if (self.captureDepthOutput != nil)
    {
        self.captureSynchronizer = nil;
        [self.captureSession removeOutput: self.captureDepthOutput];
        self.captureDepthOutput = nil;
        [CameraCaptureController removeColorAndDepthCameraController: self];
    }
#endif
    self.captureDevice = nil;
    self.captureSession = nil;
    if (self->_pixelBufferCopy != nullptr)
    {
        ::free(self->_pixelBufferCopy);
        self->_pixelBufferCopy = nullptr;
    }
    CMVideoSampling_Uninitialize(&self->_cmVideoSampling);
}
  332. - (float)pickAvailableFrameRate:(float)fps
  333. {
  334. AVFrameRateRange* bestRange = nil;
  335. float minDiff = INFINITY;
  336. float epsilon = 0.1;
  337. fps = fps > epsilon ? fps : 24;
  338. for (AVFrameRateRange* rate in self.captureDevice.activeFormat.videoSupportedFrameRateRanges)
  339. {
  340. if (fps + epsilon > rate.minFrameRate && fps - epsilon < rate.maxFrameRate)
  341. return fps;
  342. else
  343. {
  344. float diff = ::fmin(::fabs(fps - rate.minFrameRate), ::fabs(fps - rate.maxFrameRate));
  345. if (diff < minDiff)
  346. {
  347. minDiff = diff;
  348. bestRange = rate;
  349. }
  350. }
  351. }
  352. return fps > bestRange.maxFrameRate ? bestRange.maxFrameRate : bestRange.minFrameRate;
  353. }
// Property storage is backed explicitly by the ivars declared at the top of
// this implementation block.
@synthesize captureDevice = _captureDevice;
@synthesize captureSession = _captureSession;
@synthesize captureOutput = _captureOutput;
@synthesize captureInput = _captureInput;
#if UNITY_HAS_COLORANDDEPTH_CAMERA
@synthesize captureDepthOutput = _captureDepthOutput;
@synthesize captureSynchronizer = _captureSynchronizer;
#endif
@end
#if UNITY_HAS_COLORANDDEPTH_CAMERA
// Preset for getting depth data with max resolution available
static NSString* const depthCaptureSessionPreset = AVCaptureSessionPresetPhoto;
#endif

// Populated once by UnityEnumVideoCaptureDevices; one entry per physical camera.
static NSMutableArray<CameraCaptureDevice*> *videoCaptureDevices = nil;

@implementation CameraCaptureDevice
{
    @public AVCaptureDevice* _device;               // the wrapped AVFoundation device
    @public int _frontFacing;                       // 1 when the device position is front, else 0 (int for C interop)
    @public int _autoFocusPointSupported;           // 1 when focus point-of-interest works with autofocus mode
    @public WebCamKind _kind;                       // Unity-facing classification (wide/tele/ultra-wide/color+depth)
    @public NSMutableArray<NSValue*>* _resolutions; // CGSize per accepted preset; parallel to _resPresets
    NSMutableArray<NSString*>* _resPresets;         // session preset names matching _resolutions
}
  377. - (bool)isColorAndDepthCaptureDevice
  378. {
  379. #if UNITY_HAS_COLORANDDEPTH_CAMERA
  380. if (UnityiOS110orNewer())
  381. {
  382. for (AVCaptureDeviceFormat *format in [self->_device formats])
  383. {
  384. if ([format supportedDepthDataFormats].count > 0)
  385. return true;
  386. }
  387. }
  388. #endif
  389. return false;
  390. }
  391. - (WebCamKind)getKind
  392. {
  393. if ([self->_device.localizedName containsString: @"Telephoto"])
  394. return kWebCamTelephoto;
  395. if ([self->_device.localizedName containsString: @"Ultra Wide"])
  396. return kWebCamUltraWideAngle;
  397. if ([self->_device.localizedName containsString: @"Dual"] && [self isColorAndDepthCaptureDevice])
  398. return kWebCamColorAndDepth;
  399. if ([self->_device.localizedName containsString: @"TrueDepth"] && [self isColorAndDepthCaptureDevice])
  400. return kWebCamColorAndDepth;
  401. return kWebCamWideAngle;
  402. }
// Probes which of a fixed set of session presets this device accepts and
// records the resulting capture resolution for each, filling the parallel
// arrays _resolutions/_resPresets. Color+depth devices probe only the photo
// preset and record the depth buffer resolution instead.
- (void)fillCaptureDeviceResolutions
{
    static NSString* preset[] =
    {
        AVCaptureSessionPresetLow, // usually 192x144
        AVCaptureSessionPreset352x288,
        AVCaptureSessionPresetMedium, // usually 480x320
        AVCaptureSessionPreset640x480,
        AVCaptureSessionPreset1280x720,
        AVCaptureSessionPreset1920x1080, // usually the same as AVCaptureSessionPresetHigh
        AVCaptureSessionPreset3840x2160,
    };
    const int count = sizeof(preset) / sizeof(preset[0]);
    self->_resolutions = [NSMutableArray arrayWithCapacity: count];
    self->_resPresets = [NSMutableArray arrayWithCapacity: count];

    AVCaptureInput* captureInput = [AVCaptureDeviceInput deviceInputWithDevice: self->_device error: nil];
    //Don't attempt to setup an AVCaptureSession if the user has explicitly denied permission to use the camera.
    if (captureInput != nil)
    {
        AVCaptureSession* captureSession = [[AVCaptureSession alloc] init];
        [captureSession addInput: captureInput];
#if UNITY_HAS_COLORANDDEPTH_CAMERA
        if (self->_kind == kWebCamColorAndDepth)
        {
            AVCaptureDepthDataOutput* captureDepthOutput = [[AVCaptureDepthDataOutput alloc] init];
            // depthCaptureSessionPreset is AVCaptureSessionPresetPhoto, used verbatim below
            if ([captureSession canSetSessionPreset: depthCaptureSessionPreset])
            {
                [captureSession setSessionPreset: AVCaptureSessionPresetPhoto];
                [captureSession addOutput: captureDepthOutput];
                CMVideoDimensions dim = CMVideoFormatDescriptionGetDimensions(self->_device.activeDepthDataFormat.formatDescription); // for ColorAndDepth camera return depth buffer resolution
                [self->_resolutions addObject: [NSValue valueWithCGSize: CGSizeMake(dim.width, dim.height)]];
                [self->_resPresets addObject: AVCaptureSessionPresetPhoto];
            }
        }
        else
#endif
        {
            // setting each preset switches the active format; read the format's
            // dimensions to learn the actual capture resolution for that preset
            for (int i = 0; i < count; ++i)
            {
                if ([captureSession canSetSessionPreset: preset[i]])
                {
                    [captureSession setSessionPreset: preset[i]];
                    CMVideoDimensions dim = CMVideoFormatDescriptionGetDimensions(self->_device.activeFormat.formatDescription);
                    [self->_resolutions addObject: [NSValue valueWithCGSize: CGSizeMake(dim.width, dim.height)]];
                    [self->_resPresets addObject: preset[i]];
                }
            }
        }
    }
}
  453. - (NSString*)pickPresetFromWidth:(int)w height:(int)h
  454. {
  455. #if UNITY_HAS_COLORANDDEPTH_CAMERA
  456. if (self->_kind == kWebCamColorAndDepth)
  457. {
  458. return depthCaptureSessionPreset;
  459. }
  460. #endif
  461. int requestedWidth = w > 0 ? w : 640;
  462. int requestedHeight = h > 0 ? h : 480;
  463. if (requestedHeight > requestedWidth) // hardware camera frame is landscape oriented
  464. std::swap(requestedWidth, requestedHeight);
  465. NSInteger ret = -1;
  466. double bestMatch = std::numeric_limits<double>::max();
  467. for (NSInteger i = 0, n = [_resolutions count]; i < n; ++i)
  468. {
  469. double width = [self->_resolutions[i] CGSizeValue].width;
  470. double height = [self->_resolutions[i] CGSizeValue].height;
  471. double match = std::abs(std::log(requestedWidth / width)) + std::abs(std::log(requestedHeight / height));
  472. if (match < bestMatch)
  473. {
  474. ret = i;
  475. bestMatch = match;
  476. }
  477. }
  478. NSAssert(ret != -1, @"Cannot pick capture preset");
  479. return ret != -1 ? self->_resPresets[ret] : AVCaptureSessionPresetHigh;
  480. }
// Wraps an AVCaptureDevice and precomputes the metadata Unity queries:
// facing, autofocus-point support, kind classification, and the supported
// resolutions table.
// NOTE(review): does not call [super init]; relies on being invoked on a
// freshly +alloc'ed instance — confirm against callers.
- (CameraCaptureDevice*)initWithDevice:(AVCaptureDevice*)device
{
    self->_device = device;
    self->_frontFacing = device.position == AVCaptureDevicePositionFront ? 1 : 0;
    self->_autoFocusPointSupported = [CameraCaptureController focusPointSupported: device withFocusMode: AVCaptureFocusModeAutoFocus] ? 1 : 0;
    self->_kind = [self getKind];
    [self fillCaptureDeviceResolutions];
    return self;
}

// Initializes controller for this device with the preset best matching w/h:
// synchronized color+depth capture when the device supports it (iOS 11+),
// plain color capture otherwise. isDepth must be false on non-depth devices.
// Returns whether initialization succeeded.
- (bool)initCaptureForController:(CameraCaptureController*)controller width:(int)w height:(int)h fps:(float)fps isDepth:(bool)isDepth
{
    bool initResult = false;
    NSString *preset = [self pickPresetFromWidth: w height: h];
#if UNITY_HAS_COLORANDDEPTH_CAMERA
    if (UnityiOS110orNewer() && [self isColorAndDepthCaptureDevice])
    {
        initResult = [controller initColorAndDepthCameraCapture: self->_device preset: preset fps: fps isDepth: isDepth];
    }
    else
#endif
    {
        assert(!isDepth);
        initResult = [controller initCapture: self->_device preset: preset fps: fps];
    }
    return initResult;
}
  507. + (bool)initialized
  508. {
  509. return videoCaptureDevices != nil;
  510. }
  511. + (void)createCameraCaptureDevicesArray
  512. {
  513. videoCaptureDevices = [NSMutableArray arrayWithCapacity: 2];
  514. }
  515. + (void)addCameraCaptureDevice:(AVCaptureDevice*)device
  516. {
  517. [videoCaptureDevices addObject: [[CameraCaptureDevice alloc] initWithDevice: device]];
  518. }
  519. @end
// Enumerates the physical cameras once (building the global device list), then
// reports each device's name, facing, autofocus support, kind, and resolution
// table to Unity through callback. Resolutions are flattened as w/h pairs.
extern "C" void UnityEnumVideoCaptureDevices(void* udata, void(*callback)(void* udata, const char* name, int frontFacing, int autoFocusPointSupported, int kind, const int* resolutions, int resCount))
{
    if (![CameraCaptureDevice initialized])
    {
        [CameraCaptureDevice createCameraCaptureDevicesArray];

        // device types to discover; extended with the camera types each iOS
        // version introduced, guarded by both SDK and runtime checks
        NSMutableArray<AVCaptureDeviceType>* captureDevices = [NSMutableArray arrayWithObjects: AVCaptureDeviceTypeBuiltInWideAngleCamera, AVCaptureDeviceTypeBuiltInTelephotoCamera, nil];
#if UNITY_HAS_COLORANDDEPTH_CAMERA
        if (UnityiOS102orNewer())
        {
            [captureDevices addObject: AVCaptureDeviceTypeBuiltInDualCamera];
        }
#endif
#if UNITY_HAS_IOSSDK_11_1
        if (UnityiOS111orNewer())
        {
            [captureDevices addObject: AVCaptureDeviceTypeBuiltInTrueDepthCamera];
        }
#endif
#if UNITY_HAS_IOSSDK_13_0
        if (UnityiOS130orNewer())
        {
            [captureDevices addObject: AVCaptureDeviceTypeBuiltInUltraWideCamera];
            [captureDevices addObject: AVCaptureDeviceTypeBuiltInDualWideCamera];
            [captureDevices addObject: AVCaptureDeviceTypeBuiltInTripleCamera];
        }
#endif
        AVCaptureDeviceDiscoverySession *captureDeviceDiscoverySession = [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes: captureDevices mediaType: AVMediaTypeVideo position: AVCaptureDevicePositionUnspecified];
        for (AVCaptureDevice* device in [captureDeviceDiscoverySession devices])
        {
            [CameraCaptureDevice addCameraCaptureDevice: device];
        }
    }

    // we should not provide camera devices information while access has not been granted
    // but we need to try to enumerate camera devices anyway to trigger permission request dialog
    if ([AVCaptureDevice authorizationStatusForMediaType: AVMediaTypeVideo] != AVAuthorizationStatusAuthorized)
        return;

    for (CameraCaptureDevice *cameraCaptureDevice in videoCaptureDevices)
    {
        int resCount = (int)[cameraCaptureDevice->_resolutions count];
        int *resolutions = new int[resCount * 2]; // interleaved [w0,h0,w1,h1,...]
        for (int i = 0; i < resCount; ++i)
        {
            resolutions[i * 2] = (int)[cameraCaptureDevice->_resolutions[i] CGSizeValue].width;
            resolutions[i * 2 + 1] = (int)[cameraCaptureDevice->_resolutions[i] CGSizeValue].height;
        }
        callback(udata, [cameraCaptureDevice->_device.localizedName UTF8String], cameraCaptureDevice->_frontFacing, cameraCaptureDevice->_autoFocusPointSupported, cameraCaptureDevice->_kind, resolutions, resCount);
        delete[] resolutions;
    }
}
  569. extern "C" void* UnityInitCameraCapture(int deviceIndex, int w, int h, int fps, int isDepth, void* udata)
  570. {
  571. if (videoCaptureDevices != nil && deviceIndex < videoCaptureDevices.count)
  572. {
  573. CameraCaptureController* controller = [CameraCaptureController alloc];
  574. bool initResult = [videoCaptureDevices[deviceIndex] initCaptureForController: controller width: w height: h fps: (float)fps isDepth: (isDepth != 0)];
  575. if (initResult)
  576. {
  577. controller->_userData = udata;
  578. return (__bridge_retained void*)controller;
  579. }
  580. controller = nil;
  581. }
  582. return 0;
  583. }
  584. extern "C" void UnityStartCameraCapture(void* capture)
  585. {
  586. [(__bridge CameraCaptureController*)capture start];
  587. }
  588. extern "C" void UnityPauseCameraCapture(void* capture)
  589. {
  590. [(__bridge CameraCaptureController*)capture pause];
  591. }
  592. extern "C" void UnityStopCameraCapture(void* capture)
  593. {
  594. CameraCaptureController* controller = (__bridge_transfer CameraCaptureController*)capture;
  595. [controller stop];
  596. controller = nil;
  597. }
  598. extern "C" void UnityCameraCaptureExtents(void* capture, int* w, int* h)
  599. {
  600. CameraCaptureController* controller = (__bridge CameraCaptureController*)capture;
  601. if (controller == nil)
  602. return;
  603. *w = (int)controller->_width;
  604. *h = (int)controller->_height;
  605. }
  606. extern "C" void UnityCameraCaptureReadToMemory(void* capture, void* dst_, int w, int h)
  607. {
  608. CameraCaptureController* controller = (__bridge CameraCaptureController*)capture;
  609. if (controller == nil)
  610. return;
  611. assert(w == controller->_width && h == controller->_height);
  612. [controller capturePixelBufferToMemBuffer: (uint8_t*)dst_];
  613. }
  614. extern "C" int UnityCameraCaptureVideoRotationDeg(void* capture)
  615. {
  616. CameraCaptureController* controller = (__bridge CameraCaptureController*)capture;
  617. if (controller == nil)
  618. return 0;
  619. // all cams are landscape.
  620. switch (UnityCurrentOrientation())
  621. {
  622. case portrait: return 90;
  623. case portraitUpsideDown: return 270;
  624. case landscapeLeft: return controller.captureDevice.position == AVCaptureDevicePositionFront ? 180 : 0;
  625. case landscapeRight: return controller.captureDevice.position == AVCaptureDevicePositionFront ? 0 : 180;
  626. default: assert(false && "bad orientation returned from UnityCurrentOrientation()"); break;
  627. }
  628. return 0;
  629. }
  630. extern "C" int UnityCameraCaptureVerticallyMirrored(void* capture)
  631. {
  632. CameraCaptureController* controller = (__bridge CameraCaptureController*)capture;
  633. if (controller == nil)
  634. return 0;
  635. return [controller isCVTextureFlipped];
  636. }
  637. extern "C" int UnityCameraCaptureSetAutoFocusPoint(void* capture, float x, float y)
  638. {
  639. CameraCaptureController* controller = (__bridge CameraCaptureController*)capture;
  640. if (controller == nil)
  641. return 0;
  642. return [controller setFocusPointWithX: x Y: y];
  643. }
  644. #else
// STUBBED OUT UNTIL DEVELOPER FINDs AN AWESOME CAMERA SOLUTION FOR APPLE TV //
// tvOS (or webcam-disabled) builds: keep the C ABI intact with no-op stubs so
// the Unity runtime links unchanged.
extern "C" void UnityEnumVideoCaptureDevices(void* udata, void(*callback)(void* udata, const char* name, int frontFacing, int autoFocusPointSupported, int kind, const int* resolutions, int resCount))
{
}

extern "C" void* UnityInitCameraCapture(int deviceIndex, int w, int h, int fps, int isDepth, void* udata)
{
    return 0;
}

extern "C" void UnityStartCameraCapture(void* capture)
{
}

extern "C" void UnityPauseCameraCapture(void* capture)
{
}

extern "C" void UnityStopCameraCapture(void* capture)
{
}

extern "C" void UnityCameraCaptureExtents(void* capture, int* w, int* h)
{
}

extern "C" void UnityCameraCaptureReadToMemory(void* capture, void* dst_, int w, int h)
{
}

extern "C" int UnityCameraCaptureVideoRotationDeg(void* capture)
{
    return 0;
}

extern "C" int UnityCameraCaptureVerticallyMirrored(void* capture)
{
    return 0;
}

extern "C" int UnityCameraCaptureSetAutoFocusPoint(void* capture, float x, float y)
{
    return 0;
}
  680. #endif