//
//  TBExampleVideoCapture.m
//  otkit-objc-libs
//
//  Created by Charley Robinson on 10/11/13.
//
//

#import <Availability.h>
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
#import <CoreVideo/CoreVideo.h>
#import <OpenTok/OpenTok.h>
#import "TBExampleVideoCapture.h"

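// Runtime iOS version checks. Frame rate configuration moved from
// AVCaptureConnection (iOS 6 and earlier) to AVCaptureDevice (iOS 7+), so
// several methods below branch on the system version.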
#define SYSTEM_VERSION_EQUAL_TO(v) \
    ([[[UIDevice currentDevice] systemVersion] compare:v options:NSNumericSearch] == NSOrderedSame)
#define SYSTEM_VERSION_GREATER_THAN(v) \
    ([[[UIDevice currentDevice] systemVersion] compare:v options:NSNumericSearch] == NSOrderedDescending)
#define SYSTEM_VERSION_GREATER_THAN_OR_EQUAL_TO(v) \
    ([[[UIDevice currentDevice] systemVersion] compare:v options:NSNumericSearch] != NSOrderedAscending)
#define SYSTEM_VERSION_LESS_THAN(v) \
    ([[[UIDevice currentDevice] systemVersion] compare:v options:NSNumericSearch] == NSOrderedAscending)
#define SYSTEM_VERSION_LESS_THAN_OR_EQUAL_TO(v) \
    ([[[UIDevice currentDevice] systemVersion] compare:v options:NSNumericSearch] != NSOrderedDescending)

@implementation TBExampleVideoCapture {
    id<OTVideoCaptureConsumer> _videoCaptureConsumer;
    OTVideoFrame* _videoFrame;

    uint32_t _captureWidth;
    uint32_t _captureHeight;
    NSString* _capturePreset;

    AVCaptureSession *_captureSession;
    AVCaptureDeviceInput *_videoInput;
    AVCaptureVideoDataOutput *_videoOutput;

    BOOL _capturing;
}

@synthesize captureSession = _captureSession;
@synthesize videoInput = _videoInput, videoOutput = _videoOutput;
@synthesize videoCaptureConsumer = _videoCaptureConsumer;

#define OT_VIDEO_CAPTURE_IOS_DEFAULT_INITIAL_FRAMERATE 15

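// Note: _capture_queue is not declared in this file; it is presumably a
// protected dispatch_queue_t ivar declared in TBExampleVideoCapture.h. All
// session configuration and sample buffer delivery happens on this serial
// queue.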
- (id)init {
    self = [super init];
    if (self) {
        _capturePreset = AVCaptureSessionPreset1280x720;
        [[self class] dimensionsForCapturePreset:_capturePreset
                                           width:&_captureWidth
                                          height:&_captureHeight];
        _capture_queue = dispatch_queue_create("com.tokbox.OTVideoCapture",
                                               DISPATCH_QUEUE_SERIAL);
        _videoFrame = [[OTVideoFrame alloc] initWithFormat:
                       [OTVideoFormat videoFormatNV12WithWidth:_captureWidth
                                                        height:_captureHeight]];
    }
    return self;
}

- (int32_t)captureSettings:(OTVideoFormat*)videoFormat {
    videoFormat.pixelFormat = OTPixelFormatNV12;
    videoFormat.imageWidth = _captureWidth;
    videoFormat.imageHeight = _captureHeight;
    return 0;
}

- (void)dealloc {
    [self stopCapture];
    [self releaseCapture];

    if (_capture_queue) {
        dispatch_release(_capture_queue);
        _capture_queue = nil;
    }

    [_videoFrame release];

    [super dealloc];
}

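// Device lookup helpers. -[AVCaptureDevice devicesWithMediaType:] was the
// supported enumeration API when this sample was written; on iOS 10+ it is
// deprecated in favor of AVCaptureDeviceDiscoverySession.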
- (AVCaptureDevice *)cameraWithPosition:(AVCaptureDevicePosition)position {
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *device in devices) {
        if ([device position] == position) {
            return device;
        }
    }
    return nil;
}

- (AVCaptureDevice *)frontFacingCamera {
    return [self cameraWithPosition:AVCaptureDevicePositionFront];
}

- (AVCaptureDevice *)backFacingCamera {
    return [self cameraWithPosition:AVCaptureDevicePositionBack];
}

- (BOOL)hasMultipleCameras {
    return [[AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo] count] > 1;
}

- (BOOL)hasTorch {
    return [[[self videoInput] device] hasTorch];
}

- (AVCaptureTorchMode)torchMode {
    return [[[self videoInput] device] torchMode];
}

- (void)setTorchMode:(AVCaptureTorchMode)torchMode {
    AVCaptureDevice *device = [[self videoInput] device];
    if ([device isTorchModeSupported:torchMode] &&
        [device torchMode] != torchMode)
    {
        NSError *error;
        if ([device lockForConfiguration:&error]) {
            [device setTorchMode:torchMode];
            [device unlockForConfiguration];
        } else {
            // TODO: handle the configuration lock error
        }
    }
}

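// A CMTime frame duration is the reciprocal of a frame rate: a duration of
// value/timescale seconds corresponds to timescale/value frames per second
// (e.g. 1/30 -> 30 fps). The conversions below divide in floating point to
// avoid truncating fractional rates.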
- (double)maxSupportedFrameRate {
    AVFrameRateRange* firstRange =
        [_videoInput.device.activeFormat.videoSupportedFrameRateRanges
         objectAtIndex:0];

    CMTime bestDuration = firstRange.minFrameDuration;
    double bestFrameRate =
        (double)bestDuration.timescale / (double)bestDuration.value;
    CMTime currentDuration;
    double currentFrameRate;
    for (AVFrameRateRange* range in
         _videoInput.device.activeFormat.videoSupportedFrameRateRanges)
    {
        currentDuration = range.minFrameDuration;
        currentFrameRate =
            (double)currentDuration.timescale / (double)currentDuration.value;
        if (currentFrameRate > bestFrameRate) {
            bestFrameRate = currentFrameRate;
        }
    }

    return bestFrameRate;
}

- (BOOL)isAvailableActiveFrameRate:(double)frameRate {
    return (nil != [self frameRateRangeForFrameRate:frameRate]);
}

- (double)activeFrameRate {
    CMTime minFrameDuration = _videoInput.device.activeVideoMinFrameDuration;
    double framesPerSecond =
        (double)minFrameDuration.timescale / (double)minFrameDuration.value;

    return framesPerSecond;
}

- (AVFrameRateRange*)frameRateRangeForFrameRate:(double)frameRate {
    for (AVFrameRateRange* range in
         _videoInput.device.activeFormat.videoSupportedFrameRateRanges)
    {
        if (range.minFrameRate <= frameRate && frameRate <= range.maxFrameRate)
        {
            return range;
        }
    }
    return nil;
}

- (void)setActiveFrameRate:(double)frameRate {

    if (!_videoOutput || !_videoInput) {
        return;
    }

    AVFrameRateRange* frameRateRange =
        [self frameRateRangeForFrameRate:frameRate];
    if (nil == frameRateRange) {
        NSLog(@"unsupported frameRate %f", frameRate);
        return;
    }
    // A frame duration is the reciprocal of the frame rate, so pin both the
    // min and max duration to 1/frameRate. (frameRate was already validated
    // against the supported ranges above.)
    CMTime desiredMinFrameDuration = CMTimeMake(1, (int32_t)frameRate);
    CMTime desiredMaxFrameDuration = CMTimeMake(1, (int32_t)frameRate);

    [_captureSession beginConfiguration];

    if (SYSTEM_VERSION_GREATER_THAN_OR_EQUAL_TO(@"7.0")) {
        NSError* error;
        if ([_videoInput.device lockForConfiguration:&error]) {
            [_videoInput.device
             setActiveVideoMinFrameDuration:desiredMinFrameDuration];
            [_videoInput.device
             setActiveVideoMaxFrameDuration:desiredMaxFrameDuration];
            [_videoInput.device unlockForConfiguration];
        } else {
            NSLog(@"%@", error);
        }
    } else {
        AVCaptureConnection *conn =
            [_videoOutput connectionWithMediaType:AVMediaTypeVideo];
        if (conn.supportsVideoMinFrameDuration)
            conn.videoMinFrameDuration = desiredMinFrameDuration;
        if (conn.supportsVideoMaxFrameDuration)
            conn.videoMaxFrameDuration = desiredMaxFrameDuration;
    }
    [_captureSession commitConfiguration];
}

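// The mapping below from session preset to expected output dimensions is a
// best-effort table: the Photo/High/Medium/Low presets resolve to different
// resolutions depending on the device, so those entries are guesses that the
// capture callback corrects at runtime via updateCaptureFormatWithWidth:height:.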
+ (void)dimensionsForCapturePreset:(NSString*)preset
                             width:(uint32_t*)width
                            height:(uint32_t*)height
{
    if ([preset isEqualToString:AVCaptureSessionPreset352x288]) {
        *width = 352;
        *height = 288;
    } else if ([preset isEqualToString:AVCaptureSessionPreset640x480]) {
        *width = 640;
        *height = 480;
    } else if ([preset isEqualToString:AVCaptureSessionPreset1280x720]) {
        *width = 1280;
        *height = 720;
    } else if ([preset isEqualToString:AVCaptureSessionPreset1920x1080]) {
        *width = 1920;
        *height = 1080;
    } else if ([preset isEqualToString:AVCaptureSessionPresetPhoto]) {
        // see AVCaptureSessionPresetLow
        *width = 1920;
        *height = 1080;
    } else if ([preset isEqualToString:AVCaptureSessionPresetHigh]) {
        // see AVCaptureSessionPresetLow
        *width = 640;
        *height = 480;
    } else if ([preset isEqualToString:AVCaptureSessionPresetMedium]) {
        // see AVCaptureSessionPresetLow
        *width = 480;
        *height = 360;
    } else if ([preset isEqualToString:AVCaptureSessionPresetLow]) {
        // WARNING: This is a guess. Might be wrong for certain devices.
        // We'll use updateCaptureFormatWithWidth:height: if the actual
        // output differs from the expected value.
        *width = 192;
        *height = 144;
    }
}

+ (NSSet *)keyPathsForValuesAffectingAvailableCaptureSessionPresets
{
    return [NSSet setWithObjects:@"captureSession", @"videoInput", nil];
}

- (NSArray *)availableCaptureSessionPresets
{
    NSArray *allSessionPresets = [NSArray arrayWithObjects:
                                  AVCaptureSessionPreset352x288,
                                  AVCaptureSessionPreset640x480,
                                  AVCaptureSessionPreset1280x720,
                                  AVCaptureSessionPreset1920x1080,
                                  AVCaptureSessionPresetPhoto,
                                  AVCaptureSessionPresetHigh,
                                  AVCaptureSessionPresetMedium,
                                  AVCaptureSessionPresetLow,
                                  nil];

    NSMutableArray *availableSessionPresets =
        [NSMutableArray arrayWithCapacity:[allSessionPresets count]];
    for (NSString *sessionPreset in allSessionPresets) {
        if ([[self captureSession] canSetSessionPreset:sessionPreset])
            [availableSessionPresets addObject:sessionPreset];
    }

    return availableSessionPresets;
}

- (void)updateCaptureFormatWithWidth:(int)width height:(int)height
{
    _captureWidth = width;
    _captureHeight = height;
    [_videoFrame setFormat:[OTVideoFormat
                            videoFormatNV12WithWidth:_captureWidth
                                              height:_captureHeight]];
}

- (NSString*)captureSessionPreset {
    return _captureSession.sessionPreset;
}

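// Changing the preset also re-asserts the NV12 (bi-planar 4:2:0) pixel format
// on the video output, so downstream frame handling can keep assuming that
// layout after a reconfiguration.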
- (void)setCaptureSessionPreset:(NSString*)preset {
    AVCaptureSession *session = [self captureSession];

    if ([session canSetSessionPreset:preset] &&
        ![preset isEqualToString:session.sessionPreset]) {

        [_captureSession beginConfiguration];
        _captureSession.sessionPreset = preset;
        _capturePreset = preset;

        [_videoOutput setVideoSettings:
         [NSDictionary dictionaryWithObjectsAndKeys:
          [NSNumber numberWithInt:
           kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange],
          kCVPixelBufferPixelFormatTypeKey,
          nil]];

        [_captureSession commitConfiguration];
    }
}

- (BOOL)toggleCameraPosition {
    AVCaptureDevicePosition currentPosition = _videoInput.device.position;
    if (AVCaptureDevicePositionBack == currentPosition) {
        [self setCameraPosition:AVCaptureDevicePositionFront];
    } else if (AVCaptureDevicePositionFront == currentPosition) {
        [self setCameraPosition:AVCaptureDevicePositionBack];
    }

    // TODO: check for success
    return YES;
}

- (NSArray*)availableCameraPositions {
    NSArray* devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    NSMutableSet* result = [NSMutableSet setWithCapacity:devices.count];
    for (AVCaptureDevice* device in devices) {
        [result addObject:[NSNumber numberWithInt:device.position]];
    }
    return [result allObjects];
}

- (AVCaptureDevicePosition)cameraPosition {
    return _videoInput.device.position;
}

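// Swapping cameras replaces the session's device input inside a
// beginConfiguration/commitConfiguration pair, so the session keeps running
// and the preset is re-applied only after the swap succeeds.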
- (void)setCameraPosition:(AVCaptureDevicePosition)position {
    BOOL success = NO;

    NSString* preset = self.captureSession.sessionPreset;

    if ([self hasMultipleCameras]) {
        NSError *error;
        AVCaptureDeviceInput *newVideoInput;

        if (position == AVCaptureDevicePositionBack) {
            newVideoInput = [AVCaptureDeviceInput deviceInputWithDevice:
                             [self backFacingCamera] error:&error];
            [self setTorchMode:AVCaptureTorchModeOff];
            _videoOutput.alwaysDiscardsLateVideoFrames = YES;
        } else if (position == AVCaptureDevicePositionFront) {
            newVideoInput = [AVCaptureDeviceInput deviceInputWithDevice:
                             [self frontFacingCamera] error:&error];
            _videoOutput.alwaysDiscardsLateVideoFrames = YES;
        } else {
            goto bail;
        }

        AVCaptureSession *session = [self captureSession];
        if (newVideoInput != nil) {
            [session beginConfiguration];
            [session removeInput:_videoInput];
            if ([session canAddInput:newVideoInput]) {
                [session addInput:newVideoInput];
                [_videoInput release];
                _videoInput = [newVideoInput retain];
                success = YES;
            } else {
                // Restore the previous input if the new one was rejected.
                [session addInput:_videoInput];
            }
            [session commitConfiguration];
        } else if (error) {
            NSLog(@"Error creating new video input: %@", error);
        }
    }

    if (success) {
        [self setCaptureSessionPreset:preset];
    }
bail:
    return;
}

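// Teardown: detach the sample buffer delegate before stopping the session so
// no callbacks arrive mid-release, then synchronously drain the capture queue
// before the outputs and inputs are released.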
- (void)releaseCapture {
    [self stopCapture];
    [_videoOutput setSampleBufferDelegate:nil queue:NULL];
    dispatch_sync(_capture_queue, ^() {
        [_captureSession stopRunning];
    });
    [_captureSession release];
    _captureSession = nil;
    [_videoOutput release];
    _videoOutput = nil;

    [_videoInput release];
    _videoInput = nil;
}

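// Builds the full capture pipeline: session -> front camera input -> NV12
// video data output, with sample buffers delivered on the serial capture
// queue. The session is configured and started before the initial frame rate
// is applied.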
- (void)initCapture {
    //-- Setup Capture Session.
    _captureSession = [[AVCaptureSession alloc] init];
    [_captureSession beginConfiguration];

    [_captureSession setSessionPreset:_capturePreset];

    if (SYSTEM_VERSION_GREATER_THAN_OR_EQUAL_TO(@"7.0")) {
        // Needs to be set in order to receive audio route/interruption events.
        _captureSession.usesApplicationAudioSession = NO;
    }

    //-- Create a video device and input from that device.
    //   Add the input to the capture session.
    AVCaptureDevice* videoDevice = [self frontFacingCamera];
    if (videoDevice == nil) {
        assert(0); // TODO: handle the missing-camera case gracefully
    }

    //-- Add the device to the session.
    NSError *error;
    _videoInput = [[AVCaptureDeviceInput deviceInputWithDevice:videoDevice
                                                         error:&error] retain];

    // Per Cocoa convention, check the return value rather than the NSError.
    if (_videoInput == nil) {
        NSLog(@"%@", error);
        assert(0); // TODO: handle the error gracefully
    }

    [_captureSession addInput:_videoInput];

    //-- Create the output for the capture session.
    _videoOutput = [[AVCaptureVideoDataOutput alloc] init];
    [_videoOutput setAlwaysDiscardsLateVideoFrames:YES];

    [_videoOutput setVideoSettings:
     [NSDictionary dictionaryWithObject:
      [NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange]
                                forKey:(id)kCVPixelBufferPixelFormatTypeKey]];

    [_videoOutput setSampleBufferDelegate:self queue:_capture_queue];

    [_captureSession addOutput:_videoOutput];
    [_captureSession commitConfiguration];

    [_captureSession startRunning];

    [self setActiveFrameRate:OT_VIDEO_CAPTURE_IOS_DEFAULT_INITIAL_FRAMERATE];
}

- (BOOL)isCaptureStarted {
    return _captureSession && _capturing;
}

- (int32_t)startCapture {
    _capturing = YES;
    return 0;
}

- (int32_t)stopCapture {
    _capturing = NO;
    return 0;
}

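// Maps the current UI orientation to an OTVideoOrientation. Because the
// front camera's output is mirrored, the landscape mappings are swapped
// relative to the back camera.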
- (OTVideoOrientation)currentDeviceOrientation {
    UIInterfaceOrientation orientation =
        [[UIApplication sharedApplication] statusBarOrientation];
    // Transforms are different for the front and back cameras.
    if (AVCaptureDevicePositionFront == self.cameraPosition)
    {
        switch (orientation) {
            case UIInterfaceOrientationLandscapeLeft:
                return OTVideoOrientationUp;
            case UIInterfaceOrientationLandscapeRight:
                return OTVideoOrientationDown;
            case UIInterfaceOrientationPortrait:
                return OTVideoOrientationLeft;
            case UIInterfaceOrientationPortraitUpsideDown:
                return OTVideoOrientationRight;
            default:
                break;
        }
    }
    else
    {
        switch (orientation) {
            case UIInterfaceOrientationLandscapeLeft:
                return OTVideoOrientationDown;
            case UIInterfaceOrientationLandscapeRight:
                return OTVideoOrientationUp;
            case UIInterfaceOrientationPortrait:
                return OTVideoOrientationLeft;
            case UIInterfaceOrientationPortraitUpsideDown:
                return OTVideoOrientationRight;
            default:
                break;
        }
    }

    return OTVideoOrientationUp;
}

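// AVCaptureVideoDataOutputSampleBufferDelegate: dropped frames are simply
// ignored. alwaysDiscardsLateVideoFrames is enabled, so drops are expected
// under load.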
- (void)captureOutput:(AVCaptureOutput *)captureOutput
  didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
}

/**
 * Def: sanitary(adj.): A contiguous image buffer with no padding. All bytes
 * in the store are actual pixel data.
 */
- (BOOL)imageBufferIsSanitary:(CVImageBufferRef)imageBuffer
{
    size_t planeCount = CVPixelBufferGetPlaneCount(imageBuffer);
    // (Apple bug?) The interleaved chroma plane measures in at half of its
    // actual size. No idea how many pixel formats this applies to, but we're
    // specifically targeting 4:2:0 here, so there are some assumptions that
    // must be made.
    BOOL biplanar = (2 == planeCount);

    for (int i = 0; i < CVPixelBufferGetPlaneCount(imageBuffer); i++) {
        // Pixels of real image data per row in this plane.
        size_t imageWidth =
            CVPixelBufferGetWidthOfPlane(imageBuffer, i);

        // The interleaved chroma plane carries two bytes per reported pixel.
        if (biplanar && 1 == i) {
            imageWidth *= 2;
        }

        // Bytes actually allocated per row, including any padding.
        size_t dataWidth =
            CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, i);

        if (imageWidth != dataWidth) {
            return NO;
        }

        BOOL hasNextAddress = CVPixelBufferGetPlaneCount(imageBuffer) > i + 1;
        BOOL nextPlaneContiguous = YES;

        if (hasNextAddress) {
            size_t planeLength =
                dataWidth * CVPixelBufferGetHeightOfPlane(imageBuffer, i);

            uint8_t* baseAddress =
                CVPixelBufferGetBaseAddressOfPlane(imageBuffer, i);

            uint8_t* nextAddress =
                CVPixelBufferGetBaseAddressOfPlane(imageBuffer, i + 1);

            nextPlaneContiguous = &(baseAddress[planeLength]) == nextAddress;
        }

        if (!nextPlaneContiguous) {
            return NO;
        }
    }

    return YES;
}

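// "Sanitizing" copies a padded pixel buffer into a freshly malloc'd,
// contiguous buffer so it can be handed to the consumer as tightly packed
// planes. Only the two NV12 variants (video/full range) are handled, which
// matches the formats this capturer requests.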
- (size_t)sanitizeImageBuffer:(CVImageBufferRef)imageBuffer
                         data:(uint8_t**)data
                       planes:(NSPointerArray*)planes
{
    uint32_t pixelFormat = CVPixelBufferGetPixelFormatType(imageBuffer);
    if (kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange == pixelFormat ||
        kCVPixelFormatType_420YpCbCr8BiPlanarFullRange == pixelFormat)
    {
        return [self sanitizeBiPlanarImageBuffer:imageBuffer
                                            data:data
                                          planes:planes];
    } else {
        NSLog(@"No sanitization implementation for pixelFormat %u",
              pixelFormat);
        *data = NULL;
        return 0;
    }
}

- (size_t)sanitizeBiPlanarImageBuffer:(CVImageBufferRef)imageBuffer
                                 data:(uint8_t**)data
                               planes:(NSPointerArray*)planes
{
    size_t sanitaryBufferSize = 0;
    for (int i = 0; i < CVPixelBufferGetPlaneCount(imageBuffer); i++) {
        size_t planeImageWidth =
            // TODO: (Apple bug?) The biplanar pixel format reports 1/2 the
            // width of what actually ends up in the pixel buffer for
            // interleaved chroma. The only thing we can do about it is use
            // the image width for both plane calculations, in spite of this
            // being technically wrong.
            //CVPixelBufferGetWidthOfPlane(imageBuffer, i);
            CVPixelBufferGetWidth(imageBuffer);
        size_t planeImageHeight =
            CVPixelBufferGetHeightOfPlane(imageBuffer, i);
        sanitaryBufferSize += (planeImageWidth * planeImageHeight);
    }
    uint8_t* newImageBuffer = malloc(sanitaryBufferSize);
    size_t bytesCopied = 0;
    for (int i = 0; i < CVPixelBufferGetPlaneCount(imageBuffer); i++) {
        [planes addPointer:&(newImageBuffer[bytesCopied])];
        uint8_t* planeBaseAddress =
            (uint8_t*)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, i);
        size_t planeDataWidth =
            CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, i);
        size_t planeImageWidth =
            // Same as above: use the full image width for both the luma and
            // the interleaved chroma planes.
            //CVPixelBufferGetWidthOfPlane(imageBuffer, i);
            CVPixelBufferGetWidth(imageBuffer);
        size_t planeImageHeight =
            CVPixelBufferGetHeightOfPlane(imageBuffer, i);
        // Copy row by row, skipping any per-row padding in the source.
        for (int rowIndex = 0; rowIndex < planeImageHeight; rowIndex++) {
            memcpy(&(newImageBuffer[bytesCopied]),
                   &(planeBaseAddress[planeDataWidth * rowIndex]),
                   planeImageWidth);
            bytesCopied += planeImageWidth;
        }
    }
    assert(bytesCopied == sanitaryBufferSize);
    *data = newImageBuffer;
    return bytesCopied;
}

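// Delegate callback for each captured frame: locks the pixel buffer, syncs
// the reported format with the actual buffer dimensions, attaches the plane
// pointers (copying only when the buffer is padded), and hands the frame to
// the OTVideoCaptureConsumer.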
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    if (!(_capturing && _videoCaptureConsumer)) {
        return;
    }

    CMTime time = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    _videoFrame.timestamp = time;
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    if (width != _captureWidth || height != _captureHeight) {
        [self updateCaptureFormatWithWidth:(int)width height:(int)height];
    }
    _videoFrame.format.imageWidth = width;
    _videoFrame.format.imageHeight = height;
    CMTime minFrameDuration;
    if (SYSTEM_VERSION_GREATER_THAN_OR_EQUAL_TO(@"7.0")) {
        minFrameDuration = _videoInput.device.activeVideoMinFrameDuration;
    } else {
        AVCaptureConnection *conn =
            [_videoOutput connectionWithMediaType:AVMediaTypeVideo];
        minFrameDuration = conn.videoMinFrameDuration;
    }
    _videoFrame.format.estimatedFramesPerSecond =
        (double)minFrameDuration.timescale / (double)minFrameDuration.value;
    // TODO: how do we measure this from AVFoundation?
    _videoFrame.format.estimatedCaptureDelay = 100;
    _videoFrame.orientation = [self currentDeviceOrientation];

    [_videoFrame clearPlanes];
    uint8_t* sanitizedImageBuffer = NULL;

    if (!CVPixelBufferIsPlanar(imageBuffer))
    {
        [_videoFrame.planes
         addPointer:CVPixelBufferGetBaseAddress(imageBuffer)];
    } else if ([self imageBufferIsSanitary:imageBuffer]) {
        for (int i = 0; i < CVPixelBufferGetPlaneCount(imageBuffer); i++) {
            [_videoFrame.planes addPointer:
             CVPixelBufferGetBaseAddressOfPlane(imageBuffer, i)];
        }
    } else {
        [self sanitizeImageBuffer:imageBuffer
                             data:&sanitizedImageBuffer
                           planes:_videoFrame.planes];
    }

    [_videoCaptureConsumer consumeFrame:_videoFrame];

    // free(NULL) is a no-op, so this is safe when no copy was made.
    free(sanitizedImageBuffer);

    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
}

@end