This is the JMCamController.h file
==================================
//
//  JMCamController.h
//  JMCamController
//
//  Created by Gary Gardner on 11/28/12.
//  Copyright (c) 2012 Gary Gardner. All rights reserved.
//

#import <Cocoa/Cocoa.h>
#import <AVFoundation/AVFoundation.h>
#import <QuartzCore/QuartzCore.h>
#import <CoreMedia/CoreMedia.h>
#import <AppKit/AppKit.h>

@class AVCaptureVideoPreviewLayer;
@class AVCaptureSession;
@class AVCaptureDeviceInput;
@class AVCaptureConnection;
@class AVCaptureDevice;

@interface JMCamController : NSObject
{
@private
    AVCaptureSession *session;
    AVCaptureDeviceInput *videoDeviceInput;
    AVCaptureVideoDataOutput *videoDataOutput;

    //AVAssetWriter *assetWriter;
    AVAssetWriterInput *videoInput;

    NSArray *videoDevices;
    NSArray *observers;

    NSData *videoData;

    //CMTime frameDuration;
}

#pragma mark Device Selection
@property (retain) NSArray *videoDevices;
@property (assign) AVCaptureDevice *selectedVideoDevice;
//@property (retain) NSArray *observers;
//@property (retain) AVCaptureDeviceInput *videoDeviceInput;

#pragma mark Recording
@property (retain) AVCaptureSession *session;

#pragma mark Transport Controls
@property (readonly, getter=isPlaying) BOOL playing;

//@property (nonatomic) float framesPerSecond;

- (BOOL)setVideoDevice:(NSUInteger *)deviceNumber;
- (BOOL)run;
- (NSData *)getData;
- (void)refreshDevices;
- (void)videoBytes:(NSImage *)videoImage;
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection;
- (NSImage *)videoImageFromSampleBuffer:(CMSampleBufferRef *)sampleBuffer;
- (NSArray *)getDeviceArray;

@end

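Before the implementation, here is how a caller might drive this class end to end (a minimal sketch; the command-line harness, the one-second wait, and device index 0 are assumptions, not part of the original listing):

// Hypothetical caller for JMCamController (not part of the original paste)
#import <Cocoa/Cocoa.h>
#import "JMCamController.h"

int main(int argc, const char *argv[])
{
    NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];

    JMCamController *cam = [[JMCamController alloc] init];

    // List the attached video devices, then pick the first one (assumed index 0)
    NSArray *names = [cam getDeviceArray];
    NSLog(@"Devices: %@", names);

    NSUInteger deviceNumber = 0;
    if ([names count] > 0 && [cam setVideoDevice:&deviceNumber]) {
        [cam run];

        // Give the session a moment to deliver frames, then grab the latest one
        [[NSRunLoop currentRunLoop] runUntilDate:[NSDate dateWithTimeIntervalSinceNow:1.0]];
        NSData *tiff = [cam getData];
        NSLog(@"Captured %lu TIFF bytes", (unsigned long)[tiff length]);
    }

    [cam release];
    [pool drain];
    return 0;
}
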
This is the JMCamController.m file
==================================
//
//  JMCamController.m
//  JMCamController
//
//  Created by Gary Gardner on 11/28/12.
//  Copyright (c) 2012 Gary Gardner. All rights reserved.
//

#import "JMCamController.h"

#define DEFAULT_FRAMES_PER_SECOND 5.0

@interface JMCamController () <AVCaptureVideoDataOutputSampleBufferDelegate>

// Properties for internal use
@property (retain) AVCaptureDeviceInput *videoDeviceInput;
@property (retain) NSArray *observers;

// Methods for internal use
- (void)refreshDevices;

@end

@implementation JMCamController

@synthesize videoDeviceInput;
@synthesize videoDevices;
@synthesize session;
@synthesize observers;
- (id)init
{
    self = [super init];
    //frameDuration = CMTimeMakeWithSeconds(1. / DEFAULT_FRAMES_PER_SECOND, 90000);

    if (self) {
        // Create a capture session
        session = [[AVCaptureSession alloc] init];

        // Capture Notification Observers
        NSNotificationCenter *notificationCenter = [NSNotificationCenter defaultCenter];
        id runtimeErrorObserver = [notificationCenter addObserverForName:AVCaptureSessionRuntimeErrorNotification
                                                                  object:session
                                                                   queue:[NSOperationQueue mainQueue]
                                                              usingBlock:^(NSNotification *note) {
            // Log the NSError carried in the notification's userInfo
            // (logging the AVCaptureSessionErrorKey constant itself would only print the key name)
            NSLog(@"an AVCaptureSessionRuntimeErrorNotification occurred: %@",
                  [[note userInfo] objectForKey:AVCaptureSessionErrorKey]);
        }];
        id didStartRunningObserver = [notificationCenter addObserverForName:AVCaptureSessionDidStartRunningNotification
                                                                     object:session
                                                                      queue:[NSOperationQueue mainQueue]
                                                                 usingBlock:^(NSNotification *note) {
            NSLog(@"did start running");
        }];
        id didStopRunningObserver = [notificationCenter addObserverForName:AVCaptureSessionDidStopRunningNotification
                                                                    object:session
                                                                     queue:[NSOperationQueue mainQueue]
                                                                usingBlock:^(NSNotification *note) {
            NSLog(@"did stop running");
        }];
        id deviceWasConnectedObserver = [notificationCenter addObserverForName:AVCaptureDeviceWasConnectedNotification
                                                                        object:nil
                                                                         queue:[NSOperationQueue mainQueue]
                                                                    usingBlock:^(NSNotification *note) {
            [self refreshDevices];
        }];
        id deviceWasDisconnectedObserver = [notificationCenter addObserverForName:AVCaptureDeviceWasDisconnectedNotification
                                                                           object:nil
                                                                            queue:[NSOperationQueue mainQueue]
                                                                       usingBlock:^(NSNotification *note) {
            [self refreshDevices];
        }];
        // Note: NSInvalidArgumentException names an exception, not a notification,
        // so this observer only fires if something explicitly posts a notification
        // with that name
        id invalidArgumentExceptionObserver = [notificationCenter addObserverForName:NSInvalidArgumentException
                                                                              object:nil
                                                                               queue:[NSOperationQueue mainQueue]
                                                                          usingBlock:^(NSNotification *note) {
            NSLog(@"NSInvalidArgumentException Error");
        }];
        observers = [[NSArray alloc] initWithObjects:runtimeErrorObserver, didStartRunningObserver, didStopRunningObserver, deviceWasConnectedObserver, deviceWasDisconnectedObserver, invalidArgumentExceptionObserver, nil];

        // Select the default video device if one exists, otherwise fall back to a muxed device
        AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
        if (videoDevice) {
            [self setSelectedVideoDevice:videoDevice];
        } else {
            [self setSelectedVideoDevice:[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeMuxed]];
        }

        // Initial refresh of device list
        [self refreshDevices];
    }
    return self;
}

- (void)dealloc
{
    // Unregister the block-based observers before releasing the array that holds them
    NSNotificationCenter *notificationCenter = [NSNotificationCenter defaultCenter];
    for (id observer in observers)
        [notificationCenter removeObserver:observer];
    [observers release];

    [videoDevices release];
    [session release];
    [videoDeviceInput release];
    [videoDataOutput release];
    [videoData release];

    [super dealloc];
}

- (void)didPresentErrorWithRecovery:(BOOL)didRecover contextInfo:(void *)contextInfo
{
    // Do nothing
    NSLog(@"An error occurred and should be bubbled up");
}

- (void)refreshDevices
{
    [self setVideoDevices:[[AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo] arrayByAddingObjectsFromArray:[AVCaptureDevice devicesWithMediaType:AVMediaTypeMuxed]]];

    [[self session] beginConfiguration];

    if (![[self videoDevices] containsObject:[self selectedVideoDevice]])
        [self setSelectedVideoDevice:nil];

    [[self session] commitConfiguration];
}

- (AVCaptureDevice *)selectedVideoDevice
{
    return [videoDeviceInput device];
}

- (void)setSelectedVideoDevice:(AVCaptureDevice *)selectedVideoDevice
{
    [[self session] beginConfiguration];

    if ([self videoDeviceInput]) {
        // Remove the old device input from the session
        [session removeInput:[self videoDeviceInput]];
        [self setVideoDeviceInput:nil];
    }

    if (selectedVideoDevice) {
        NSError *error = nil;

        // Create a device input for the device and add it to the session
        AVCaptureDeviceInput *newVideoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:selectedVideoDevice error:&error];
        if (newVideoDeviceInput == nil || error) {
            NSLog(@"deviceInputWithDevice: failed (%@)", error);
            /*dispatch_async(dispatch_get_main_queue(), ^(void) {
                [self presentError:error];
            });*/
        } else {
            if (![selectedVideoDevice supportsAVCaptureSessionPreset:[session sessionPreset]])
                [[self session] setSessionPreset:AVCaptureSessionPresetHigh];

            [[self session] addInput:newVideoDeviceInput];
            [self setVideoDeviceInput:newVideoDeviceInput];

            // Create the data output only once; adding a fresh output on every
            // device change would pile up outputs on the session
            if (videoDataOutput == nil) {
                videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
                [session addOutput:videoDataOutput];

                // Deliver sample buffers on a private serial queue
                dispatch_queue_t queue = dispatch_queue_create("myQueue", NULL);
                [videoDataOutput setSampleBufferDelegate:self queue:queue];
                //[videoDataOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];

                // Ask for 32BGRA pixel buffers so videoImageFromSampleBuffer:
                // can hand the bytes straight to CGImageCreate
                NSString *key = (NSString *)kCVPixelBufferPixelFormatTypeKey;
                NSNumber *value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
                NSDictionary *videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
                [videoDataOutput setVideoSettings:videoSettings];

                dispatch_release(queue);
            }
        }
    }

    [[self session] commitConfiguration];
}

- (BOOL)setVideoDevice:(NSUInteger *)deviceNumber
{
    // Guard against a missing or out-of-range index before dereferencing it
    if (deviceNumber == NULL || *deviceNumber >= [videoDevices count])
        return NO;

    [self setSelectedVideoDevice:[videoDevices objectAtIndex:*deviceNumber]];

    // Selection succeeded only if a device is now attached to the input
    return ([self selectedVideoDevice] != nil);
}

- (BOOL)run
{
    // This method actually starts the cam running
    //NSLog(@"Inside the run method");
    [session startRunning];
    return YES;
}

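// The header declares a readonly `playing` property (getter=isPlaying) that this
// listing never implements; a minimal sketch of that getter, assuming it should
// simply mirror the session's running state:
- (BOOL)isPlaying
{
    return [session isRunning];
}
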
- (NSData *)getData
{
    //NSLog(@"*** Inside getData()..... The bytes to be returned is: %lu", [videoData length]);
    return videoData;
}

// Delegate routine that is called when a sample buffer was written
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    //NSLog(@"About to call imageFromSampleBuffer");
    //CIImage *image = imageFromSampleBuffer(sampleBuffer);

    //NSLog(@"About to call videoImageFromSampleBuffer");
    NSImage *image = [self videoImageFromSampleBuffer:&sampleBuffer];

    if (image == nil) {
        // Don't process the image
        //NSLog(@"The image would be nil, so nothing to do");
    } else {
        // process the image
        //NSLog(@"The image will be processed here");
        [self videoBytes:image];
        //[self convertVideoBytes:image];
    }
}

- (void)videoBytes:(NSImage *)videoImage
{
    //NSLog(@"Inside videoBytes");

    // TIFFRepresentation returns an autoreleased object; retain it (and release
    // the previous frame's data) so the ivar stays valid between delegate callbacks
    NSData *newData = [[videoImage TIFFRepresentation] retain];
    [videoData release];
    videoData = newData;

    // Print the bytes if you want. Just uncomment this line
    //NSLog(@"Bytes:%lu", videoData.length);
}

- (NSImage *)videoImageFromSampleBuffer:(CMSampleBufferRef *)sampleBuffer
{
    //NSLog(@"Inside videoImageFromSampleBuffer method");

    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(*sampleBuffer);
    // Lock the base address of the pixel buffer.
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    // Get the number of bytes per row for the pixel buffer.
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    // Get the pixel buffer width and height.
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    // Create a device-dependent RGB color space (cached across calls).
    static CGColorSpaceRef colorSpace = NULL;
    if (colorSpace == NULL) {
        colorSpace = CGColorSpaceCreateDeviceRGB();
        if (colorSpace == NULL) {
            // Handle the error appropriately; unlock the buffer before bailing out.
            CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
            return nil;
        }
    }

    // Get the base address of the pixel buffer.
    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
    // Get the data size for contiguous planes of the pixel buffer.
    size_t bufferSize = CVPixelBufferGetDataSize(imageBuffer);

    // Create a Quartz direct-access data provider that uses data we supply.
    CGDataProviderRef dataProvider =
        CGDataProviderCreateWithData(NULL, baseAddress, bufferSize, NULL);
    // Create a bitmap image from data supplied by the data provider.
    // 8 bits per component, 32 bits per pixel, matching the 32BGRA output format.
    CGImageRef cgImage =
        CGImageCreate(width, height, 8, 32, bytesPerRow,
                      colorSpace, kCGImageAlphaNoneSkipFirst | kCGBitmapByteOrder32Little,
                      dataProvider, NULL, true, kCGRenderingIntentDefault);
    CGDataProviderRelease(dataProvider);

    // Create and return an image object to represent the Quartz image.
    //NSImage *image = [NSImage imageWithCGImage:cgImage];
    NSImage *image = [[NSImage alloc] initWithCGImage:cgImage size:NSZeroSize];
    CGImageRelease(cgImage);

    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

    // Autorelease so the delegate callback doesn't leak one NSImage per frame
    return [image autorelease];
}

- (NSArray *)getDeviceArray
{
    NSUInteger arraySize = [videoDevices count];
    //NSLog(@"The number of items in the array is %lu", arraySize);
    NSMutableArray *mutableArray = [NSMutableArray array];

    NSUInteger i = 0;
    for (i = 0; i < arraySize; i++)
    {
        AVCaptureDevice *device = [videoDevices objectAtIndex:i];
        //NSLog(@"The device = %@", device);
        [mutableArray addObject:[device description]];
    }
    NSArray *stringArray = [NSArray arrayWithArray:mutableArray];
    return stringArray;
}

@end
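
Since getData hands back the TIFF bytes produced by videoBytes:, a consumer can rebuild an image from them directly, along these lines (a sketch; `cam` being an already-running JMCamController is an assumption):

// Rebuild an NSImage from the TIFF bytes returned by getData
NSData *tiff = [cam getData];
if (tiff != nil) {
    NSImage *frame = [[[NSImage alloc] initWithData:tiff] autorelease];
    NSLog(@"Decoded frame of size %@", NSStringFromSize([frame size]));
}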