Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- This is the JMCamController.h file
- ==================================
- //
- // JMCamController.h
- // JMCamController
- //
- // Created by Gary Gardner on 11/28/12.
- // Copyright (c) 2012 Gary Gardner. All rights reserved.
- //
- #import <Cocoa/Cocoa.h>
- #import <AVFoundation/AVFoundation.h>
- #import <QuartzCore/QuartzCore.h>
- #import <CoreMedia/CoreMedia.h>
- #import <AppKit/AppKit.h>
- @class AVCaptureVideoPreviewLayer;
- @class AVCaptureSession;
- @class AVCaptureDeviceInput;
- @class AVCaptureConnection;
- @class AVCaptureDevice;
// Controller that wraps an AVCaptureSession for grabbing video frames from an
// attached camera and exposing the latest frame as TIFF bytes.
// Manual retain/release (pre-ARC) code: ivar ownership is handled in -init/-dealloc.
@interface JMCamController : NSObject
{
@private
    AVCaptureSession *session;                  // owns the capture graph
    AVCaptureDeviceInput *videoDeviceInput;     // input for the currently selected device
    AVCaptureVideoDataOutput *videoDataOutput;  // delivers sample buffers to the delegate callback
    //AVAssetWriter *assetWriter;
    AVAssetWriterInput *videoInput;             // NOTE(review): declared but never used in the .m shown — confirm before removing
    NSArray *videoDevices;                      // all video + muxed capture devices (see -refreshDevices)
    NSArray *observers;                         // NSNotificationCenter block-observer tokens registered in -init
    NSData *videoData;                          // most recent frame encoded as TIFF (see -videoBytes:)
    //CMTime frameDuration;
}
#pragma mark Device Selection
// All currently attached video-capable devices, refreshed on connect/disconnect.
@property (retain) NSArray *videoDevices;
// Computed property: backed by videoDeviceInput's device, not by an ivar.
// NOTE(review): `assign` on an object property — safe only because the getter/setter
// are custom; a synthesized assign setter would leave a dangling pointer.
@property (assign) AVCaptureDevice *selectedVideoDevice;
//@property (retain) NSArray *observers;
//@property (retain) AVCaptureDeviceInput *videoDeviceInput;
#pragma mark Recording
@property (retain) AVCaptureSession *session;
#pragma mark Transport Controls
@property (readonly,getter=isPlaying) BOOL playing;
//@property (nonatomic) float framesPerSecond;
// Selects the device at index *deviceNumber in videoDevices; YES on success.
- (BOOL)setVideoDevice:(NSUInteger *)deviceNumber;
// Starts the capture session; always returns YES.
- (BOOL)run;
// Latest captured frame as TIFF bytes, or nil before the first frame arrives.
- (NSData *)getData;
// Rebuilds videoDevices and deselects a device that has disappeared.
- (void)refreshDevices;
// Encodes the given frame image as TIFF and stores it for -getData.
- (void)videoBytes:(NSImage*)videoImage;
// AVCaptureVideoDataOutputSampleBufferDelegate callback (one call per frame).
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection;
// Converts a sample buffer into an NSImage. Caller is responsible for the
// returned image's memory (the implementation allocs it; see the .m).
- (NSImage*)videoImageFromSampleBuffer:(CMSampleBufferRef*)sampleBuffer;
// Returns -description strings for every device in videoDevices, in order.
- (NSArray *)getDeviceArray;
@end
- This is the JMCamController.m file
- ==================================
- //
- // JMCamController.m
- // JMCamController
- //
- // Created by Gary Gardner on 11/28/12.
- // Copyright (c) 2012 Gary Gardner. All rights reserved.
- //
- #import "JMCamController.h"
- #define DEFAULT_FRAMES_PER_SECOND 5.0
// Class extension: private API plus the sample-buffer delegate conformance,
// kept out of the public header.
@interface JMCamController () <AVCaptureVideoDataOutputSampleBufferDelegate>
// Properties for internal use
@property (retain) AVCaptureDeviceInput *videoDeviceInput;
@property (retain) NSArray *observers;
// Methods for internal use
- (void)refreshDevices;
@end
@implementation JMCamController
@synthesize videoDeviceInput;
@synthesize videoDevices;
@synthesize session;
@synthesize observers;

// Designated initializer: creates the capture session, registers notification
// observers, selects a default device, and populates the device list.
- (id)init
{
    self = [super init];
    //frameDuration = CMTimeMakeWithSeconds(1. / DEFAULT_FRAMES_PER_SECOND, 90000);
    if (self) {
        // Create a capture session
        session = [[AVCaptureSession alloc] init];
        // Capture Notification Observers.
        // NOTE(review): these blocks capture `self` strongly; while the observers
        // stay registered, the notification center keeps the blocks (and thus
        // self) alive — confirm the intended teardown path.
        NSNotificationCenter *notificationCenter = [NSNotificationCenter defaultCenter];
        id runtimeErrorObserver = [notificationCenter addObserverForName:AVCaptureSessionRuntimeErrorNotification
                                                                  object:session
                                                                   queue:[NSOperationQueue mainQueue]
                                                              usingBlock:^(NSNotification *note) {
            NSLog(@"an AVCaptureSessionRuntimeErrorNotification occurred %@", AVCaptureSessionErrorKey);
        }];
        id didStartRunningObserver = [notificationCenter addObserverForName:AVCaptureSessionDidStartRunningNotification
                                                                     object:session
                                                                      queue:[NSOperationQueue mainQueue]
                                                                 usingBlock:^(NSNotification *note) {
            NSLog(@"did start running");
        }];
        id didStopRunningObserver = [notificationCenter addObserverForName:AVCaptureSessionDidStopRunningNotification
                                                                    object:session
                                                                     queue:[NSOperationQueue mainQueue]
                                                                usingBlock:^(NSNotification *note) {
            NSLog(@"did stop running");
        }];
        // Keep the device list current when cameras are plugged/unplugged.
        id deviceWasConnectedObserver = [notificationCenter addObserverForName:AVCaptureDeviceWasConnectedNotification
                                                                        object:nil
                                                                         queue:[NSOperationQueue mainQueue]
                                                                    usingBlock:^(NSNotification *note) {
            [self refreshDevices];
        }];
        id deviceWasDisconnectedObserver = [notificationCenter addObserverForName:AVCaptureDeviceWasDisconnectedNotification
                                                                           object:nil
                                                                            queue:[NSOperationQueue mainQueue]
                                                                       usingBlock:^(NSNotification *note) {
            [self refreshDevices];
        }];
        // NOTE(review): NSInvalidArgumentException is an *exception* name, not a
        // notification name — nothing in Cocoa posts a notification by that name,
        // so this observer will never fire. Likely a misunderstanding; confirm
        // and remove.
        id invalidArgumentExceptionObserver = [notificationCenter addObserverForName:
                                               NSInvalidArgumentException
                                                                              object:nil
                                                                               queue:[NSOperationQueue mainQueue] usingBlock:^(NSNotification *note) {NSLog(@"NSInvalidArgumentException Error");
        }];
        // Retained token array; must be unregistered and released at teardown.
        observers = [[NSArray alloc] initWithObjects:runtimeErrorObserver, didStartRunningObserver, didStopRunningObserver, deviceWasConnectedObserver, deviceWasDisconnectedObserver, invalidArgumentExceptionObserver, nil];
        // Select devices if any exist: prefer a pure video device, fall back to
        // a muxed (audio+video) one.
        AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
        if (videoDevice) {
            [self setSelectedVideoDevice:videoDevice];
        } else {
            [self setSelectedVideoDevice:[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeMuxed]];
        }
        // Initial refresh of device list
        [self refreshDevices];
    }
    return self;
}
// MRC teardown. The original leaked the observers array and — worse — never
// unregistered the block observers, so NSNotificationCenter kept delivering
// to (and retaining) them after this object's other state was released.
- (void)dealloc
{
    NSNotificationCenter *notificationCenter = [NSNotificationCenter defaultCenter];
    for (id observer in observers) {
        [notificationCenter removeObserver:observer];
    }
    [observers release];
    [videoDevices release];
    [session release];
    [videoDeviceInput release];
    [videoDataOutput release];
    // videoData is deliberately not released here: as written, -videoBytes:
    // does not give this class an owning reference to it.
    [super dealloc];
}
// Completion callback for a presented error sheet. No recovery is attempted
// here; the event is only traced so a caller higher up can handle it.
- (void)didPresentErrorWithRecovery:(BOOL)didRecover contextInfo:(void *)contextInfo
{
    NSLog(@"An error occurred and should be bubbled up");
}
// Rebuilds the device list from all video devices plus muxed (audio+video)
// devices, then — inside a configuration transaction so the session updates
// atomically — deselects the current device if it has disappeared.
- (void)refreshDevices
{
    NSArray *videoOnly = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    NSArray *muxed = [AVCaptureDevice devicesWithMediaType:AVMediaTypeMuxed];
    [self setVideoDevices:[videoOnly arrayByAddingObjectsFromArray:muxed]];

    [[self session] beginConfiguration];
    if (![[self videoDevices] containsObject:[self selectedVideoDevice]]) {
        [self setSelectedVideoDevice:nil];
    }
    [[self session] commitConfiguration];
}
// Custom getter: the "selected" device is whichever device backs the current
// session input. Returns nil when no input is installed (messaging nil is a
// no-op that yields nil).
- (AVCaptureDevice *)selectedVideoDevice
{
    return [[self videoDeviceInput] device];
}
// Custom setter: swaps the session's device input (and its data output) over
// to the given device inside a single configuration transaction.
// Pass nil to deselect and tear down the pipeline.
- (void)setSelectedVideoDevice:(AVCaptureDevice *)selectedVideoDevice
{
    [[self session] beginConfiguration];

    // Remove the old device input from the session.
    if ([self videoDeviceInput]) {
        [session removeInput:[self videoDeviceInput]];
        [self setVideoDeviceInput:nil];
    }

    // FIX: also remove and release the previous data output. The original
    // alloc'd a fresh AVCaptureVideoDataOutput on every device change without
    // removing the old one, leaking outputs and piling them onto the session.
    if (videoDataOutput) {
        [session removeOutput:videoDataOutput];
        [videoDataOutput release];
        videoDataOutput = nil;
    }

    if (selectedVideoDevice) {
        NSError *error = nil;
        // Create a device input for the device and add it to the session.
        AVCaptureDeviceInput *newVideoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:selectedVideoDevice error:&error];
        // Per Cocoa convention, failure is signalled by the nil return, not by
        // `error` being non-nil. Errors are deliberately swallowed here, as in
        // the original (best-effort selection).
        if (newVideoDeviceInput != nil) {
            // Fall back to the High preset if the device can't do the current one.
            if (![selectedVideoDevice supportsAVCaptureSessionPreset:[session sessionPreset]])
                [[self session] setSessionPreset:AVCaptureSessionPresetHigh];
            [[self session] addInput:newVideoDeviceInput];
            [self setVideoDeviceInput:newVideoDeviceInput];

            // Fresh data output: BGRA frames delivered to our delegate on a
            // private serial queue.
            videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
            [session addOutput:videoDataOutput];
            dispatch_queue_t queue = dispatch_queue_create("myQueue", NULL);
            [videoDataOutput setSampleBufferDelegate:self queue:queue];
            NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey;
            NSNumber* value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
            NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
            [videoDataOutput setVideoSettings:videoSettings];
            dispatch_release(queue);  // the output retains the queue (pre-ARC GCD)
        }
    }
    [[self session] commitConfiguration];
}
// Selects the capture device at index *deviceNumber in videoDevices.
// Returns YES only if an input for that device was actually installed.
// FIX: the original dereferenced deviceNumber unconditionally and indexed the
// array without a bounds check, so a NULL pointer crashed and an out-of-range
// index made objectAtIndex: throw NSRangeException.
- (BOOL)setVideoDevice:(NSUInteger *)deviceNumber
{
    if (deviceNumber == NULL || *deviceNumber >= [videoDevices count]) {
        return NO;
    }
    [self setSelectedVideoDevice:[videoDevices objectAtIndex:*deviceNumber]];
    // Success means the setter installed an input backed by a device.
    return [self selectedVideoDevice] != nil;
}
// Starts the camera. AVCaptureSession begins delivering frames asynchronously;
// this method always reports success.
- (BOOL)run
{
    [[self session] startRunning];
    return YES;
}
// Most recent frame as TIFF bytes (stored by -videoBytes:); nil until the
// first frame has been captured.
// NOTE(review): read here while -videoBytes: writes it on the capture queue —
// no synchronization, same as the original; confirm callers tolerate this.
- (NSData *)getData
{
    return videoData;
}
// Delegate routine called once per captured frame, on the serial dispatch
// queue configured in -setSelectedVideoDevice:. Converts the buffer to an
// NSImage and hands it to -videoBytes: for TIFF encoding.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    NSImage *image = [self videoImageFromSampleBuffer:&sampleBuffer];
    if (image != nil) {
        [self videoBytes:image];
        // FIX: -videoImageFromSampleBuffer: returns an alloc'd (+1) image that
        // the original never released — leaking one NSImage per frame (MRC).
        [image release];
    }
    // A nil image means conversion failed; nothing to process.
}
// Encodes the frame as TIFF and stores it for -getData.
// FIX: the original assigned the *autoreleased* TIFF data straight to the
// ivar without retaining (MRC), so -getData could return a dangling pointer
// once the autorelease pool drained. Retain the new data and release the
// previous frame's data.
- (void)videoBytes:(NSImage*)videoImage
{
    NSData *newData = [[videoImage TIFFRepresentation] retain];
    [videoData release];
    videoData = newData;
}
// Converts a BGRA sample buffer into an NSImage by wrapping the pixel data in
// a CGImage. Returns an owned (+1, alloc'd) image — the caller must release
// it — or nil if the RGB color space cannot be created.
// FIX: the original locked the pixel buffer's base address *before* the
// color-space check, so the early `return nil` left the buffer locked forever.
// The check is hoisted above the lock so every lock is balanced by an unlock.
- (NSImage*)videoImageFromSampleBuffer:(CMSampleBufferRef*)sampleBuffer
{
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(*sampleBuffer);

    // Device-dependent RGB color space, created once and cached for the
    // lifetime of the process (intentionally never released).
    static CGColorSpaceRef colorSpace = NULL;
    if (colorSpace == NULL) {
        colorSpace = CGColorSpaceCreateDeviceRGB();
        if (colorSpace == NULL) {
            return nil;  // nothing locked yet, so nothing to unlock
        }
    }

    // Lock the base address of the pixel buffer while we read its geometry
    // and wrap its memory.
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bufferSize = CVPixelBufferGetDataSize(imageBuffer);

    // Quartz direct-access data provider over the buffer's pixel memory.
    CGDataProviderRef dataProvider =
        CGDataProviderCreateWithData(NULL, baseAddress, bufferSize, NULL);
    // 32-bit little-endian BGRA, matching the output's kCVPixelFormatType_32BGRA.
    CGImageRef cgImage =
        CGImageCreate(width, height, 8, 32, bytesPerRow,
                      colorSpace, kCGImageAlphaNoneSkipFirst | kCGBitmapByteOrder32Little,
                      dataProvider, NULL, true, kCGRenderingIntentDefault);
    CGDataProviderRelease(dataProvider);

    // NOTE(review): the CGImage references the pixel buffer's memory via the
    // provider, yet the buffer is unlocked before the image is consumed — same
    // as the original; confirm frames are fully copied before reuse.
    NSImage *image = [[NSImage alloc] initWithCGImage:cgImage size:NSZeroSize];
    CGImageRelease(cgImage);
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    return image;
}
// Returns a human-readable -description string for every device in
// videoDevices, in the same order. Empty array when no devices are attached.
- (NSArray *)getDeviceArray
{
    NSMutableArray *descriptions = [NSMutableArray arrayWithCapacity:[videoDevices count]];
    for (AVCaptureDevice *device in videoDevices) {
        [descriptions addObject:[device description]];
    }
    return [NSArray arrayWithArray:descriptions];
}
@end
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement