Advertisement
Guest User

Untitled

a guest
Sep 15th, 2018
79
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
  1. //
  2. //  LaughingManViewController.m
  3. //  LaughingMan
  4. //
  5. //  Created by theinfamousrj on 2/6/12.
  6. //  Copyright (c) 2012 omfgp.com. All rights reserved.
  7. //
  8.  
  9. #import "LaughingManViewController.h"
  10.  
// Private class extension: capture pipeline, rendering context, face
// detector, and the overlay view are implementation detail, so none of it
// is exposed in the public header.
@interface LaughingManViewController()
// AVFoundation capture pipeline (wired together in viewDidLoad):
// session owns the camera input and the per-frame video output.
@property (nonatomic, strong) AVCaptureSession *session;
@property (nonatomic, strong) AVCaptureDevice *videoDevice;
@property (nonatomic, strong) AVCaptureDeviceInput *videoInput;
@property (nonatomic, strong) AVCaptureVideoDataOutput *frameOutput;

// Step 12: Add UIImageView to storyboard
// Step 13: Add outlet for UIImageView (displays the processed camera frame)
@property (nonatomic, strong) IBOutlet UIImageView *imageView;

// Step 15: Core Image context used to render CIImage -> CGImage each frame
@property (nonatomic, strong) CIContext *context;

// Step 25: CIDetector property for lazy instantiation later
@property (nonatomic, strong) CIDetector *faceDetector;

// Step 31: Overlay image view placed over the detected face
@property (nonatomic, strong) UIImageView *laughingMan;
@end
  30.  
  31.  
  32. @implementation LaughingManViewController
  33. @synthesize session = _session;
  34. @synthesize videoDevice = _videoDevice;
  35. @synthesize videoInput = _videoInput;
  36. @synthesize frameOutput = _frameOutput;
  37. @synthesize imageView = _imageView;
  38. @synthesize context = _context;
  39. @synthesize faceDetector = _faceDetector;
  40. @synthesize laughingMan = _laughingMan;
  41.  
  42. // Step 26: Lazy instantiation of CIDetector faceDetector for initialization
  43. // It is currently on CIDetectorAccuracyLow becauce CIDetectorAccuracyHigh drops the framerate significantly
  44. - (CIDetector *)faceDetector
  45. {
  46.     if (!_faceDetector) {
  47.         NSDictionary *detectorOptions = [NSDictionary dictionaryWithObjectsAndKeys:CIDetectorAccuracyLow,CIDetectorAccuracy,nil];
  48.         _faceDetector = [CIDetector detectorOfType:CIDetectorTypeFace context:nil options:detectorOptions];
  49.     }
  50.     return _faceDetector;
  51. }
  52.  
  53. // Step 16: Lazy instantiation of CIContext context for initialization
  54. - (CIContext *)context
  55. {
  56.     if (!_context) {
  57.         _context = [CIContext contextWithOptions:nil];
  58.     }
  59.     return _context;
  60. }
  61.  
  62. - (void)didReceiveMemoryWarning
  63. {
  64.     [super didReceiveMemoryWarning];
  65.     // Release any cached data, images, etc that aren't in use.
  66. }
  67.  
  68. #pragma mark - View lifecycle
  69.  
  70. - (void)viewDidLoad
  71. {
  72.     [super viewDidLoad];
  73.     // Step 1: Allocate a session
  74.     self.session = [[AVCaptureSession alloc] init];
  75.    
  76.     // Step 2: Set a session preset (resolution)
  77.     self.session.sessionPreset = AVCaptureSessionPreset640x480;
  78.    
  79.     // Step 3: Create video device
  80. //    self.videoDevice = [AVCaptureDevice defaultDeviceWithDeviceType:AVCaptureDeviceTypeBuiltInTelephotoCamera mediaType:AVMediaTypeVideo position:AVCaptureDevicePositionFront];
  81.    
  82.     self.videoDevice = [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[ AVCaptureDeviceTypeBuiltInWideAngleCamera ]
  83.                                                                               mediaType:AVMediaTypeVideo
  84.                                                                                position:AVCaptureDevicePositionFront].devices.firstObject;
  85.    
  86.     // Step 4: Create video input (in a real app  you'd want to pass an error object instead of nil)
  87.     NSError *error;
  88.     self.videoInput = [AVCaptureDeviceInput deviceInputWithDevice:self.videoDevice error:&error];
  89.    
  90.     AVCaptureVideoPreviewLayer *previewLayer = [[AVCaptureVideoPreviewLayer alloc]initWithSession:self.session];
  91.     [previewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
  92.     CALayer *rootLayer = [[self view]layer];
  93.     [rootLayer setMasksToBounds:YES];
  94.     [previewLayer setFrame:self.view.frame];
  95.     [rootLayer insertSublayer:previewLayer atIndex:0];
  96.    
  97.     // Step 5: Create frame output that will take session data
  98.     self.frameOutput = [[AVCaptureVideoDataOutput alloc] init];
  99.    
  100.     // Step 6: Set up pixel format for output
  101.     self.frameOutput.videoSettings = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey];
  102.    
  103.     // Step 7: Wire it all together
  104.     [self.session addInput:self.videoInput];
  105.     [self.session addOutput:self.frameOutput];
  106.    
  107.     // Step 11: Tell output that AVDemoViewController is the delegate for the output
  108.     // Dispatch queue runs on a different thread other than the UI thread
  109.     [self.frameOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
  110.    
  111.     // Step 8: Gets the device to take camera info and send it
  112.     [self.session startRunning];
  113.    
  114.     // Step 9 is in the header
  115.    
  116.     // Step 32: Add laughingMan to the image
  117.     self.laughingMan = [[UIImageView alloc] initWithImage:[UIImage imageNamed:@"h4x.png"]];
  118.     [self.laughingMan setHidden:YES];
  119.     [self.view addSubview:self.laughingMan];
  120. }
  121.  
  122. // Step 10: Implement delegate method
  123. - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
  124. {
  125.     // Can't set image property in the delegate if you run on anything but the main queue
  126.     // We wouldn't be able to modify the UI from a different thread
  127.    
  128.     // Step 14: Creating a reference to the sample buffer in a format that we can pass to coreImage (boilerplate code)
  129.     // coreImage is optimized for doing transformations, UIImage is NOT!
  130.     CVPixelBufferRef pb = CMSampleBufferGetImageBuffer(sampleBuffer);
  131.     CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pb];
  132.    
  133.     // Optional steps for filtering
  134.     // All filter steps are commented out with '///'
  135.     // To re-do the filter, remove the '///' and follow step 24
  136.     // Step 19: Create a filter
  137.     // Filters are in an NSDictionary type setup and are called by a key in the form of an NSString eg: @"CIHueAdjust"
  138.     ///CIFilter *filter = [CIFilter filterWithName:@"CIHueAdjust"];
  139.    
  140.     // Step 20: Set the defaults for the filter
  141.     ///[filter setDefaults];
  142.    
  143.     // Step 21: Send the filter an image
  144.     ///[filter setValue:ciImage forKey:@"inputImage"];
  145.    
  146.     // Step 22: Set the angle
  147.     ///[filter setValue:[NSNumber numberWithFloat:2.0] forKey:@"inputAngle"];
  148.    
  149.     // Step 23: Send the result of the filtered image back
  150.     ///CIImage *result = [filter valueForKey:@"outputImage"];
  151.    
  152.     // Step 27: Create an array of features and loop through it
  153.     NSArray *features = [self.faceDetector featuresInImage:ciImage];
  154.     bool faceFound = false;
  155.     for (CIFaceFeature *face in features) {
  156.         if (face.hasLeftEyePosition && face.hasRightEyePosition) {
  157.             CGPoint eyeCenter = CGPointMake(face.leftEyePosition.x*0.5+face.rightEyePosition.x*0.5, face.leftEyePosition.y*0.5+face.rightEyePosition.y*0.5);
  158.            
  159.             // Step 28: Set the position of the laughingMan based on mouth position
  160.             double scalex = self.imageView.bounds.size.height/ciImage.extent.size.width;
  161.             double scaley = self.imageView.bounds.size.width/ciImage.extent.size.height;
  162.             self.laughingMan.center = CGPointMake(scaley*(eyeCenter.y-self.laughingMan.bounds.size.height/24.0), scalex*(eyeCenter.x));
  163.            
  164.             // Step 29: Set the angle of the laughingMan using eye deltas
  165.             double deltax = face.leftEyePosition.x-face.rightEyePosition.x;
  166.             double deltay = face.leftEyePosition.y-face.rightEyePosition.y;
  167.             double angle = atan2(deltax, deltay);
  168.             self.laughingMan.transform = CGAffineTransformMakeRotation(angle+M_PI);
  169.            
  170.             // Step 30: Set the size based on the dist between the eyes
  171.             double scale = 8.0*sqrt((deltax*deltax)+(deltay+deltay));
  172.             self.laughingMan.bounds = CGRectMake(0, 0, scale, scale);
  173.             faceFound = true;
  174.         }
  175.     }
  176.    
  177.     // Step 33: If the face is found, apply the image to the face
  178.     if (faceFound) {
  179.         [self.laughingMan setHidden:NO];
  180.     } else {
  181.         [self.laughingMan setHidden:YES];
  182.     }
  183.    
  184.     // Step 17: Turn CoreImage into CGImage which can be used in UIImage
  185.     // Step 24: Change createCGImage:ciImage to createCGImage:result
  186.     CGImageRef ref = [self.context createCGImage:ciImage fromRect:ciImage.extent];
  187.     self.imageView.image = [UIImage imageWithCGImage:ref scale:1.0 orientation:UIImageOrientationRight];
  188.    
  189.     // Step 18: Release the reference
  190.     CGImageRelease(ref);
  191. }
  192.  
  193. - (void)viewDidUnload
  194. {
  195.     [super viewDidUnload];
  196.     // Release any retained subviews of the main view.
  197.     // e.g. self.myOutlet = nil;
  198. }
  199.  
  200. - (void)viewWillAppear:(BOOL)animated
  201. {
  202.     [super viewWillAppear:animated];
  203. }
  204.  
  205. - (void)viewDidAppear:(BOOL)animated
  206. {
  207.     [super viewDidAppear:animated];
  208. }
  209.  
  210. - (void)viewWillDisappear:(BOOL)animated
  211. {
  212.     [super viewWillDisappear:animated];
  213. }
  214.  
  215. - (void)viewDidDisappear:(BOOL)animated
  216. {
  217.     [super viewDidDisappear:animated];
  218. }
  219.  
// Lock the interface in its initial orientation: returning NO for every
// candidate orientation disables autorotation entirely.
// NOTE(review): deprecated since iOS 6 — modern code should override
// shouldAutorotate / supportedInterfaceOrientations instead.
- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation
{
    return NO;
}
  224.  
  225. @end
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement