Advertisement
Not a member of Pastebin yet? Sign up — it unlocks many cool features!
import UIKit
import AVFoundation
import Accelerate

// File-scope so the sample-buffer callback below can push frames into it
// directly. NOTE(review): the class has a commented-out property version of
// this; consider moving it back into ViewController and confirming no other
// file references the global.
var customPreviewLayer: AVCaptureVideoPreviewLayer?
/// Displays a live camera feed and re-renders each frame's luma (Y) plane as a
/// grayscale image into the preview layer.
class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {
    /// Active capture session; created in setupCameraSession().
    var captureSession: AVCaptureSession?
    /// Video-frame output attached to the session.
    var dataOutput: AVCaptureVideoDataOutput?
    //var customPreviewLayer: AVCaptureVideoPreviewLayer?
    @IBOutlet weak var camView: UIView!

    override func viewWillAppear(animated: Bool) {
        // BUG FIX: the original called super.viewDidAppear(animated) from
        // inside viewWillAppear; each lifecycle override must forward to its
        // own super method.
        super.viewWillAppear(animated)
        //setupCameraSession()
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        setupCameraSession()
        // NOTE(review): startRunning() is a blocking call; Apple recommends
        // invoking it off the main thread. Kept here to preserve the original
        // startup order.
        self.captureSession?.startRunning()
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    /// Configures the capture session: default video device as input at 1080p,
    /// a preview layer inserted into camView, and a video-data output that
    /// delivers 420f (bi-planar full-range YpCbCr) frames to `self` on a
    /// dedicated serial queue.
    func setupCameraSession() {
        let session = AVCaptureSession()
        self.captureSession = session

        // BUG FIX: the original called commitConfiguration() without a
        // matching beginConfiguration(); batch all changes atomically.
        session.beginConfiguration()
        session.sessionPreset = AVCaptureSessionPreset1920x1080

        // Capture device (default camera for video).
        let inputDevice: AVCaptureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
        do {
            let deviceInput = try AVCaptureDeviceInput(device: inputDevice)
            if session.canAddInput(deviceInput) {
                session.addInput(deviceInput)
            }
        } catch let error as NSError {
            // Camera unavailable or permission denied; the session is left
            // without an input (same best-effort behavior as the original).
            print(error)
        }

        // Preview layer covering the host view.
        customPreviewLayer = AVCaptureVideoPreviewLayer(session: session)
        customPreviewLayer!.frame = camView.bounds
        customPreviewLayer!.videoGravity = AVLayerVideoGravityResizeAspect
        customPreviewLayer!.connection.videoOrientation = AVCaptureVideoOrientation.Portrait
        self.camView.layer.addSublayer(customPreviewLayer!)
        print("Cam layer added")

        // Video-data output: bi-planar 4:2:0 full-range luma/chroma, dropping
        // frames that arrive while the delegate is still busy.
        let output = AVCaptureVideoDataOutput()
        output.videoSettings = [
            String(kCVPixelBufferPixelFormatTypeKey): Int(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
        ]
        output.alwaysDiscardsLateVideoFrames = true
        if session.canAddOutput(output) {
            session.addOutput(output)
        }
        self.dataOutput = output
        session.commitConfiguration()

        // Deliver sample buffers on a dedicated serial queue, not main.
        let queue: dispatch_queue_t = dispatch_queue_create("VideoQueue", DISPATCH_QUEUE_SERIAL)
        output.setSampleBufferDelegate(self, queue: queue)
    }

    /// Delegate callback (runs on "VideoQueue"): wraps the frame's luma plane
    /// in a grayscale bitmap context and displays the resulting CGImage in the
    /// preview layer.
    func captureOutput(captureOutput: AVCaptureOutput, didOutputSampleBuffer sampleBuffer: CMSampleBufferRef, fromConnection connection: AVCaptureConnection) {
        guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }

        CVPixelBufferLockBaseAddress(imageBuffer, 0)
        // BUG FIX: the original locked the base address and never unlocked it,
        // which starves the capture pipeline of pixel buffers. defer
        // guarantees the matching unlock on every exit path.
        defer { CVPixelBufferUnlockBaseAddress(imageBuffer, 0) }

        // Plane 0 of kCVPixelFormatType_420YpCbCr8BiPlanarFullRange is the
        // 8-bit-per-pixel luma (Y) plane.
        let width = CVPixelBufferGetWidthOfPlane(imageBuffer, 0)
        let height = CVPixelBufferGetHeightOfPlane(imageBuffer, 0)
        let bytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0)
        let lumaBuffer = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0)

        // BUG FIX (the original "//problematic" line): an 8-bit gray context
        // cannot carry an alpha channel — Gray + PremultipliedLast is not a
        // supported CGBitmapContext pixel format and context creation fails.
        // Gray colorspace with alpha .None is the valid combination.
        let grayColorSpace = CGColorSpaceCreateDeviceGray()!
        guard let context = CGBitmapContextCreate(lumaBuffer, width, height, 8, bytesPerRow,
                                                  grayColorSpace, CGImageAlphaInfo.None.rawValue),
              let dstImageFilter = CGBitmapContextCreateImage(context) else {
            return
        }

        // Hop to the main queue to touch the layer. NOTE(review): dispatch_sync
        // blocks the video queue until the main thread services it; kept from
        // the original, but dispatch_async would avoid stalling capture.
        dispatch_sync(dispatch_get_main_queue(), { () -> Void in
            customPreviewLayer!.contents = dstImageFilter as AnyObject
        })
    }
}
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement