Advertisement
Not a member of Pastebin yet?
Sign up — it unlocks many cool features!
- //
- // ViewController.swift
- // VisionSample
- //
- // Created by chris on 19/06/2017.
- // Copyright © 2017 MRM Brand Ltd. All rights reserved.
- //
- import UIKit
- import AVFoundation
- import Vision
/// Live-camera rectangle detector: streams video frames from the default
/// camera, runs a `VNDetectRectanglesRequest` on each frame, and draws the
/// strongest detected rectangle over `targetImageView`.
class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {

    // MARK: - Capture

    /// Video capture session feeding both the on-screen preview and Vision.
    let session = AVCaptureSession()
    /// Layer that renders the live camera feed inside `previewView`.
    var previewLayer: AVCaptureVideoPreviewLayer!
    /// Serial queue on which sample buffers are delivered and Vision runs.
    let captureQueue = DispatchQueue(label: "captureQueue")
    /// Optional gradient overlay (not installed anywhere in this class).
    var gradientLayer: CAGradientLayer!

    // MARK: - Vision

    /// Requests performed for every captured frame.
    var visionRequests = [VNRequest]()
    /// Minimum confidence an observation needs before it is drawn.
    var recognitionThreshold: Float = 0.25

    // MARK: - Outlets

    @IBOutlet weak var thresholdStackView: UIStackView!
    @IBOutlet weak var threshholdLabel: UILabel!
    @IBOutlet weak var threshholdSlider: UISlider!
    @IBOutlet weak var previewView: UIView!
    @IBOutlet weak var resultView: UILabel!
    @IBOutlet private weak var targetImageView: UIImageView!

    override func viewDidLoad() {
        super.viewDidLoad()

        // Get hold of the default video camera.
        guard let camera = AVCaptureDevice.default(for: .video) else {
            fatalError("No video camera available")
        }

        do {
            // Add the preview layer (sized in viewDidLayoutSubviews).
            previewLayer = AVCaptureVideoPreviewLayer(session: session)
            previewView.layer.addSublayer(previewLayer)

            // Create the capture input and the video output.
            let cameraInput = try AVCaptureDeviceInput(device: camera)
            let videoOutput = AVCaptureVideoDataOutput()
            videoOutput.setSampleBufferDelegate(self, queue: captureQueue)
            videoOutput.alwaysDiscardsLateVideoFrames = true
            videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA]

            // Configure the session atomically and verify each attachment:
            // addInput/addOutput raise an Objective-C exception (not a Swift
            // error) when the session rejects them, so check first.
            session.beginConfiguration()
            session.sessionPreset = .high
            guard session.canAddInput(cameraInput) else {
                fatalError("Cannot add camera input to the capture session")
            }
            session.addInput(cameraInput)
            guard session.canAddOutput(videoOutput) else {
                fatalError("Cannot add video output to the capture session")
            }
            session.addOutput(videoOutput)
            // Make sure frames are delivered in portrait orientation.
            videoOutput.connection(with: .video)?.videoOrientation = .portrait
            session.commitConfiguration()

            // startRunning() blocks until the capture hardware spins up, so
            // keep it off the main thread.
            captureQueue.async { [weak self] in
                self?.session.startRunning()
            }

            // Set up the Vision request; only the single strongest rectangle
            // is reported.
            let request = VNDetectRectanglesRequest(
                completionHandler: detectRectanglesCompletionHandler
            )
            request.maximumObservations = 1
            visionRequests = [request]
        } catch {
            fatalError(error.localizedDescription)
        }

        updateThreshholdLabel()
    }

    /// Mirrors the current threshold value into its label.
    func updateThreshholdLabel () {
        self.threshholdLabel.text = "Threshold: " + String(format: "%.2f", recognitionThreshold)
    }

    // MARK: - Vision Methods

    /// Completion handler for `VNDetectRectanglesRequest`: clears any previous
    /// overlay and draws every observation that clears `recognitionThreshold`.
    /// Runs on the main thread because it touches the layer tree.
    private func detectRectanglesCompletionHandler(request: VNRequest, error: Error?) {
        DispatchQueue.main.async { [weak self] in
            guard let self = self else { return }
            // One guard covers both "no results" and "wrong result type";
            // the original checked the count and the cast separately.
            guard let observations = request.results as? [VNRectangleObservation],
                  !observations.isEmpty else {
                print("No results")
                return
            }
            self.targetImageView.layer.sublayers = nil
            // Honour the threshold slider: previously recognitionThreshold
            // was never consulted, so the threshold UI had no effect.
            for observation in observations where observation.confidence >= self.recognitionThreshold {
                self.drawObservedRectangle(observation)
            }
        }
    }

    /// Draws `rectangle`'s four corners (normalised [0, 1] coordinates)
    /// scaled into `targetImageView`'s coordinate space as a translucent
    /// blue shape layer.
    private func drawObservedRectangle(_ rectangle: VNRectangleObservation) {
        guard let targetSize = targetImageView?.frame.size else {
            print("targetImageView doesn't exist")
            return
        }
        let rectangleShape = CAShapeLayer()
        rectangleShape.opacity = 0.5
        rectangleShape.lineWidth = 5
        rectangleShape.lineJoin = kCALineJoinRound
        rectangleShape.strokeColor = UIColor.blue.cgColor
        rectangleShape.fillColor = UIColor.blue.withAlphaComponent(0.6).cgColor

        // NOTE(review): Vision uses a bottom-left origin while UIKit uses
        // top-left; no y-flip is applied here — verify that the .downMirrored
        // orientation passed to the request handler compensates for this.
        let rectanglePath = UIBezierPath()
        rectanglePath.move(to: rectangle.topLeft.scaled(to: targetSize))
        rectanglePath.addLine(to: rectangle.topRight.scaled(to: targetSize))
        rectanglePath.addLine(to: rectangle.bottomRight.scaled(to: targetSize))
        rectanglePath.addLine(to: rectangle.bottomLeft.scaled(to: targetSize))
        rectanglePath.close()
        rectangleShape.path = rectanglePath.cgPath
        targetImageView?.layer.addSublayer(rectangleShape)
    }

    override func viewDidLayoutSubviews() {
        super.viewDidLayoutSubviews()
        previewLayer.frame = self.previewView.bounds
        // Keep the preview upright. Note the deliberate left/right swap:
        // device orientation and video orientation are mirrored for
        // landscape on this capture pipeline.
        let orientation: UIDeviceOrientation = UIDevice.current.orientation
        switch orientation {
        case .portrait:
            previewLayer?.connection?.videoOrientation = .portrait
        case .landscapeRight:
            previewLayer?.connection?.videoOrientation = .landscapeLeft
        case .landscapeLeft:
            previewLayer?.connection?.videoOrientation = .landscapeRight
        case .portraitUpsideDown:
            previewLayer?.connection?.videoOrientation = .portraitUpsideDown
        default:
            previewLayer?.connection?.videoOrientation = .portrait
        }
    }

    /// Delegate callback for every captured frame: wraps the pixel buffer in
    /// a `VNImageRequestHandler` and performs the configured Vision requests
    /// on the capture queue.
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
            return
        }
        connection.videoOrientation = .portrait

        // Pass the camera intrinsics through when the device attaches them;
        // Vision uses them to improve geometric accuracy.
        var requestOptions: [VNImageOption: Any] = [:]
        if let cameraIntrinsicData = CMGetAttachment(sampleBuffer, kCMSampleBufferAttachmentKey_CameraIntrinsicMatrix, nil) {
            requestOptions = [.cameraIntrinsics: cameraIntrinsicData]
        }
        // For orientation values see kCGImagePropertyOrientation.
        let imageRequestHandler = VNImageRequestHandler(cvPixelBuffer: pixelBuffer, orientation: .downMirrored, options: requestOptions)
        do {
            try imageRequestHandler.perform(self.visionRequests)
        } catch {
            print(error)
        }
    }

    /// Freezes the capture session when the user taps the screen.
    @IBAction func userTapped(sender: Any) {
        session.stopRunning()
    }

    /// Keeps `recognitionThreshold` (and its label) in sync with the slider.
    @IBAction func sliderValueChanged(slider: UISlider) {
        self.recognitionThreshold = slider.value
        updateThreshholdLabel()
    }
}
extension CGPoint {
    /// Maps a point expressed in Vision's normalised [0, 1] space into a
    /// view of the given size by multiplying each component.
    func scaled(to size: CGSize) -> CGPoint {
        let scaledX = x * size.width
        let scaledY = y * size.height
        return CGPoint(x: scaledX, y: scaledY)
    }
}
extension CGRect {
    /// Maps a rectangle expressed in Vision's normalised [0, 1] space into a
    /// view of the given size, scaling origin and extent component-wise.
    func scaled(to size: CGSize) -> CGRect {
        let scaledOrigin = CGPoint(
            x: origin.x * size.width,
            y: origin.y * size.height
        )
        let scaledSize = CGSize(
            width: self.size.width * size.width,
            height: self.size.height * size.height
        )
        return CGRect(origin: scaledOrigin, size: scaledSize)
    }
}
Advertisement
Add Comment
Please sign in to add a comment.
Advertisement