//
//  ViewController.swift
//  VisionSample
//
//  Created by chris on 19/06/2017.
//  Copyright © 2017 MRM Brand Ltd. All rights reserved.
//

import UIKit
import AVFoundation
import Vision

class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {

    // video capture session
    let session = AVCaptureSession()
    // preview layer
    var previewLayer: AVCaptureVideoPreviewLayer!
    // queue for processing video frames
    let captureQueue = DispatchQueue(label: "captureQueue")
    // overlay layer
    var gradientLayer: CAGradientLayer!
    // vision requests
    var visionRequests = [VNRequest]()

    var recognitionThreshold: Float = 0.25

    @IBOutlet weak var thresholdStackView: UIStackView!
    @IBOutlet weak var threshholdLabel: UILabel!
    @IBOutlet weak var threshholdSlider: UISlider!

    @IBOutlet weak var previewView: UIView!
    @IBOutlet weak var resultView: UILabel!
    @IBOutlet private weak var targetImageView: UIImageView!

    override func viewDidLoad() {
        super.viewDidLoad()
        // get hold of the default video camera
        guard let camera = AVCaptureDevice.default(for: .video) else {
            fatalError("No video camera available")
        }
        do {
            // add the preview layer
            previewLayer = AVCaptureVideoPreviewLayer(session: session)
            previewView.layer.addSublayer(previewLayer)

            // // add a slight gradient overlay so we can read the results easily
            // gradientLayer = CAGradientLayer()
            // gradientLayer.colors = [
            //     UIColor(red: 0, green: 0, blue: 0, alpha: 0.7).cgColor,
            //     UIColor(red: 0, green: 0, blue: 0, alpha: 0.0).cgColor,
            // ]
            // gradientLayer.locations = [0.0, 0.3]
            // self.previewView.layer.addSublayer(gradientLayer)

            // create the capture input and the video output
            let cameraInput = try AVCaptureDeviceInput(device: camera)

            let videoOutput = AVCaptureVideoDataOutput()
            videoOutput.setSampleBufferDelegate(self, queue: captureQueue)
            videoOutput.alwaysDiscardsLateVideoFrames = true
            videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA]
            session.sessionPreset = .high

            // wire up the session
            session.addInput(cameraInput)
            session.addOutput(videoOutput)

            // make sure we are in portrait mode
            let conn = videoOutput.connection(with: .video)
            conn?.videoOrientation = .portrait

            // start the session
            session.startRunning()

            // // set up the vision model
            // guard let resNet50Model = try? VNCoreMLModel(for: Resnet50().model) else {
            //     fatalError("Could not load model")
            // }
            // // set up the request using our vision model
            // let classificationRequest = VNCoreMLRequest(model: resNet50Model, completionHandler: handleClassifications)
            // classificationRequest.imageCropAndScaleOption = .centerCrop
            // visionRequests = [classificationRequest]
            let request = VNDetectRectanglesRequest(
                completionHandler: detectRectanglesCompletionHandler
            )
            request.maximumObservations = 1

            visionRequests = [request]
        } catch {
            fatalError(error.localizedDescription)
        }

        updateThreshholdLabel()
    }

    func updateThreshholdLabel() {
        self.threshholdLabel.text = "Threshold: " + String(format: "%.2f", recognitionThreshold)
    }

    private func detectRectanglesCompletionHandler(request: VNRequest, error: Error?) {
        DispatchQueue.main.async { [weak self] in
            guard let count = request.results?.count, count > 0 else {
                print("No results")
                return
            }

            guard let observations = request.results as? [VNRectangleObservation] else {
                print("No rectangles")
                return
            }

            // clear the previous frame's rectangles before drawing the new ones
            self?.targetImageView.layer.sublayers = nil

            for observation in observations {
                self?.drawObservedRectangle(observation)
            }
        }
    }

    // MARK: - Vision methods

    private func drawObservedRectangle(_ rectangle: VNRectangleObservation) {
        guard let targetSize = targetImageView?.frame.size else {
            print("targetImageView doesn't exist")
            return
        }

        let rectangleShape = CAShapeLayer()
        rectangleShape.opacity = 0.5
        rectangleShape.lineWidth = 5
        rectangleShape.lineJoin = kCALineJoinRound
        rectangleShape.strokeColor = UIColor.blue.cgColor
        rectangleShape.fillColor = UIColor.blue.withAlphaComponent(0.6).cgColor

        // Vision reports the corners as normalized (0...1) points; scale them to the view
        let rectanglePath = UIBezierPath()
        rectanglePath.move(to: rectangle.topLeft.scaled(to: targetSize))
        rectanglePath.addLine(to: rectangle.topRight.scaled(to: targetSize))
        rectanglePath.addLine(to: rectangle.bottomRight.scaled(to: targetSize))
        rectanglePath.addLine(to: rectangle.bottomLeft.scaled(to: targetSize))
        rectanglePath.close()

        rectangleShape.path = rectanglePath.cgPath
        targetImageView?.layer.addSublayer(rectangleShape)
    }

    override func viewDidLayoutSubviews() {
        super.viewDidLayoutSubviews()
        previewLayer.frame = self.previewView.bounds
        // gradientLayer.frame = self.previewView.bounds

        let orientation: UIDeviceOrientation = UIDevice.current.orientation
        switch orientation {
        case .portrait:
            previewLayer?.connection?.videoOrientation = .portrait
        case .landscapeRight:
            previewLayer?.connection?.videoOrientation = .landscapeLeft
        case .landscapeLeft:
            previewLayer?.connection?.videoOrientation = .landscapeRight
        case .portraitUpsideDown:
            previewLayer?.connection?.videoOrientation = .portraitUpsideDown
        default:
            previewLayer?.connection?.videoOrientation = .portrait
        }
    }

    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
            return
        }

        connection.videoOrientation = .portrait

        var requestOptions: [VNImageOption: Any] = [:]

        if let cameraIntrinsicData = CMGetAttachment(sampleBuffer, kCMSampleBufferAttachmentKey_CameraIntrinsicMatrix, nil) {
            requestOptions = [.cameraIntrinsics: cameraIntrinsicData]
        }

        // for orientation see kCGImagePropertyOrientation
        let imageRequestHandler = VNImageRequestHandler(cvPixelBuffer: pixelBuffer, orientation: .downMirrored, options: requestOptions)
        do {
            try imageRequestHandler.perform(self.visionRequests)
        } catch {
            print(error)
        }
    }

    @IBAction func userTapped(sender: Any) {
        // stop the capture session to freeze the current frame
        session.stopRunning()
        // self.thresholdStackView.isHidden = !self.thresholdStackView.isHidden
    }

    @IBAction func sliderValueChanged(slider: UISlider) {
        self.recognitionThreshold = slider.value
        updateThreshholdLabel()
    }
}
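
// MARK: - Hedged sketch (not in the original paste)
// The commented-out Core ML path in viewDidLoad passes a `handleClassifications`
// completion handler that this file never defines. A minimal implementation,
// assuming the Resnet50 model named above, might filter results by the
// slider-driven recognitionThreshold and show the surviving labels in resultView:
extension ViewController {
    func handleClassifications(request: VNRequest, error: Error?) {
        if let error = error {
            print("Classification error: \(error.localizedDescription)")
            return
        }
        guard let observations = request.results as? [VNClassificationObservation] else {
            return
        }
        // Keep only classifications at or above the current slider threshold.
        let summary = observations
            .filter { $0.confidence >= self.recognitionThreshold }
            .map { "\($0.identifier) \(String(format: "%.2f", $0.confidence))" }
            .joined(separator: "\n")
        // UI updates must happen on the main queue.
        DispatchQueue.main.async {
            self.resultView.text = summary
        }
    }
}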

extension CGPoint {
    func scaled(to size: CGSize) -> CGPoint {
        return CGPoint(x: self.x * size.width, y: self.y * size.height)
    }
}
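
// Hedged note (not in the original paste): Vision's normalized coordinates put the
// origin at the bottom-left, while UIKit layers draw from the top-left. This sample
// compensates by handing the request handler a .downMirrored orientation; an
// alternative would be to flip the y-axis while scaling, along these lines:
extension CGPoint {
    func scaledAndFlipped(to size: CGSize) -> CGPoint {
        // Invert y so a bottom-left-origin point lands correctly in a top-left-origin view.
        return CGPoint(x: self.x * size.width, y: (1 - self.y) * size.height)
    }
}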

extension CGRect {
    func scaled(to size: CGSize) -> CGRect {
        return CGRect(
            x: self.origin.x * size.width,
            y: self.origin.y * size.height,
            width: self.size.width * size.width,
            height: self.size.height * size.height
        )
    }
}
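
// MARK: - Hedged sketch (not in the original paste)
// VNDetectRectanglesRequest also exposes a `minimumConfidence` property, so the
// slider could gate detection itself rather than only updating the label. A helper
// along these lines (hypothetical; nothing in the paste calls it) shows the idea:
extension ViewController {
    func applyThreshold(to request: VNDetectRectanglesRequest) {
        // VNConfidence is a Float in 0...1, directly comparable to the slider value.
        request.minimumConfidence = VNConfidence(recognitionThreshold)
    }
}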