-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path ViewController.swift
More file actions
123 lines (82 loc) · 4.08 KB
/
ViewController.swift
File metadata and controls
123 lines (82 loc) · 4.08 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
//
// ViewController.swift
// BlindSide
//
// Created by Martin Gamboa on 6/3/18.
// Copyright © 2018 RenatoGamboa. All rights reserved.
//
import UIKit
import AVKit
import Vision
class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {

    // Text most recently assigned to `label` at load time.
    var current: String?

    @IBOutlet weak var label: UILabel!

    /// The Resnet50 Vision model, loaded exactly once.
    ///
    /// The original code rebuilt `VNCoreMLModel(for: Resnet50().model)` inside
    /// `captureOutput(_:didOutput:from:)`, i.e. for every single camera frame.
    /// Model creation is expensive, so we cache it here instead.
    private lazy var classificationModel: VNCoreMLModel? = try? VNCoreMLModel(for: Resnet50().model)

    override func viewDidLoad() {
        super.viewDidLoad()

        // Build and configure the capture session before starting it.
        let captureSession = AVCaptureSession()
        // Cropped, photo-quality frames.
        captureSession.sessionPreset = .photo

        // Default (rear) camera as the video input. Bail out gracefully when
        // no camera is available (e.g. the Simulator) or input can't be added.
        guard let captureDevice = AVCaptureDevice.default(for: .video),
              let input = try? AVCaptureDeviceInput(device: captureDevice),
              captureSession.canAddInput(input) else { return }
        captureSession.addInput(input)

        // Deliver sample buffers to this controller on a dedicated serial queue.
        let dataOutput = AVCaptureVideoDataOutput()
        dataOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "videoQueue"))
        guard captureSession.canAddOutput(dataOutput) else { return }
        captureSession.addOutput(dataOutput)

        // Live camera preview, sized to fill this view.
        let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        previewLayer.frame = view.frame
        view.layer.addSublayer(previewLayer)

        // Start only after inputs/outputs are attached. startRunning() is a
        // blocking call, so keep it off the main thread.
        DispatchQueue.global(qos: .userInitiated).async {
            captureSession.startRunning()
        }

        // Configure the classification label and re-add it so it sits in front
        // of the preview sublayer (no force unwrap of the outlet).
        if let label = label {
            label.sizeToFit()
            label.adjustsFontSizeToFitWidth = true
            label.numberOfLines = 2
            label.textAlignment = .center
            label.text = current
            label.textColor = .red
            view.addSubview(label)
        }
    }

    /// AVCaptureVideoDataOutputSampleBufferDelegate — invoked on the "videoQueue"
    /// serial queue for every captured frame. Runs the cached Resnet50 model on
    /// the frame's pixel buffer and publishes the top classification to `label`.
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        // Pixel data for this frame; cached model (nil if Resnet50 failed to load).
        guard let pixelBuffer: CVPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer),
              let model = classificationModel else { return }

        let request = VNCoreMLRequest(model: model) { finishedReq, _ in
            // Top classification from the completed request.
            guard let results = finishedReq.results as? [VNClassificationObservation],
                  let firstObservation = results.first else { return }
            // Confidence as a percentage, rounded to two decimal places.
            let percent = round(firstObservation.confidence * 10000) / 100
            // UILabel must be updated on the main thread.
            DispatchQueue.main.async {
                self.label?.text = "\(firstObservation.identifier) \n %\(percent)"
            }
        }

        // Perform the classification on this frame's pixel buffer.
        try? VNImageRequestHandler(cvPixelBuffer: pixelBuffer, options: [:]).perform([request])
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }
}