diff --git a/FaceDetection.xcodeproj/project.pbxproj b/FaceDetection.xcodeproj/project.pbxproj old mode 100644 new mode 100755 index 1efdbdb..19c61d9 --- a/FaceDetection.xcodeproj/project.pbxproj +++ b/FaceDetection.xcodeproj/project.pbxproj @@ -161,14 +161,16 @@ attributes = { LastSwiftMigration = 0720; LastSwiftUpdateCheck = 0720; - LastUpgradeCheck = 0620; + LastUpgradeCheck = 0820; ORGANIZATIONNAME = "Aaron Abentheuer"; TargetAttributes = { 80C316021A46F3F800E4493F = { CreatedOnToolsVersion = 6.2; + ProvisioningStyle = Manual; }; 80C316171A46F3F800E4493F = { CreatedOnToolsVersion = 6.2; + DevelopmentTeam = EMZ8C4N537; TestTargetID = 80C316021A46F3F800E4493F; }; }; @@ -274,15 +276,19 @@ CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; CLANG_WARN_EMPTY_BODY = YES; CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; CLANG_WARN_INT_CONVERSION = YES; CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_SUSPICIOUS_MOVE = YES; CLANG_WARN_UNREACHABLE_CODE = YES; CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; COPY_PHASE_STRIP = NO; ENABLE_STRICT_OBJC_MSGSEND = YES; + ENABLE_TESTABILITY = YES; GCC_C_LANGUAGE_STANDARD = gnu99; GCC_DYNAMIC_NO_PIC = NO; + GCC_NO_COMMON_BLOCKS = YES; GCC_OPTIMIZATION_LEVEL = 0; GCC_PREPROCESSOR_DEFINITIONS = ( "DEBUG=1", @@ -300,6 +306,7 @@ ONLY_ACTIVE_ARCH = YES; SDKROOT = iphoneos; SWIFT_OPTIMIZATION_LEVEL = "-Onone"; + SWIFT_VERSION = 3.0; TARGETED_DEVICE_FAMILY = "1,2"; }; name = Debug; @@ -317,8 +324,10 @@ CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; CLANG_WARN_EMPTY_BODY = YES; CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; CLANG_WARN_INT_CONVERSION = YES; CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_SUSPICIOUS_MOVE = YES; CLANG_WARN_UNREACHABLE_CODE = YES; CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; @@ -326,6 +335,7 @@ ENABLE_NS_ASSERTIONS = NO; ENABLE_STRICT_OBJC_MSGSEND = 
YES; GCC_C_LANGUAGE_STANDARD = gnu99; + GCC_NO_COMMON_BLOCKS = YES; GCC_WARN_64_TO_32_BIT_CONVERSION = YES; GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; GCC_WARN_UNDECLARED_SELECTOR = YES; @@ -335,6 +345,8 @@ IPHONEOS_DEPLOYMENT_TARGET = 8.2; MTL_ENABLE_DEBUG_INFO = NO; SDKROOT = iphoneos; + SWIFT_OPTIMIZATION_LEVEL = "-Owholemodule"; + SWIFT_VERSION = 3.0; TARGETED_DEVICE_FAMILY = "1,2"; VALIDATE_PRODUCT = YES; }; @@ -344,8 +356,11 @@ isa = XCBuildConfiguration; buildSettings = { ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; + DEVELOPMENT_TEAM = ""; INFOPLIST_FILE = FaceDetection/Info.plist; LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; + PRODUCT_BUNDLE_IDENTIFIER = com.camera.detection; PRODUCT_NAME = "$(TARGET_NAME)"; TARGETED_DEVICE_FAMILY = 1; }; @@ -355,8 +370,11 @@ isa = XCBuildConfiguration; buildSettings = { ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; + DEVELOPMENT_TEAM = ""; INFOPLIST_FILE = FaceDetection/Info.plist; LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; + PRODUCT_BUNDLE_IDENTIFIER = com.camera.detection; PRODUCT_NAME = "$(TARGET_NAME)"; TARGETED_DEVICE_FAMILY = 1; }; @@ -366,6 +384,7 @@ isa = XCBuildConfiguration; buildSettings = { BUNDLE_LOADER = "$(TEST_HOST)"; + DEVELOPMENT_TEAM = EMZ8C4N537; FRAMEWORK_SEARCH_PATHS = ( "$(SDKROOT)/Developer/Library/Frameworks", "$(inherited)", @@ -376,6 +395,7 @@ ); INFOPLIST_FILE = FaceDetectionTests/Info.plist; LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks"; + PRODUCT_BUNDLE_IDENTIFIER = "aaab.$(PRODUCT_NAME:rfc1034identifier)"; PRODUCT_NAME = "$(TARGET_NAME)"; TEST_HOST = "$(BUILT_PRODUCTS_DIR)/FaceDetection.app/FaceDetection"; }; @@ -385,12 +405,14 @@ isa = XCBuildConfiguration; buildSettings = { BUNDLE_LOADER = "$(TEST_HOST)"; + DEVELOPMENT_TEAM = EMZ8C4N537; FRAMEWORK_SEARCH_PATHS = ( 
"$(SDKROOT)/Developer/Library/Frameworks", "$(inherited)", ); INFOPLIST_FILE = FaceDetectionTests/Info.plist; LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks"; + PRODUCT_BUNDLE_IDENTIFIER = "aaab.$(PRODUCT_NAME:rfc1034identifier)"; PRODUCT_NAME = "$(TARGET_NAME)"; TEST_HOST = "$(BUILT_PRODUCTS_DIR)/FaceDetection.app/FaceDetection"; }; diff --git a/FaceDetection.xcodeproj/project.xcworkspace/contents.xcworkspacedata b/FaceDetection.xcodeproj/project.xcworkspace/contents.xcworkspacedata old mode 100644 new mode 100755 diff --git a/FaceDetection.xcodeproj/project.xcworkspace/xcshareddata/FaceDetection.xccheckout b/FaceDetection.xcodeproj/project.xcworkspace/xcshareddata/FaceDetection.xccheckout old mode 100644 new mode 100755 diff --git a/FaceDetection.xcodeproj/project.xcworkspace/xcshareddata/FaceDetection.xcscmblueprint b/FaceDetection.xcodeproj/project.xcworkspace/xcshareddata/FaceDetection.xcscmblueprint old mode 100644 new mode 100755 index 00b7523..7ebd7c0 --- a/FaceDetection.xcodeproj/project.xcworkspace/xcshareddata/FaceDetection.xcscmblueprint +++ b/FaceDetection.xcodeproj/project.xcworkspace/xcshareddata/FaceDetection.xcscmblueprint @@ -9,7 +9,7 @@ }, "DVTSourceControlWorkspaceBlueprintIdentifierKey" : "1767FBA5-AB45-4A53-A8D7-767E6A8692B3", "DVTSourceControlWorkspaceBlueprintWorkingCopyPathsKey" : { - "40D912770F74B782D271F42DA80123F1A9C2226A" : "AAFaceDetection", + "40D912770F74B782D271F42DA80123F1A9C2226A" : "face\/", "0B0B95484AD488B8C39195A6DAF9FB3BE1F8A511" : "Visage" }, "DVTSourceControlWorkspaceBlueprintNameKey" : "FaceDetection", diff --git a/FaceDetection.xcodeproj/project.xcworkspace/xcuserdata/aaab.xcuserdatad/UserInterfaceState.xcuserstate b/FaceDetection.xcodeproj/project.xcworkspace/xcuserdata/aaab.xcuserdatad/UserInterfaceState.xcuserstate old mode 100644 new mode 100755 diff --git a/FaceDetection.xcodeproj/project.xcworkspace/xcuserdata/zaid.pathan.xcuserdatad/UserInterfaceState.xcuserstate 
b/FaceDetection.xcodeproj/project.xcworkspace/xcuserdata/zaid.pathan.xcuserdatad/UserInterfaceState.xcuserstate old mode 100644 new mode 100755 diff --git a/FaceDetection.xcodeproj/xcuserdata/aaab.xcuserdatad/xcdebugger/Breakpoints_v2.xcbkptlist b/FaceDetection.xcodeproj/xcuserdata/aaab.xcuserdatad/xcdebugger/Breakpoints_v2.xcbkptlist old mode 100644 new mode 100755 diff --git a/FaceDetection.xcodeproj/xcuserdata/aaab.xcuserdatad/xcschemes/FaceDetection.xcscheme b/FaceDetection.xcodeproj/xcuserdata/aaab.xcuserdatad/xcschemes/FaceDetection.xcscheme old mode 100644 new mode 100755 diff --git a/FaceDetection.xcodeproj/xcuserdata/aaab.xcuserdatad/xcschemes/xcschememanagement.plist b/FaceDetection.xcodeproj/xcuserdata/aaab.xcuserdatad/xcschemes/xcschememanagement.plist old mode 100644 new mode 100755 diff --git a/FaceDetection.xcodeproj/xcuserdata/diazj.xcuserdatad/xcschemes/FaceDetection.xcscheme b/FaceDetection.xcodeproj/xcuserdata/diazj.xcuserdatad/xcschemes/FaceDetection.xcscheme new file mode 100644 index 0000000..1527162 --- /dev/null +++ b/FaceDetection.xcodeproj/xcuserdata/diazj.xcuserdatad/xcschemes/FaceDetection.xcscheme @@ -0,0 +1,101 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/FaceDetection.xcodeproj/xcuserdata/zaid.pathan.xcuserdatad/xcschemes/FaceDetection.xcscheme b/FaceDetection.xcodeproj/xcuserdata/zaid.pathan.xcuserdatad/xcschemes/FaceDetection.xcscheme old mode 100644 new mode 100755 diff --git a/FaceDetection.xcodeproj/xcuserdata/zaid.pathan.xcuserdatad/xcschemes/xcschememanagement.plist b/FaceDetection.xcodeproj/xcuserdata/zaid.pathan.xcuserdatad/xcschemes/xcschememanagement.plist old mode 100644 new mode 100755 diff --git a/FaceDetection/AppDelegate.swift b/FaceDetection/AppDelegate.swift old mode 100644 new mode 100755 index ddbfb16..545b72b --- a/FaceDetection/AppDelegate.swift +++ b/FaceDetection/AppDelegate.swift @@ -16,7 +16,7 @@ class AppDelegate: UIResponder, UIApplicationDelegate { 
var activeCornerRadius : CGFloat = 0 var incativeCornerRadius : CGFloat = 0 - func application(application: UIApplication, didFinishLaunchingWithOptions launchOptions: [NSObject: AnyObject]?) -> Bool { + func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplicationLaunchOptionsKey: Any]?) -> Bool { window!.clipsToBounds = true let animation : CABasicAnimation = CABasicAnimation(keyPath: "cornerRadius") @@ -25,29 +25,29 @@ class AppDelegate: UIResponder, UIApplicationDelegate { animation.toValue = activeCornerRadius animation.duration = 0.15 window!.layer.cornerRadius = activeCornerRadius - window!.layer.addAnimation(animation, forKey: "cornerRadius") + window!.layer.add(animation, forKey: "cornerRadius") return true } - func applicationWillResignActive(application: UIApplication) { + func applicationWillResignActive(_ application: UIApplication) { let animation : CABasicAnimation = CABasicAnimation(keyPath: "cornerRadius") animation.timingFunction = CAMediaTimingFunction(name: kCAMediaTimingFunctionEaseOut) animation.fromValue = activeCornerRadius animation.toValue = incativeCornerRadius animation.duration = 0.15 window!.layer.cornerRadius = incativeCornerRadius - window!.layer.addAnimation(animation, forKey: "cornerRadius") + window!.layer.add(animation, forKey: "cornerRadius") } - func applicationDidBecomeActive(application: UIApplication) { + func applicationDidBecomeActive(_ application: UIApplication) { let animation : CABasicAnimation = CABasicAnimation(keyPath: "cornerRadius") animation.timingFunction = CAMediaTimingFunction(name: kCAMediaTimingFunctionEaseOut) animation.fromValue = incativeCornerRadius animation.toValue = activeCornerRadius animation.duration = 0.15 window!.layer.cornerRadius = activeCornerRadius - window!.layer.addAnimation(animation, forKey: "cornerRadius") + window!.layer.add(animation, forKey: "cornerRadius") } } diff --git a/FaceDetection/Base.lproj/LaunchScreen.xib
b/FaceDetection/Base.lproj/LaunchScreen.xib old mode 100644 new mode 100755 diff --git a/FaceDetection/Base.lproj/Main.storyboard b/FaceDetection/Base.lproj/Main.storyboard old mode 100644 new mode 100755 diff --git a/FaceDetection/Images.xcassets/AppIcon.appiconset/Contents.json b/FaceDetection/Images.xcassets/AppIcon.appiconset/Contents.json old mode 100644 new mode 100755 diff --git a/FaceDetection/Info.plist b/FaceDetection/Info.plist old mode 100644 new mode 100755 index 93124d8..40985d0 --- a/FaceDetection/Info.plist +++ b/FaceDetection/Info.plist @@ -7,7 +7,7 @@ CFBundleExecutable $(EXECUTABLE_NAME) CFBundleIdentifier - aaab.$(PRODUCT_NAME:rfc1034identifier) + $(PRODUCT_BUNDLE_IDENTIFIER) CFBundleInfoDictionaryVersion 6.0 CFBundleName @@ -20,8 +20,12 @@ ???? CFBundleVersion 1 + LSApplicationCategoryType + LSRequiresIPhoneOS + NSCameraUsageDescription + This app needs to access your device's camera UILaunchStoryboardName LaunchScreen UIMainStoryboardFile @@ -34,10 +38,6 @@ UIInterfaceOrientationPortrait - LSApplicationCategoryType - - UIViewControllerBasedStatusBarAppearance - UISupportedInterfaceOrientations~ipad UIInterfaceOrientationPortrait @@ -45,5 +45,7 @@ UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight + UIViewControllerBasedStatusBarAppearance + diff --git a/FaceDetection/ViewController.swift b/FaceDetection/ViewController.swift old mode 100644 new mode 100755 index fd3d05a..280b2ee --- a/FaceDetection/ViewController.swift +++ b/FaceDetection/ViewController.swift @@ -7,13 +7,14 @@ // import UIKit +import NotificationCenter class ViewController: UIViewController { private var visage : Visage? 
- private let notificationCenter : NSNotificationCenter = NSNotificationCenter.defaultCenter() + private let notificationCenter : NotificationCenter = NotificationCenter.default - let emojiLabel : UILabel = UILabel(frame: UIScreen.mainScreen().bounds) + let emojiLabel : UILabel = UILabel(frame: UIScreen.main.bounds) override func viewDidLoad() { super.viewDidLoad() @@ -32,20 +33,29 @@ class ViewController: UIViewController { let cameraView = visage!.visageCameraView self.view.addSubview(cameraView) - let visualEffectView = UIVisualEffectView(effect: UIBlurEffect(style: .Light)) + let visualEffectView = UIVisualEffectView(effect: UIBlurEffect(style: .light)) visualEffectView.frame = self.view.bounds self.view.addSubview(visualEffectView) - + emojiLabel.text = "😐" - emojiLabel.font = UIFont.systemFontOfSize(50) - emojiLabel.textAlignment = .Center + emojiLabel.font = UIFont.systemFont(ofSize: 50) + emojiLabel.textAlignment = .center self.view.addSubview(emojiLabel) //Subscribing to the "visageFaceDetectedNotification" (for a list of all available notifications check out the "ReadMe" or switch to "Visage.swift") and reacting to it with a completionHandler. You can also use the other .addObserver-Methods to react to notifications. 
- NSNotificationCenter.defaultCenter().addObserverForName("visageFaceDetectedNotification", object: nil, queue: NSOperationQueue.mainQueue(), usingBlock: { notification in + + let center = NotificationCenter.default + let mainQueue = OperationQueue.main + + + + + + center.addObserver(forName: Notification.Name(rawValue: "visageFaceDetectedNotification"), object: nil, queue: mainQueue) { (notification) in - UIView.animateWithDuration(0.5, animations: { + UIView.animate(withDuration: 0.5, animations: { self.emojiLabel.alpha = 1 + }) if ((self.visage!.hasSmile == true && self.visage!.isWinking == true)) { @@ -57,18 +67,17 @@ } else { self.emojiLabel.text = "😐" } - }) - - //The same thing for the opposite, when no face is detected things are reset. - NSNotificationCenter.defaultCenter().addObserverForName("visageNoFaceDetectedNotification", object: nil, queue: NSOperationQueue.mainQueue(), usingBlock: { notification in - - UIView.animateWithDuration(0.5, animations: { + } +// +// //The same thing for the opposite, when no face is detected things are reset. + center.addObserver(forName: Notification.Name(rawValue: "visageNoFaceDetectedNotification"), object: nil, queue: mainQueue) { (notification) in + UIView.animate(withDuration: 0.5, animations: { self.emojiLabel.alpha = 0.25 }) - }) + } } - override func prefersStatusBarHidden() -> Bool { + override var prefersStatusBarHidden: Bool { return true } -} \ No newline at end of file +} diff --git a/FaceDetection/Visage.swift b/FaceDetection/Visage.swift old mode 100644 new mode 100755 index a70407e..ff44d5f --- a/FaceDetection/Visage.swift +++ b/FaceDetection/Visage.swift @@ -42,43 +42,43 @@ class Visage: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate { private(set) var rightEyeClosed : Bool? //Notifications you can subscribe to for reacting to changes in the detected properties.
- private let visageNoFaceDetectedNotification = NSNotification(name: "visageNoFaceDetectedNotification", object: nil) - private let visageFaceDetectedNotification = NSNotification(name: "visageFaceDetectedNotification", object: nil) - private let visageSmilingNotification = NSNotification(name: "visageHasSmileNotification", object: nil) - private let visageNotSmilingNotification = NSNotification(name: "visageHasNoSmileNotification", object: nil) - private let visageBlinkingNotification = NSNotification(name: "visageBlinkingNotification", object: nil) - private let visageNotBlinkingNotification = NSNotification(name: "visageNotBlinkingNotification", object: nil) - private let visageWinkingNotification = NSNotification(name: "visageWinkingNotification", object: nil) - private let visageNotWinkingNotification = NSNotification(name: "visageNotWinkingNotification", object: nil) - private let visageLeftEyeClosedNotification = NSNotification(name: "visageLeftEyeClosedNotification", object: nil) - private let visageLeftEyeOpenNotification = NSNotification(name: "visageLeftEyeOpenNotification", object: nil) - private let visageRightEyeClosedNotification = NSNotification(name: "visageRightEyeClosedNotification", object: nil) - private let visageRightEyeOpenNotification = NSNotification(name: "visageRightEyeOpenNotification", object: nil) + private let visageNoFaceDetectedNotification = NSNotification(name: NSNotification.Name(rawValue: "visageNoFaceDetectedNotification"), object: nil) + private let visageFaceDetectedNotification = NSNotification(name: NSNotification.Name(rawValue: "visageFaceDetectedNotification"), object: nil) + private let visageSmilingNotification = NSNotification(name: NSNotification.Name(rawValue: "visageHasSmileNotification"), object: nil) + private let visageNotSmilingNotification = NSNotification(name: NSNotification.Name(rawValue: "visageHasNoSmileNotification"), object: nil) + private let visageBlinkingNotification = NSNotification(name: 
NSNotification.Name(rawValue: "visageBlinkingNotification"), object: nil) + private let visageNotBlinkingNotification = NSNotification(name: NSNotification.Name(rawValue: "visageNotBlinkingNotification"), object: nil) + private let visageWinkingNotification = NSNotification(name: NSNotification.Name(rawValue: "visageWinkingNotification"), object: nil) + private let visageNotWinkingNotification = NSNotification(name: NSNotification.Name(rawValue: "visageNotWinkingNotification"), object: nil) + private let visageLeftEyeClosedNotification = NSNotification(name: NSNotification.Name(rawValue: "visageLeftEyeClosedNotification"), object: nil) + private let visageLeftEyeOpenNotification = NSNotification(name: NSNotification.Name(rawValue: "visageLeftEyeOpenNotification"), object: nil) + private let visageRightEyeClosedNotification = NSNotification(name: NSNotification.Name(rawValue: "visageRightEyeClosedNotification"), object: nil) + private let visageRightEyeOpenNotification = NSNotification(name: NSNotification.Name(rawValue: "visageRightEyeOpenNotification"), object: nil) //Private variables that cannot be accessed by other classes in any way. private var faceDetector : CIDetector? private var videoDataOutput : AVCaptureVideoDataOutput? - private var videoDataOutputQueue : dispatch_queue_t? + private var videoDataOutputQueue : DispatchQueue? private var cameraPreviewLayer : AVCaptureVideoPreviewLayer? private var captureSession : AVCaptureSession = AVCaptureSession() - private let notificationCenter : NSNotificationCenter = NSNotificationCenter.defaultCenter() + private let notificationCenter : NotificationCenter = NotificationCenter.default private var currentOrientation : Int? 
init(cameraPosition : CameraDevice, optimizeFor : DetectorAccuracy) { super.init() - currentOrientation = convertOrientation(UIDevice.currentDevice().orientation) + currentOrientation = convertOrientation(deviceOrientation: UIDevice.current.orientation) switch cameraPosition { - case .FaceTimeCamera : self.captureSetup(AVCaptureDevicePosition.Front) - case .ISightCamera : self.captureSetup(AVCaptureDevicePosition.Back) + case .FaceTimeCamera : self.captureSetup(position: AVCaptureDevicePosition.front) + case .ISightCamera : self.captureSetup(position: AVCaptureDevicePosition.back) } var faceDetectorOptions : [String : AnyObject]? switch optimizeFor { - case .BatterySaving : faceDetectorOptions = [CIDetectorAccuracy : CIDetectorAccuracyLow] - case .HigherPerformance : faceDetectorOptions = [CIDetectorAccuracy : CIDetectorAccuracyHigh] + case .BatterySaving : faceDetectorOptions = [CIDetectorAccuracy : CIDetectorAccuracyLow as AnyObject] + case .HigherPerformance : faceDetectorOptions = [CIDetectorAccuracy : CIDetectorAccuracyHigh as AnyObject] } self.faceDetector = CIDetector(ofType: CIDetectorTypeFace, context: nil, options: faceDetectorOptions) @@ -97,14 +97,14 @@ class Visage: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate { var captureError : NSError? var captureDevice : AVCaptureDevice! - for testedDevice in AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo){ - if (testedDevice.position == position) { + for testedDevice in AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo){ + if ((testedDevice as AnyObject).position == position) { captureDevice = testedDevice as! AVCaptureDevice } } if (captureDevice == nil) { - captureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo) + captureDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo) } var deviceInput : AVCaptureDeviceInput? 
@@ -122,9 +122,9 @@ class Visage: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate { } self.videoDataOutput = AVCaptureVideoDataOutput() - self.videoDataOutput!.videoSettings = [kCVPixelBufferPixelFormatTypeKey: Int(kCVPixelFormatType_32BGRA)] + self.videoDataOutput!.videoSettings = [kCVPixelBufferPixelFormatTypeKey as AnyHashable: Int(kCVPixelFormatType_32BGRA)] self.videoDataOutput!.alwaysDiscardsLateVideoFrames = true - self.videoDataOutputQueue = dispatch_queue_create("VideoDataOutputQueue", DISPATCH_QUEUE_SERIAL) + self.videoDataOutputQueue = DispatchQueue(label:"VideoDataOutputQueue") self.videoDataOutput!.setSampleBufferDelegate(self, queue: self.videoDataOutputQueue!) if (captureSession.canAddOutput(self.videoDataOutput)) { @@ -132,35 +132,35 @@ class Visage: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate { } } - visageCameraView.frame = UIScreen.mainScreen().bounds - + visageCameraView.frame = UIScreen.main.bounds + let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) - previewLayer.frame = UIScreen.mainScreen().bounds - previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill - visageCameraView.layer.addSublayer(previewLayer) + previewLayer?.frame = UIScreen.main.bounds + previewLayer?.videoGravity = AVLayerVideoGravityResizeAspectFill + visageCameraView.layer.addSublayer(previewLayer!) } var options : [String : AnyObject]? //MARK: CAPTURE-OUTPUT/ANALYSIS OF FACIAL-FEATURES - func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) { + func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) 
{ let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) let opaqueBuffer = Unmanaged.passUnretained(imageBuffer!).toOpaque() let pixelBuffer = Unmanaged.fromOpaque(opaqueBuffer).takeUnretainedValue() - let sourceImage = CIImage(CVPixelBuffer: pixelBuffer, options: nil) - options = [CIDetectorSmile : true, CIDetectorEyeBlink: true, CIDetectorImageOrientation : 6] + let sourceImage = CIImage(cvPixelBuffer: pixelBuffer, options: nil) + options = [CIDetectorSmile : true as AnyObject, CIDetectorEyeBlink: true as AnyObject, CIDetectorImageOrientation : 6 as AnyObject] - let features = self.faceDetector!.featuresInImage(sourceImage, options: options) + let features = self.faceDetector!.features(in: sourceImage, options: options) if (features.count != 0) { if (onlyFireNotificatonOnStatusChange == true) { if (self.faceDetected == false) { - notificationCenter.postNotification(visageFaceDetectedNotification) + notificationCenter.post(visageFaceDetectedNotification as Notification) } } else { - notificationCenter.postNotification(visageFaceDetectedNotification) + notificationCenter.post(visageFaceDetectedNotification as Notification) } self.faceDetected = true @@ -194,10 +194,10 @@ class Visage: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate { if (feature.hasSmile) { if (onlyFireNotificatonOnStatusChange == true) { if (self.hasSmile == false) { - notificationCenter.postNotification(visageSmilingNotification) + notificationCenter.post(visageSmilingNotification as Notification) } } else { - notificationCenter.postNotification(visageSmilingNotification) + notificationCenter.post(visageSmilingNotification as Notification) } hasSmile = feature.hasSmile @@ -205,10 +205,10 @@ class Visage: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate { } else { if (onlyFireNotificatonOnStatusChange == true) { if (self.hasSmile == true) { - notificationCenter.postNotification(visageNotSmilingNotification) + notificationCenter.post(visageNotSmilingNotification as Notification) 
} } else { - notificationCenter.postNotification(visageNotSmilingNotification) + notificationCenter.post(visageNotSmilingNotification as Notification) } hasSmile = feature.hasSmile @@ -217,10 +217,10 @@ class Visage: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate { if (feature.leftEyeClosed || feature.rightEyeClosed) { if (onlyFireNotificatonOnStatusChange == true) { if (self.isWinking == false) { - notificationCenter.postNotification(visageWinkingNotification) + notificationCenter.post(visageWinkingNotification as Notification) } } else { - notificationCenter.postNotification(visageWinkingNotification) + notificationCenter.post(visageWinkingNotification as Notification) } isWinking = true @@ -228,10 +228,10 @@ class Visage: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate { if (feature.leftEyeClosed) { if (onlyFireNotificatonOnStatusChange == true) { if (self.leftEyeClosed == false) { - notificationCenter.postNotification(visageLeftEyeClosedNotification) + notificationCenter.post(visageLeftEyeClosedNotification as Notification) } } else { - notificationCenter.postNotification(visageLeftEyeClosedNotification) + notificationCenter.post(visageLeftEyeClosedNotification as Notification) } leftEyeClosed = feature.leftEyeClosed @@ -239,10 +239,10 @@ class Visage: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate { if (feature.rightEyeClosed) { if (onlyFireNotificatonOnStatusChange == true) { if (self.rightEyeClosed == false) { - notificationCenter.postNotification(visageRightEyeClosedNotification) + notificationCenter.post(visageRightEyeClosedNotification as Notification) } } else { - notificationCenter.postNotification(visageRightEyeClosedNotification) + notificationCenter.post(visageRightEyeClosedNotification as Notification) } rightEyeClosed = feature.rightEyeClosed @@ -250,10 +250,10 @@ class Visage: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate { if (feature.leftEyeClosed && feature.rightEyeClosed) { if (onlyFireNotificatonOnStatusChange 
== true) { if (self.isBlinking == false) { - notificationCenter.postNotification(visageBlinkingNotification) + notificationCenter.post(visageBlinkingNotification as Notification) } } else { - notificationCenter.postNotification(visageBlinkingNotification) + notificationCenter.post(visageBlinkingNotification as Notification) } isBlinking = true @@ -262,22 +262,22 @@ class Visage: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate { if (onlyFireNotificatonOnStatusChange == true) { if (self.isBlinking == true) { - notificationCenter.postNotification(visageNotBlinkingNotification) + notificationCenter.post(visageNotBlinkingNotification as Notification) } if (self.isWinking == true) { - notificationCenter.postNotification(visageNotWinkingNotification) + notificationCenter.post(visageNotWinkingNotification as Notification) } if (self.leftEyeClosed == true) { - notificationCenter.postNotification(visageLeftEyeOpenNotification) + notificationCenter.post(visageLeftEyeOpenNotification as Notification) } if (self.rightEyeClosed == true) { - notificationCenter.postNotification(visageRightEyeOpenNotification) + notificationCenter.post(visageRightEyeOpenNotification as Notification) } } else { - notificationCenter.postNotification(visageNotBlinkingNotification) - notificationCenter.postNotification(visageNotWinkingNotification) - notificationCenter.postNotification(visageLeftEyeOpenNotification) - notificationCenter.postNotification(visageRightEyeOpenNotification) + notificationCenter.post(visageNotBlinkingNotification as Notification) + notificationCenter.post(visageNotWinkingNotification as Notification) + notificationCenter.post(visageLeftEyeOpenNotification as Notification) + notificationCenter.post(visageRightEyeOpenNotification as Notification) } isBlinking = false @@ -289,10 +289,10 @@ class Visage: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate { } else { if (onlyFireNotificatonOnStatusChange == true) { if (self.faceDetected == true) { - 
notificationCenter.postNotification(visageNoFaceDetectedNotification) + notificationCenter.post(visageNoFaceDetectedNotification as Notification) } } else { - notificationCenter.postNotification(visageNoFaceDetectedNotification) + notificationCenter.post(visageNoFaceDetectedNotification as Notification) } self.faceDetected = false @@ -303,16 +303,16 @@ class Visage: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate { private func convertOrientation(deviceOrientation: UIDeviceOrientation) -> Int { var orientation: Int = 0 switch deviceOrientation { - case .Portrait: + case .portrait: orientation = 6 - case .PortraitUpsideDown: + case .portraitUpsideDown: orientation = 2 - case .LandscapeLeft: + case .landscapeLeft: orientation = 3 - case .LandscapeRight: + case .landscapeRight: orientation = 4 default : orientation = 1 } - return 6 + return orientation } -} \ No newline at end of file +} diff --git a/FaceDetectionTests/FaceDetectionTests.swift b/FaceDetectionTests/FaceDetectionTests.swift old mode 100644 new mode 100755 diff --git a/FaceDetectionTests/Info.plist b/FaceDetectionTests/Info.plist old mode 100644 new mode 100755 index b3ed670..513b389 --- a/FaceDetectionTests/Info.plist +++ b/FaceDetectionTests/Info.plist @@ -7,7 +7,7 @@ CFBundleExecutable $(EXECUTABLE_NAME) CFBundleIdentifier - aaab.$(PRODUCT_NAME:rfc1034identifier) + $(PRODUCT_BUNDLE_IDENTIFIER) CFBundleInfoDictionaryVersion 6.0 CFBundleName @@ -20,5 +20,7 @@ ???? CFBundleVersion 1 + NSCameraUsageDescription + This app needs to access your device's camera diff --git a/README.md b/README.md old mode 100644 new mode 100755 diff --git a/screencast.gif b/screencast.gif old mode 100644 new mode 100755