diff --git a/README.md b/README.md index 7be4cfe..b4f149e 100644 --- a/README.md +++ b/README.md @@ -1,15 +1,20 @@ -# Bluetoothed ARKit 2.0 with ARWorldMap! +# ARKit 6.0 -After Apple’s introduction of ARKit 2, we have been consistently working behind to create shared-AR experiences. Our goal is to improve the utility of mobile using AR experiences. +After Apple’s introduction of ARKit, we have been consistently working to create various AR experiences leveraging the power of ARKit, RealityKit & SceneKit. Our goal is to improve the utility of mobile using AR experiences. -This demo created using ARKit 2: +This demo is created using ARKit: * Creates Geo-localized AR experiences on ARWorldMap * Detects objects and images * Mark specific objects and create 3D renders in point cloud -* Share information locally over BLE (Bluetooth low energy) +* Share information locally by maintaining the ARWorld Session +* Detecting the user's sitting posture and providing info on it +* Detecting the user's standing posture along with angles ## Features in this demo: * Image tracking +* Face tracking +* Sitting posture tracking +* Standing posture tracking * Save and load maps * Detect objects * Environmental texturing @@ -17,9 +22,24 @@ This demo created using ARKit 2: ### Prerequisites Before we dive into the implementation details let’s take a look at the prerequisites of the course. 
-* Xcode 10 (beta or above) -* iOS 12 (beta or above) -* Physical iPhone 6S or above +* Latest Xcode +* Latest iOS version +* Physical iPhone device (devices above the X series are recommended for performance) + +### Face Tracking and loading live 3D content +Tracking and visualizing faces is a key feature used to track a user's face along with their expressions and simultaneously mimic the same expressions using a 3D model. There are also many other possible use cases for face tracking by honing the capability of ARKit. + +Here, in this tutorial, we have added some of the basic functionality of tracking a face along with mimicking the user's facial expressions. + +### Body Tracking with angle detection +Body tracking is an essential feature of ARKit enabling us to track a person in the physical environment and visualize their motion by applying the same body movements to a virtual character. +Alongside this we can also create our own model to mimic the user's movements, or we can use the "biped_robot" model provided by Apple itself. + +In this demo we will detect 2 types of posture +1. Sitting posture +In this demo we detect the angle between the knee and spine joints. As this demo is about sitting posture, it mainly focuses on the user's sitting posture. According to certain reports, when a user sits, a certain amount of pressure is applied to the spine joints. Based on the detected posture, when the user sits with reliable support at an angle of roughly 90 degrees or more — which scientifically applies less pressure on the spine joints — the demo updates itself with a green shape, and turns red otherwise. +2. Standing posture +This demo is all about standing and detecting the user's movement along with angles. In this demo I have created a skeleton using cylinders (bones) and spheres (joints) which will mimic the user's movement; I have also placed angle calculations at the joints based on the calculation of 2 nearby joints. This use case serves various purposes of body tracking and can be useful for exercise-related applications. 
### Image recognition and tracking “A photo is like a thousands words” - words are fine, but, ARKit-2 turns a photo into thousands of stories. diff --git a/iOS12_Sampler/ios12 Sampler.xcodeproj/project.pbxproj b/iOS12_Sampler/ios12 Sampler.xcodeproj/project.pbxproj index 3309020..2d5c42b 100644 --- a/iOS12_Sampler/ios12 Sampler.xcodeproj/project.pbxproj +++ b/iOS12_Sampler/ios12 Sampler.xcodeproj/project.pbxproj @@ -7,19 +7,47 @@ objects = { /* Begin PBXBuildFile section */ - 2E04C9072B4EE2F3000B4936 /* ARImageLocator.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2E04C9062B4EE2F3000B4936 /* ARImageLocator.swift */; }; 2E04C9092B503318000B4936 /* StatusViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2E04C9082B503318000B4936 /* StatusViewController.swift */; }; - 2E04C90B2B503A6D000B4936 /* ARImageLocator+ARSessionDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2E04C90A2B503A6D000B4936 /* ARImageLocator+ARSessionDelegate.swift */; }; 2E15A5982B32F720001EA792 /* RectangleDetector.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2E15A5972B32F720001EA792 /* RectangleDetector.swift */; }; 2E15A59A2B32FE85001EA792 /* ARImageDetectorVC.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2E15A5992B32FE85001EA792 /* ARImageDetectorVC.swift */; }; + 2E18E1462BB2A70900D4C1E3 /* ARPostureDetection+ARSessionDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2E18E1452BB2A70900D4C1E3 /* ARPostureDetection+ARSessionDelegate.swift */; }; + 2E18E14B2BB2A94F00D4C1E3 /* Bones.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2E18E1492BB2A94F00D4C1E3 /* Bones.swift */; }; + 2E18E14C2BB2A94F00D4C1E3 /* JointAngles.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2E18E14A2BB2A94F00D4C1E3 /* JointAngles.swift */; }; + 2E18E14F2BB2AAEE00D4C1E3 /* SkeletonJoint.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2E18E14D2BB2AAEE00D4C1E3 /* SkeletonJoint.swift */; }; + 2E18E1502BB2AAEE00D4C1E3 /* SkeletonBone.swift in Sources */ 
= {isa = PBXBuildFile; fileRef = 2E18E14E2BB2AAEE00D4C1E3 /* SkeletonBone.swift */; }; + 2E18E1552BB2AC9300D4C1E3 /* BodySkeleton.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2E18E1522BB2AC9300D4C1E3 /* BodySkeleton.swift */; }; + 2E18E1562BB2AC9300D4C1E3 /* BodySkeleton+AngleDetection.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2E18E1532BB2AC9300D4C1E3 /* BodySkeleton+AngleDetection.swift */; }; + 2E18E1572BB2AC9300D4C1E3 /* BodySkeleton+Entity.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2E18E1542BB2AC9300D4C1E3 /* BodySkeleton+Entity.swift */; }; + 2E18E1592BB2AD2800D4C1E3 /* String+Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2E18E1582BB2AD2800D4C1E3 /* String+Extension.swift */; }; + 2E18E15B2BB2BB1500D4C1E3 /* StandingPostureVC.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2E18E15A2BB2BB1500D4C1E3 /* StandingPostureVC.swift */; }; + 2E2C35C42BE215F100200E7E /* ARFaceDetection.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2E2C35C32BE215F100200E7E /* ARFaceDetection.swift */; }; + 2E2C35C62BE216FC00200E7E /* ModelCollectionCell.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2E2C35C52BE216FC00200E7E /* ModelCollectionCell.swift */; }; + 2E41DFD12BE2206F0012F773 /* Heart.usdz in Resources */ = {isa = PBXBuildFile; fileRef = 2E41DFCA2BE2206F0012F773 /* Heart.usdz */; }; + 2E41DFD22BE2206F0012F773 /* Glasses.usdz in Resources */ = {isa = PBXBuildFile; fileRef = 2E41DFCB2BE2206F0012F773 /* Glasses.usdz */; }; + 2E41DFD32BE2206F0012F773 /* Cyclops.usdz in Resources */ = {isa = PBXBuildFile; fileRef = 2E41DFCC2BE2206F0012F773 /* Cyclops.usdz */; }; + 2E41DFD42BE2206F0012F773 /* Neon.usdz in Resources */ = {isa = PBXBuildFile; fileRef = 2E41DFCD2BE2206F0012F773 /* Neon.usdz */; }; + 2E41DFD52BE2206F0012F773 /* Star.usdz in Resources */ = {isa = PBXBuildFile; fileRef = 2E41DFCE2BE2206F0012F773 /* Star.usdz */; }; + 2E41DFD62BE2206F0012F773 /* Swag.usdz in Resources */ = {isa = PBXBuildFile; fileRef = 
2E41DFCF2BE2206F0012F773 /* Swag.usdz */; }; + 2E41DFD72BE2206F0012F773 /* Animoji.usdz in Resources */ = {isa = PBXBuildFile; fileRef = 2E41DFD02BE2206F0012F773 /* Animoji.usdz */; }; + 2E70214B2B8EFC4000089680 /* ARPostureDetection.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2E70214A2B8EFC4000089680 /* ARPostureDetection.swift */; }; + 2E70214F2B90A0D900089680 /* BaseCameraVC.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2E70214E2B90A0D900089680 /* BaseCameraVC.swift */; }; + 2E7021512B90A42700089680 /* AVDetailsVC.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2E7021502B90A42700089680 /* AVDetailsVC.swift */; }; + 2E7021542B95ACA100089680 /* Int+Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2E7021532B95ACA100089680 /* Int+Extension.swift */; }; + 2E7021562B95ACD600089680 /* SCNVector3+Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2E7021552B95ACD600089680 /* SCNVector3+Extension.swift */; }; + 2E7021582B95ACF900089680 /* Simd3+Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2E7021572B95ACF900089680 /* Simd3+Extension.swift */; }; + 2E70215A2B95B4B600089680 /* MeshResource+Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2E7021592B95B4B600089680 /* MeshResource+Extension.swift */; }; + 2E70215C2B95B65B00089680 /* Float+Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2E70215B2B95B65B00089680 /* Float+Extension.swift */; }; 2E7503282B30640100DF78E1 /* ARSurfaceDetectionVC.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2E7503272B30640100DF78E1 /* ARSurfaceDetectionVC.swift */; }; 2E75032B2B3079FB00DF78E1 /* Plane.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2E75032A2B3079FB00DF78E1 /* Plane.swift */; }; 2E75032D2B30913E00DF78E1 /* Utility.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2E75032C2B30913E00DF78E1 /* Utility.swift */; }; + 2E7D4AFC2BBADA0600DE0004 /* Metal_Round_Glasses.usdz in Resources */ = {isa = PBXBuildFile; fileRef = 
2E7D4AF12BBADA0600DE0004 /* Metal_Round_Glasses.usdz */; }; 2E89AB4A2B46DBD8005EB695 /* ImageTrackingUtility.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2E89AB492B46DBD8005EB695 /* ImageTrackingUtility.swift */; }; 2E89AB4C2B47E11C005EB695 /* VisuallizationNode.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2E89AB4B2B47E11C005EB695 /* VisuallizationNode.swift */; }; 2E89AB4F2B481EAA005EB695 /* FilterCell.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2E89AB4E2B481EAA005EB695 /* FilterCell.swift */; }; + 2E90B3ED2B87636800D3DB90 /* biped_robot.usdz in Resources */ = {isa = PBXBuildFile; fileRef = 2E90B3EC2B87636800D3DB90 /* biped_robot.usdz */; }; 2E9300F62B4683DC002BF5D6 /* AlteredImage.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2E9300F52B4683DC002BF5D6 /* AlteredImage.swift */; }; 2E9300F92B469633002BF5D6 /* StyleTransferModel.mlpackage in Sources */ = {isa = PBXBuildFile; fileRef = 2E9300F82B469633002BF5D6 /* StyleTransferModel.mlpackage */; }; + 2E962C8F2B69017200D0903D /* robot_walk_idle.usdz in Resources */ = {isa = PBXBuildFile; fileRef = 2E962C8E2B69017200D0903D /* robot_walk_idle.usdz */; }; 4904E7EB20D77386002F5210 /* ViewController+SessionInfo.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4904E7E620D77386002F5210 /* ViewController+SessionInfo.swift */; }; 4904E7EC20D77386002F5210 /* TestRun.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4904E7E720D77386002F5210 /* TestRun.swift */; }; 4904E7ED20D77386002F5210 /* ViewController+NavigationBar.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4904E7E820D77386002F5210 /* ViewController+NavigationBar.swift */; }; @@ -86,19 +114,47 @@ /* End PBXContainerItemProxy section */ /* Begin PBXFileReference section */ - 2E04C9062B4EE2F3000B4936 /* ARImageLocator.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ARImageLocator.swift; sourceTree = ""; }; 2E04C9082B503318000B4936 /* StatusViewController.swift */ = {isa = PBXFileReference; 
lastKnownFileType = sourcecode.swift; path = StatusViewController.swift; sourceTree = ""; }; - 2E04C90A2B503A6D000B4936 /* ARImageLocator+ARSessionDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "ARImageLocator+ARSessionDelegate.swift"; sourceTree = ""; }; 2E15A5972B32F720001EA792 /* RectangleDetector.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RectangleDetector.swift; sourceTree = ""; }; 2E15A5992B32FE85001EA792 /* ARImageDetectorVC.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ARImageDetectorVC.swift; sourceTree = ""; }; + 2E18E1452BB2A70900D4C1E3 /* ARPostureDetection+ARSessionDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "ARPostureDetection+ARSessionDelegate.swift"; sourceTree = ""; }; + 2E18E1492BB2A94F00D4C1E3 /* Bones.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = Bones.swift; sourceTree = ""; }; + 2E18E14A2BB2A94F00D4C1E3 /* JointAngles.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = JointAngles.swift; sourceTree = ""; }; + 2E18E14D2BB2AAEE00D4C1E3 /* SkeletonJoint.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SkeletonJoint.swift; sourceTree = ""; }; + 2E18E14E2BB2AAEE00D4C1E3 /* SkeletonBone.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SkeletonBone.swift; sourceTree = ""; }; + 2E18E1522BB2AC9300D4C1E3 /* BodySkeleton.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = BodySkeleton.swift; path = "ios12 Sampler/Body Detection with AR/Standing Posture/SkeletonHelper/BodySkeleton.swift"; sourceTree = SOURCE_ROOT; }; + 2E18E1532BB2AC9300D4C1E3 /* BodySkeleton+AngleDetection.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = 
sourcecode.swift; name = "BodySkeleton+AngleDetection.swift"; path = "ios12 Sampler/Body Detection with AR/Standing Posture/SkeletonHelper/BodySkeleton+AngleDetection.swift"; sourceTree = SOURCE_ROOT; }; + 2E18E1542BB2AC9300D4C1E3 /* BodySkeleton+Entity.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = "BodySkeleton+Entity.swift"; path = "ios12 Sampler/Body Detection with AR/Standing Posture/SkeletonHelper/BodySkeleton+Entity.swift"; sourceTree = SOURCE_ROOT; }; + 2E18E1582BB2AD2800D4C1E3 /* String+Extension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "String+Extension.swift"; sourceTree = ""; }; + 2E18E15A2BB2BB1500D4C1E3 /* StandingPostureVC.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = StandingPostureVC.swift; sourceTree = ""; }; + 2E2C35C32BE215F100200E7E /* ARFaceDetection.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ARFaceDetection.swift; sourceTree = ""; }; + 2E2C35C52BE216FC00200E7E /* ModelCollectionCell.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ModelCollectionCell.swift; sourceTree = ""; }; + 2E41DFCA2BE2206F0012F773 /* Heart.usdz */ = {isa = PBXFileReference; lastKnownFileType = file.usdz; path = Heart.usdz; sourceTree = ""; }; + 2E41DFCB2BE2206F0012F773 /* Glasses.usdz */ = {isa = PBXFileReference; lastKnownFileType = file.usdz; path = Glasses.usdz; sourceTree = ""; }; + 2E41DFCC2BE2206F0012F773 /* Cyclops.usdz */ = {isa = PBXFileReference; lastKnownFileType = file.usdz; path = Cyclops.usdz; sourceTree = ""; }; + 2E41DFCD2BE2206F0012F773 /* Neon.usdz */ = {isa = PBXFileReference; lastKnownFileType = file.usdz; path = Neon.usdz; sourceTree = ""; }; + 2E41DFCE2BE2206F0012F773 /* Star.usdz */ = {isa = PBXFileReference; lastKnownFileType = file.usdz; path = Star.usdz; sourceTree = ""; }; + 2E41DFCF2BE2206F0012F773 /* Swag.usdz */ = {isa = 
PBXFileReference; lastKnownFileType = file.usdz; path = Swag.usdz; sourceTree = ""; }; + 2E41DFD02BE2206F0012F773 /* Animoji.usdz */ = {isa = PBXFileReference; lastKnownFileType = file.usdz; path = Animoji.usdz; sourceTree = ""; }; + 2E70214A2B8EFC4000089680 /* ARPostureDetection.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ARPostureDetection.swift; sourceTree = ""; }; + 2E70214E2B90A0D900089680 /* BaseCameraVC.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BaseCameraVC.swift; sourceTree = ""; }; + 2E7021502B90A42700089680 /* AVDetailsVC.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AVDetailsVC.swift; sourceTree = ""; }; + 2E7021532B95ACA100089680 /* Int+Extension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "Int+Extension.swift"; sourceTree = ""; }; + 2E7021552B95ACD600089680 /* SCNVector3+Extension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "SCNVector3+Extension.swift"; sourceTree = ""; }; + 2E7021572B95ACF900089680 /* Simd3+Extension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "Simd3+Extension.swift"; sourceTree = ""; }; + 2E7021592B95B4B600089680 /* MeshResource+Extension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "MeshResource+Extension.swift"; sourceTree = ""; }; + 2E70215B2B95B65B00089680 /* Float+Extension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "Float+Extension.swift"; sourceTree = ""; }; 2E7503272B30640100DF78E1 /* ARSurfaceDetectionVC.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ARSurfaceDetectionVC.swift; sourceTree = ""; }; 2E75032A2B3079FB00DF78E1 /* Plane.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Plane.swift; sourceTree = ""; }; 2E75032C2B30913E00DF78E1 /* Utility.swift */ = {isa = 
PBXFileReference; lastKnownFileType = sourcecode.swift; path = Utility.swift; sourceTree = ""; }; + 2E7D4AF12BBADA0600DE0004 /* Metal_Round_Glasses.usdz */ = {isa = PBXFileReference; lastKnownFileType = file.usdz; path = Metal_Round_Glasses.usdz; sourceTree = ""; }; 2E89AB492B46DBD8005EB695 /* ImageTrackingUtility.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ImageTrackingUtility.swift; sourceTree = ""; }; 2E89AB4B2B47E11C005EB695 /* VisuallizationNode.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VisuallizationNode.swift; sourceTree = ""; }; 2E89AB4E2B481EAA005EB695 /* FilterCell.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FilterCell.swift; sourceTree = ""; }; + 2E90B3EC2B87636800D3DB90 /* biped_robot.usdz */ = {isa = PBXFileReference; lastKnownFileType = file.usdz; path = biped_robot.usdz; sourceTree = ""; }; 2E9300F52B4683DC002BF5D6 /* AlteredImage.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AlteredImage.swift; sourceTree = ""; }; 2E9300F82B469633002BF5D6 /* StyleTransferModel.mlpackage */ = {isa = PBXFileReference; lastKnownFileType = folder.mlpackage; path = StyleTransferModel.mlpackage; sourceTree = ""; }; + 2E962C8E2B69017200D0903D /* robot_walk_idle.usdz */ = {isa = PBXFileReference; lastKnownFileType = file.usdz; path = robot_walk_idle.usdz; sourceTree = ""; }; 4904E7E620D77386002F5210 /* ViewController+SessionInfo.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "ViewController+SessionInfo.swift"; sourceTree = ""; }; 4904E7E720D77386002F5210 /* TestRun.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = TestRun.swift; sourceTree = ""; }; 4904E7E820D77386002F5210 /* ViewController+NavigationBar.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = 
"ViewController+NavigationBar.swift"; sourceTree = ""; }; @@ -181,9 +237,7 @@ 2E04C9032B4EE267000B4936 /* Detecting Images in AR */ = { isa = PBXGroup; children = ( - 2E04C9062B4EE2F3000B4936 /* ARImageLocator.swift */, 2E04C9082B503318000B4936 /* StatusViewController.swift */, - 2E04C90A2B503A6D000B4936 /* ARImageLocator+ARSessionDelegate.swift */, ); path = "Detecting Images in AR"; sourceTree = ""; @@ -199,6 +253,156 @@ path = Utilities; sourceTree = ""; }; + 2E18E1442BB2A68400D4C1E3 /* Sitting Posture */ = { + isa = PBXGroup; + children = ( + 2E70214A2B8EFC4000089680 /* ARPostureDetection.swift */, + 2E18E1452BB2A70900D4C1E3 /* ARPostureDetection+ARSessionDelegate.swift */, + ); + path = "Sitting Posture"; + sourceTree = ""; + }; + 2E18E1472BB2A7AB00D4C1E3 /* Standing Posture */ = { + isa = PBXGroup; + children = ( + 2E18E15A2BB2BB1500D4C1E3 /* StandingPostureVC.swift */, + 2E18E15C2BB2BB3500D4C1E3 /* SkeletonHelper */, + 2E18E1512BB2ABE300D4C1E3 /* Skeleton */, + ); + path = "Standing Posture"; + sourceTree = ""; + }; + 2E18E1482BB2A93B00D4C1E3 /* Enums */ = { + isa = PBXGroup; + children = ( + 2E18E1492BB2A94F00D4C1E3 /* Bones.swift */, + 2E18E14A2BB2A94F00D4C1E3 /* JointAngles.swift */, + ); + path = Enums; + sourceTree = ""; + }; + 2E18E1512BB2ABE300D4C1E3 /* Skeleton */ = { + isa = PBXGroup; + children = ( + 2E18E14E2BB2AAEE00D4C1E3 /* SkeletonBone.swift */, + 2E18E14D2BB2AAEE00D4C1E3 /* SkeletonJoint.swift */, + 2E18E1482BB2A93B00D4C1E3 /* Enums */, + ); + path = Skeleton; + sourceTree = ""; + }; + 2E18E15C2BB2BB3500D4C1E3 /* SkeletonHelper */ = { + isa = PBXGroup; + children = ( + 2E18E1522BB2AC9300D4C1E3 /* BodySkeleton.swift */, + 2E18E1532BB2AC9300D4C1E3 /* BodySkeleton+AngleDetection.swift */, + 2E18E1542BB2AC9300D4C1E3 /* BodySkeleton+Entity.swift */, + ); + path = SkeletonHelper; + sourceTree = ""; + }; + 2E2C35C22BE215CB00200E7E /* Face Detection with AR */ = { + isa = PBXGroup; + children = ( + 2E2C35C32BE215F100200E7E /* ARFaceDetection.swift 
*/, + 2E2C35C72BE2170100200E7E /* Cell */, + ); + path = "Face Detection with AR"; + sourceTree = ""; + }; + 2E2C35C72BE2170100200E7E /* Cell */ = { + isa = PBXGroup; + children = ( + 2E2C35C52BE216FC00200E7E /* ModelCollectionCell.swift */, + ); + path = Cell; + sourceTree = ""; + }; + 2E41DFC92BE220250012F773 /* Face Models */ = { + isa = PBXGroup; + children = ( + 2E7D4AF12BBADA0600DE0004 /* Metal_Round_Glasses.usdz */, + 2E41DFD02BE2206F0012F773 /* Animoji.usdz */, + 2E41DFCC2BE2206F0012F773 /* Cyclops.usdz */, + 2E41DFCB2BE2206F0012F773 /* Glasses.usdz */, + 2E41DFCA2BE2206F0012F773 /* Heart.usdz */, + 2E41DFCD2BE2206F0012F773 /* Neon.usdz */, + 2E41DFCE2BE2206F0012F773 /* Star.usdz */, + 2E41DFCF2BE2206F0012F773 /* Swag.usdz */, + ); + path = "Face Models"; + sourceTree = ""; + }; + 2E7021492B8EFBD500089680 /* Body Detection with AR */ = { + isa = PBXGroup; + children = ( + 2E70214A2B8EFC4000089680 /* ARPostureDetection.swift */, + 2E18E1452BB2A70900D4C1E3 /* ARPostureDetection+ARSessionDelegate.swift */, + ); + path = "Sitting Posture"; + sourceTree = ""; + }; + 2E18E1472BB2A7AB00D4C1E3 /* Standing Posture */ = { + isa = PBXGroup; + children = ( + 2E18E15A2BB2BB1500D4C1E3 /* StandingPostureVC.swift */, + 2E18E15C2BB2BB3500D4C1E3 /* SkeletonHelper */, + 2E18E1512BB2ABE300D4C1E3 /* Skeleton */, + ); + path = "Standing Posture"; + sourceTree = ""; + }; + 2E18E1482BB2A93B00D4C1E3 /* Enums */ = { + isa = PBXGroup; + children = ( + 2E18E1492BB2A94F00D4C1E3 /* Bones.swift */, + 2E18E14A2BB2A94F00D4C1E3 /* JointAngles.swift */, + ); + path = Enums; + sourceTree = ""; + }; + 2E18E1512BB2ABE300D4C1E3 /* Skeleton */ = { + isa = PBXGroup; + children = ( + 2E18E14E2BB2AAEE00D4C1E3 /* SkeletonBone.swift */, + 2E18E14D2BB2AAEE00D4C1E3 /* SkeletonJoint.swift */, + 2E18E1482BB2A93B00D4C1E3 /* Enums */, + ); + path = Skeleton; + sourceTree = ""; + }; + 2E18E15C2BB2BB3500D4C1E3 /* SkeletonHelper */ = { + isa = PBXGroup; + children = ( + 2E18E1522BB2AC9300D4C1E3 /* 
BodySkeleton.swift */, + 2E18E1532BB2AC9300D4C1E3 /* BodySkeleton+AngleDetection.swift */, + 2E18E1542BB2AC9300D4C1E3 /* BodySkeleton+Entity.swift */, + ); + path = SkeletonHelper; + sourceTree = ""; + }; + 2E7021492B8EFBD500089680 /* Body Detection with AR */ = { + isa = PBXGroup; + children = ( + 2E18E1472BB2A7AB00D4C1E3 /* Standing Posture */, + 2E18E1442BB2A68400D4C1E3 /* Sitting Posture */, + ); + path = "Body Detection with AR"; + sourceTree = ""; + }; + 2E7021522B95AC7900089680 /* Extensions */ = { + isa = PBXGroup; + children = ( + 2E7021532B95ACA100089680 /* Int+Extension.swift */, + 2E7021552B95ACD600089680 /* SCNVector3+Extension.swift */, + 2E7021572B95ACF900089680 /* Simd3+Extension.swift */, + 2E7021592B95B4B600089680 /* MeshResource+Extension.swift */, + 2E70215B2B95B65B00089680 /* Float+Extension.swift */, + 2E18E1582BB2AD2800D4C1E3 /* String+Extension.swift */, + ); + path = Extensions; + sourceTree = ""; + }; 2E7503222B30227F00DF78E1 /* Surface Detection */ = { isa = PBXGroup; children = ( @@ -222,8 +426,8 @@ children = ( 2E89AB4D2B481E88005EB695 /* Cell */, 2E9300F72B46961D002BF5D6 /* Model */, - 2E15A5962B32F6FC001EA792 /* Utilities */, 2E15A5992B32FE85001EA792 /* ARImageDetectorVC.swift */, + 2E15A5962B32F6FC001EA792 /* Utilities */, ); path = "Tracking and altering images"; sourceTree = ""; @@ -244,6 +448,16 @@ path = Model; sourceTree = ""; }; + 2E962C6C2B67A10400D0903D /* USDZ */ = { + isa = PBXGroup; + children = ( + 2E41DFC92BE220250012F773 /* Face Models */, + 2E962C8E2B69017200D0903D /* robot_walk_idle.usdz */, + 2E90B3EC2B87636800D3DB90 /* biped_robot.usdz */, + ); + path = USDZ; + sourceTree = ""; + }; 4904E7F520D7767D002F5210 /* Utility Extensions */ = { isa = PBXGroup; children = ( @@ -332,20 +546,26 @@ 4969EA8B20D109FC00F8AE9E /* ios12 Sampler */ = { isa = PBXGroup; children = ( + 2E962C6C2B67A10400D0903D /* USDZ */, 4969EA8C20D109FC00F8AE9E /* AppDelegate.swift */, + 2E70214E2B90A0D900089680 /* BaseCameraVC.swift */, 
49C5720F20D269E300602C7B /* AVMainVC.swift */, + 2E7021502B90A42700089680 /* AVDetailsVC.swift */, 4969EA9020D109FC00F8AE9E /* AVSharingWorldMapVC.swift */, 4968C0E420D14B3200D384F0 /* AVChoiceVC.swift */, 4968C0E220D14B1B00D384F0 /* AVScannedObjectListVC.swift */, 4968C0E020D12FE500D384F0 /* AVReadARObjectVC.swift */, 4931C78C20D3988E002F907B /* AVTextureEnvironment.swift */, 8127E0E220D7E5B500D8CD7F /* AVImageDetaction.swift */, + 2E7021492B8EFBD500089680 /* Body Detection with AR */, + 2E2C35C22BE215CB00200E7E /* Face Detection with AR */, 2E04C9032B4EE267000B4936 /* Detecting Images in AR */, 2E75034B2B31E00400DF78E1 /* Tracking and altering images */, 2E7503222B30227F00DF78E1 /* Surface Detection */, 4968C0E720D1569C00D384F0 /* ScanningObjectHelperController */, 4968C0E620D1561C00D384F0 /* MultiPeerHelperClass */, 4969EAC220D129A400F8AE9E /* Scanning Objects Helper Classes */, + 2E7021522B95AC7900089680 /* Extensions */, 8127E0E420D7E5F000D8CD7F /* Helper */, 4969EA9A20D109FD00F8AE9E /* Info.plist */, 4969EA9220D109FC00F8AE9E /* Main.storyboard */, @@ -509,9 +729,19 @@ 8127E0E820D7E6DC00D8CD7F /* giphy.gif in Resources */, 4969EA8F20D109FC00F8AE9E /* art.scnassets in Resources */, 4991D8F520D907B500BF6564 /* Loky.gif in Resources */, + 2E41DFD12BE2206F0012F773 /* Heart.usdz in Resources */, + 2E41DFD32BE2206F0012F773 /* Cyclops.usdz in Resources */, + 2E7D4AFC2BBADA0600DE0004 /* Metal_Round_Glasses.usdz in Resources */, + 2E41DFD62BE2206F0012F773 /* Swag.usdz in Resources */, 4969EA9920D109FD00F8AE9E /* LaunchScreen.storyboard in Resources */, + 2E962C8F2B69017200D0903D /* robot_walk_idle.usdz in Resources */, + 2E90B3ED2B87636800D3DB90 /* biped_robot.usdz in Resources */, + 2E41DFD22BE2206F0012F773 /* Glasses.usdz in Resources */, 4969EA9620D109FD00F8AE9E /* Assets.xcassets in Resources */, + 2E41DFD42BE2206F0012F773 /* Neon.usdz in Resources */, 4969EA9420D109FC00F8AE9E /* Main.storyboard in Resources */, + 2E41DFD52BE2206F0012F773 /* Star.usdz in Resources 
*/, + 2E41DFD72BE2206F0012F773 /* Animoji.usdz in Resources */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -536,55 +766,73 @@ isa = PBXSourcesBuildPhase; buildActionMask = 2147483647; files = ( + 2E18E1562BB2AC9300D4C1E3 /* BodySkeleton+AngleDetection.swift in Sources */, + 2E70215A2B95B4B600089680 /* MeshResource+Extension.swift in Sources */, 4931C78D20D3988F002F907B /* AVTextureEnvironment.swift in Sources */, 2E89AB4F2B481EAA005EB695 /* FilterCell.swift in Sources */, 4968C0E120D12FE500D384F0 /* AVReadARObjectVC.swift in Sources */, 4968C0DA20D12AA700D384F0 /* ThresholdPinchGestureRecognizer.swift in Sources */, 4968C0DF20D12BF500D384F0 /* ViewController+ApplicationState.swift in Sources */, + 2E7021582B95ACF900089680 /* Simd3+Extension.swift in Sources */, + 2E18E14F2BB2AAEE00D4C1E3 /* SkeletonJoint.swift in Sources */, + 2E7021542B95ACA100089680 /* Int+Extension.swift in Sources */, 49C5721020D269E300602C7B /* AVMainVC.swift in Sources */, - 2E04C90B2B503A6D000B4936 /* ARImageLocator+ARSessionDelegate.swift in Sources */, 2E9300F62B4683DC002BF5D6 /* AlteredImage.swift in Sources */, 4904E81020D776B3002F5210 /* DetectedBoundingBox.swift in Sources */, - 2E04C9072B4EE2F3000B4936 /* ARImageLocator.swift in Sources */, 4904E80E20D776B3002F5210 /* ObjectOriginAxis.swift in Sources */, + 2E70214B2B8EFC4000089680 /* ARPostureDetection.swift in Sources */, + 2E18E1552BB2AC9300D4C1E3 /* BodySkeleton.swift in Sources */, 4904E7EB20D77386002F5210 /* ViewController+SessionInfo.swift in Sources */, 4968C0CB20D12AA700D384F0 /* FlashlightButton.swift in Sources */, 2E15A5982B32F720001EA792 /* RectangleDetector.swift in Sources */, 4968C0DE20D12BF100D384F0 /* ScanObjectsVC.swift in Sources */, 2E89AB4A2B46DBD8005EB695 /* ImageTrackingUtility.swift in Sources */, 4904E80B20D776B3002F5210 /* BoundingBoxSide.swift in Sources */, + 2E18E14B2BB2A94F00D4C1E3 /* Bones.swift in Sources */, 2E89AB4C2B47E11C005EB695 /* VisuallizationNode.swift in Sources */, 
4904E7F920D7767E002F5210 /* ARCameraTrackingState.swift in Sources */, 4969EABD20D10AAC00F8AE9E /* MultipeerSession.swift in Sources */, 2E7503282B30640100DF78E1 /* ARSurfaceDetectionVC.swift in Sources */, + 2E7021562B95ACD600089680 /* SCNVector3+Extension.swift in Sources */, 4968C0E520D14B3200D384F0 /* AVChoiceVC.swift in Sources */, + 2E2C35C42BE215F100200E7E /* ARFaceDetection.swift in Sources */, 4969EA9120D109FC00F8AE9E /* AVSharingWorldMapVC.swift in Sources */, 4904E80F20D776B3002F5210 /* PointCloud+CreateVisualization.swift in Sources */, 2E75032D2B30913E00DF78E1 /* Utility.swift in Sources */, 4904E81220D776B3002F5210 /* ObjectOrigin.swift in Sources */, 4968C0A120D12A6500D384F0 /* ViewController+Gestures.swift in Sources */, + 2E18E15B2BB2BB1500D4C1E3 /* StandingPostureVC.swift in Sources */, + 2E18E1592BB2AD2800D4C1E3 /* String+Extension.swift in Sources */, 4904E81120D776B3002F5210 /* Tile.swift in Sources */, 4904E80720D776B3002F5210 /* ScannedPointCloud.swift in Sources */, 4904E7EC20D77386002F5210 /* TestRun.swift in Sources */, 2E75032B2B3079FB00DF78E1 /* Plane.swift in Sources */, + 2E18E1462BB2A70900D4C1E3 /* ARPostureDetection+ARSessionDelegate.swift in Sources */, 4968C0D920D12AA700D384F0 /* ThresholdRotationGestureRecognizer.swift in Sources */, 4904E80D20D776B3002F5210 /* DetectedPointCloud.swift in Sources */, + 2E18E14C2BB2A94F00D4C1E3 /* JointAngles.swift in Sources */, 8127E0E620D7E60100D8CD7F /* iOSDevCenters+GIF.swift in Sources */, 4904E7F820D7767E002F5210 /* Utilities.swift in Sources */, + 2E7021512B90A42700089680 /* AVDetailsVC.swift in Sources */, 2E04C9092B503318000B4936 /* StatusViewController.swift in Sources */, + 2E70214F2B90A0D900089680 /* BaseCameraVC.swift in Sources */, 4904E80820D776B3002F5210 /* ScannedObject.swift in Sources */, 4968C0E320D14B1B00D384F0 /* AVScannedObjectListVC.swift in Sources */, 4904E7EE20D77386002F5210 /* Scan.swift in Sources */, + 2E18E1502BB2AAEE00D4C1E3 /* SkeletonBone.swift in Sources */, 
4904E7EF20D77386002F5210 /* ShareScanViewController.swift in Sources */, 4904E7ED20D77386002F5210 /* ViewController+NavigationBar.swift in Sources */, 4969EA8D20D109FC00F8AE9E /* AppDelegate.swift in Sources */, 4904E80A20D776B3002F5210 /* BoundingBox.swift in Sources */, 8127E0E320D7E5B500D8CD7F /* AVImageDetaction.swift in Sources */, + 2E18E1572BB2AC9300D4C1E3 /* BodySkeleton+Entity.swift in Sources */, 4904E80920D776B3002F5210 /* Wireframe.swift in Sources */, + 2E70215C2B95B65B00089680 /* Float+Extension.swift in Sources */, 4968C0CA20D12AA700D384F0 /* MessageLabel.swift in Sources */, 4968C0DB20D12AA700D384F0 /* ThresholdPanGestureRecognizer.swift in Sources */, 4904E80C20D776B3002F5210 /* DetectedObject.swift in Sources */, 2E15A59A2B32FE85001EA792 /* ARImageDetectorVC.swift in Sources */, + 2E2C35C62BE216FC00200E7E /* ModelCollectionCell.swift in Sources */, 2E9300F92B469633002BF5D6 /* StyleTransferModel.mlpackage in Sources */, 4968C0CC20D12AA700D384F0 /* RoundedButton.swift in Sources */, ); @@ -762,6 +1010,7 @@ CODE_SIGN_STYLE = Automatic; DEVELOPMENT_TEAM = K7XJG666ZW; INFOPLIST_FILE = "ios12 Sampler/Info.plist"; + IPHONEOS_DEPLOYMENT_TARGET = 15.0; LD_RUNPATH_SEARCH_PATHS = ( "$(inherited)", "@executable_path/Frameworks", @@ -780,6 +1029,7 @@ CODE_SIGN_STYLE = Automatic; DEVELOPMENT_TEAM = K7XJG666ZW; INFOPLIST_FILE = "ios12 Sampler/Info.plist"; + IPHONEOS_DEPLOYMENT_TARGET = 15.0; LD_RUNPATH_SEARCH_PATHS = ( "$(inherited)", "@executable_path/Frameworks", diff --git a/iOS12_Sampler/ios12 Sampler/AVDetailsVC.swift b/iOS12_Sampler/ios12 Sampler/AVDetailsVC.swift new file mode 100644 index 0000000..c8885cf --- /dev/null +++ b/iOS12_Sampler/ios12 Sampler/AVDetailsVC.swift @@ -0,0 +1,48 @@ +// +// AVDetailsVC.swift +// ios12 Sampler +// +// Created by Dhruvil Vora on 29/02/24. +// Copyright © 2024 Testing. All rights reserved. +// + +import UIKit + +class AVDetailsVC: BaseCameraVC { + + @IBOutlet weak var cameraView: UIView! 
+ + override func viewDidLoad() { + super.viewDidLoad() + // Do any additional setup after loading the view. + cameraView.layer.addSublayer(prevLayer!) + } + + override func viewWillAppear(_ animated: Bool) { + super.viewWillAppear(animated) + } + + override func viewWillDisappear(_ animated: Bool) { + super.viewWillDisappear(animated) + } + + @IBAction func btnLiveImageFilterClicked(_ sender: UIButton) { + let vc = self.storyboard?.instantiateViewController(withIdentifier: "ARImageDetectorVC") as? ARImageDetectorVC + self.navigationController?.pushViewController(vc!, animated: true) + } + + @IBAction func btnSurfaceDetectionClicked(_ sender: UIButton) { + let vc = self.storyboard?.instantiateViewController(withIdentifier: "ARFaceDetection") as? ARFaceDetection + self.navigationController?.pushViewController(vc!, animated: true) + } + + @IBAction func btnSittingPostureClicked(_ sender: UIButton) { + let vc = self.storyboard?.instantiateViewController(withIdentifier: "ARPostureDetection") as? ARPostureDetection + self.navigationController?.pushViewController(vc!, animated: true) + } + + @IBAction func btnStandingPostureClicked(_ sender: UIButton) { + let vc = self.storyboard?.instantiateViewController(withIdentifier: "StandingPostureVC") as? StandingPostureVC + self.navigationController?.pushViewController(vc!, animated: true) + } +} diff --git a/iOS12_Sampler/ios12 Sampler/AVMainVC.swift b/iOS12_Sampler/ios12 Sampler/AVMainVC.swift index 3a9ae46..951831f 100644 --- a/iOS12_Sampler/ios12 Sampler/AVMainVC.swift +++ b/iOS12_Sampler/ios12 Sampler/AVMainVC.swift @@ -9,37 +9,25 @@ import UIKit import AVFoundation -class AVMainVC: UIViewController, AVCaptureMetadataOutputObjectsDelegate { +class AVMainVC: BaseCameraVC { + + @IBOutlet weak var cameraView: UIView! - var session: AVCaptureSession? - var device: AVCaptureDevice? - var input: AVCaptureDeviceInput? - var output: AVCaptureMetadataOutput? - var prevLayer: AVCaptureVideoPreviewLayer? 
- - @IBOutlet weak var CameraView: UIView! - override func viewDidLoad() { super.viewDidLoad() - createSession() // Do any additional setup after loading the view. + cameraView.layer.addSublayer(prevLayer!) } override func viewWillAppear(_ animated: Bool) { super.viewWillAppear(animated) - self.navigationController?.isNavigationBarHidden = true } override func viewWillDisappear(_ animated: Bool) { super.viewWillDisappear(animated) - self.navigationController?.isNavigationBarHidden = false } @IBAction func btnActionWorldSharing(_ sender: Any) { - let vc = self.storyboard?.instantiateViewController(withIdentifier: "ARImageLocator") as? ARImageLocator + let vc = self.storyboard?.instantiateViewController(withIdentifier: "ARImageDetectorVC") as? ARImageDetectorVC self.navigationController?.pushViewController(vc!, animated: true) -// let vc = self.storyboard?.instantiateViewController(withIdentifier: "ARSurfaceDetectionVC") as? ARSurfaceDetectionVC -// self.navigationController?.pushViewController(vc!, animated: true) -// let vc = self.storyboard?.instantiateViewController(withIdentifier: "AVSharingWorldMapVC") as? AVSharingWorldMapVC -// self.navigationController?.pushViewController(vc!, animated: true) } @IBAction func btnActionScanAndDetectObjects(_ sender: UIButton) { @@ -54,35 +42,9 @@ class AVMainVC: UIViewController, AVCaptureMetadataOutputObjectsDelegate { let vc = self.storyboard?.instantiateViewController(withIdentifier: "AVTextureEnvironment") as? AVTextureEnvironment self.navigationController?.pushViewController(vc!, animated: true) } - func createSession() { - session = AVCaptureSession() - device = AVCaptureDevice.default(for: .video) - - var error: NSError? = nil - do { - if device != nil { - input = try AVCaptureDeviceInput(device: device!) - } - } catch { - print(error) - } - - if error == nil { - if input != nil { - session?.addInput(input!) 
- } - } else { - print("camera input error: \(String(describing: error))") - } - - prevLayer = AVCaptureVideoPreviewLayer(session: session!) - let del = UIApplication.shared.delegate as? AppDelegate - prevLayer?.frame = (del?.window?.frame)! - prevLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill - CameraView.layer.addSublayer(prevLayer!) - DispatchQueue.global().async { - self.session?.startRunning() - } - } + @IBAction func btnMoreClicked(_ sender: UIButton) { + let vc = self.storyboard?.instantiateViewController(withIdentifier: "AVDetailsVC") as? AVDetailsVC + self.navigationController?.pushViewController(vc!, animated: true) + } } diff --git a/iOS12_Sampler/ios12 Sampler/Assets.xcassets/Models/Contents.json b/iOS12_Sampler/ios12 Sampler/Assets.xcassets/Models/Contents.json new file mode 100644 index 0000000..73c0059 --- /dev/null +++ b/iOS12_Sampler/ios12 Sampler/Assets.xcassets/Models/Contents.json @@ -0,0 +1,6 @@ +{ + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/iOS12_Sampler/ios12 Sampler/Assets.xcassets/Models/Cyclops.imageset/Contents.json b/iOS12_Sampler/ios12 Sampler/Assets.xcassets/Models/Cyclops.imageset/Contents.json new file mode 100644 index 0000000..b9d7b65 --- /dev/null +++ b/iOS12_Sampler/ios12 Sampler/Assets.xcassets/Models/Cyclops.imageset/Contents.json @@ -0,0 +1,21 @@ +{ + "images" : [ + { + "idiom" : "universal", + "scale" : "1x" + }, + { + "filename" : "Cyclops.png", + "idiom" : "universal", + "scale" : "2x" + }, + { + "idiom" : "universal", + "scale" : "3x" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/iOS12_Sampler/ios12 Sampler/Assets.xcassets/Models/Cyclops.imageset/Cyclops.png b/iOS12_Sampler/ios12 Sampler/Assets.xcassets/Models/Cyclops.imageset/Cyclops.png new file mode 100644 index 0000000..0a1f703 Binary files /dev/null and b/iOS12_Sampler/ios12 Sampler/Assets.xcassets/Models/Cyclops.imageset/Cyclops.png differ diff --git a/iOS12_Sampler/ios12 
Sampler/Assets.xcassets/Models/Glasses.imageset/Contents.json b/iOS12_Sampler/ios12 Sampler/Assets.xcassets/Models/Glasses.imageset/Contents.json new file mode 100644 index 0000000..668a8e6 --- /dev/null +++ b/iOS12_Sampler/ios12 Sampler/Assets.xcassets/Models/Glasses.imageset/Contents.json @@ -0,0 +1,21 @@ +{ + "images" : [ + { + "idiom" : "universal", + "scale" : "1x" + }, + { + "filename" : "Glasses.png", + "idiom" : "universal", + "scale" : "2x" + }, + { + "idiom" : "universal", + "scale" : "3x" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/iOS12_Sampler/ios12 Sampler/Assets.xcassets/Models/Glasses.imageset/Glasses.png b/iOS12_Sampler/ios12 Sampler/Assets.xcassets/Models/Glasses.imageset/Glasses.png new file mode 100644 index 0000000..39071f8 Binary files /dev/null and b/iOS12_Sampler/ios12 Sampler/Assets.xcassets/Models/Glasses.imageset/Glasses.png differ diff --git a/iOS12_Sampler/ios12 Sampler/Assets.xcassets/Models/Heart.imageset/Contents.json b/iOS12_Sampler/ios12 Sampler/Assets.xcassets/Models/Heart.imageset/Contents.json new file mode 100644 index 0000000..ba142fa --- /dev/null +++ b/iOS12_Sampler/ios12 Sampler/Assets.xcassets/Models/Heart.imageset/Contents.json @@ -0,0 +1,21 @@ +{ + "images" : [ + { + "idiom" : "universal", + "scale" : "1x" + }, + { + "filename" : "heart2.png", + "idiom" : "universal", + "scale" : "2x" + }, + { + "idiom" : "universal", + "scale" : "3x" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/iOS12_Sampler/ios12 Sampler/Assets.xcassets/Models/Heart.imageset/heart2.png b/iOS12_Sampler/ios12 Sampler/Assets.xcassets/Models/Heart.imageset/heart2.png new file mode 100644 index 0000000..9934e7d Binary files /dev/null and b/iOS12_Sampler/ios12 Sampler/Assets.xcassets/Models/Heart.imageset/heart2.png differ diff --git a/iOS12_Sampler/ios12 Sampler/Assets.xcassets/Models/Neon.imageset/Contents.json b/iOS12_Sampler/ios12 
Sampler/Assets.xcassets/Models/Neon.imageset/Contents.json new file mode 100644 index 0000000..738922b --- /dev/null +++ b/iOS12_Sampler/ios12 Sampler/Assets.xcassets/Models/Neon.imageset/Contents.json @@ -0,0 +1,21 @@ +{ + "images" : [ + { + "idiom" : "universal", + "scale" : "1x" + }, + { + "filename" : "Neon.png", + "idiom" : "universal", + "scale" : "2x" + }, + { + "idiom" : "universal", + "scale" : "3x" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/iOS12_Sampler/ios12 Sampler/Assets.xcassets/Models/Neon.imageset/Neon.png b/iOS12_Sampler/ios12 Sampler/Assets.xcassets/Models/Neon.imageset/Neon.png new file mode 100644 index 0000000..d63b03e Binary files /dev/null and b/iOS12_Sampler/ios12 Sampler/Assets.xcassets/Models/Neon.imageset/Neon.png differ diff --git a/iOS12_Sampler/ios12 Sampler/Assets.xcassets/Models/Robo.imageset/Contents.json b/iOS12_Sampler/ios12 Sampler/Assets.xcassets/Models/Robo.imageset/Contents.json new file mode 100644 index 0000000..fc9aa21 --- /dev/null +++ b/iOS12_Sampler/ios12 Sampler/Assets.xcassets/Models/Robo.imageset/Contents.json @@ -0,0 +1,21 @@ +{ + "images" : [ + { + "idiom" : "universal", + "scale" : "1x" + }, + { + "filename" : "Robo.png", + "idiom" : "universal", + "scale" : "2x" + }, + { + "idiom" : "universal", + "scale" : "3x" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/iOS12_Sampler/ios12 Sampler/Assets.xcassets/Models/Robo.imageset/Robo.png b/iOS12_Sampler/ios12 Sampler/Assets.xcassets/Models/Robo.imageset/Robo.png new file mode 100644 index 0000000..7eb378b Binary files /dev/null and b/iOS12_Sampler/ios12 Sampler/Assets.xcassets/Models/Robo.imageset/Robo.png differ diff --git a/iOS12_Sampler/ios12 Sampler/Assets.xcassets/Models/Star.imageset/Contents.json b/iOS12_Sampler/ios12 Sampler/Assets.xcassets/Models/Star.imageset/Contents.json new file mode 100644 index 0000000..e2041ea --- /dev/null +++ b/iOS12_Sampler/ios12 
Sampler/Assets.xcassets/Models/Star.imageset/Contents.json @@ -0,0 +1,21 @@ +{ + "images" : [ + { + "idiom" : "universal", + "scale" : "1x" + }, + { + "filename" : "Star.png", + "idiom" : "universal", + "scale" : "2x" + }, + { + "idiom" : "universal", + "scale" : "3x" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/iOS12_Sampler/ios12 Sampler/Assets.xcassets/Models/Star.imageset/Star.png b/iOS12_Sampler/ios12 Sampler/Assets.xcassets/Models/Star.imageset/Star.png new file mode 100644 index 0000000..a3e472e Binary files /dev/null and b/iOS12_Sampler/ios12 Sampler/Assets.xcassets/Models/Star.imageset/Star.png differ diff --git a/iOS12_Sampler/ios12 Sampler/Assets.xcassets/Models/Swag.imageset/Contents.json b/iOS12_Sampler/ios12 Sampler/Assets.xcassets/Models/Swag.imageset/Contents.json new file mode 100644 index 0000000..2c8dd00 --- /dev/null +++ b/iOS12_Sampler/ios12 Sampler/Assets.xcassets/Models/Swag.imageset/Contents.json @@ -0,0 +1,21 @@ +{ + "images" : [ + { + "idiom" : "universal", + "scale" : "1x" + }, + { + "filename" : "Swag.png", + "idiom" : "universal", + "scale" : "2x" + }, + { + "idiom" : "universal", + "scale" : "3x" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/iOS12_Sampler/ios12 Sampler/Assets.xcassets/Models/Swag.imageset/Swag.png b/iOS12_Sampler/ios12 Sampler/Assets.xcassets/Models/Swag.imageset/Swag.png new file mode 100644 index 0000000..e996f7e Binary files /dev/null and b/iOS12_Sampler/ios12 Sampler/Assets.xcassets/Models/Swag.imageset/Swag.png differ diff --git a/iOS12_Sampler/ios12 Sampler/Base.lproj/Main.storyboard b/iOS12_Sampler/ios12 Sampler/Base.lproj/Main.storyboard index e81e1e4..392e913 100644 --- a/iOS12_Sampler/ios12 Sampler/Base.lproj/Main.storyboard +++ b/iOS12_Sampler/ios12 Sampler/Base.lproj/Main.storyboard @@ -1,9 +1,9 @@ - + - + @@ -45,10 +45,27 @@ + + + + + + + + - + @@ -92,7 +109,7 @@ Sharing - + @@ -138,7 +155,7 @@ Texturing - + @@ -183,7 +200,7 @@ Texturing 
- + @@ -243,8 +260,8 @@ Detection - - + + @@ -262,7 +279,7 @@ Detection - + @@ -1342,6 +1359,376 @@ New Object + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + @@ -1362,6 +1749,7 @@ New Object + @@ -1377,5 +1765,8 @@ New Object + + + diff --git a/iOS12_Sampler/ios12 Sampler/BaseCameraVC.swift b/iOS12_Sampler/ios12 Sampler/BaseCameraVC.swift new file mode 100644 index 0000000..a7ed723 --- /dev/null +++ b/iOS12_Sampler/ios12 Sampler/BaseCameraVC.swift @@ -0,0 +1,65 @@ +// +// BaseCameraVC.swift +// ios12 Sampler +// +// Created by Dhruvil Vora on 29/02/24. +// Copyright © 2024 Testing. All rights reserved. +// + +import UIKit +import AVFoundation + +class BaseCameraVC: UIViewController, AVCaptureMetadataOutputObjectsDelegate { + + // AVCapture variables + private var session: AVCaptureSession? + private var device: AVCaptureDevice? + private var input: AVCaptureDeviceInput? + private var output: AVCaptureMetadataOutput? + var prevLayer: AVCaptureVideoPreviewLayer? + + override func viewDidLoad() { + super.viewDidLoad() + createSession() + // Do any additional setup after loading the view. 
+ } + + override func viewWillAppear(_ animated: Bool) { + super.viewWillAppear(animated) + self.navigationController?.isNavigationBarHidden = true + } + override func viewWillDisappear(_ animated: Bool) { + super.viewWillDisappear(animated) + self.navigationController?.isNavigationBarHidden = false + } + + func createSession() { + session = AVCaptureSession() + device = AVCaptureDevice.default(for: .video) + + var error: NSError? = nil + do { + if device != nil { + input = try AVCaptureDeviceInput(device: device!) + } + } catch { + print(error) + } + + if error == nil { + if input != nil { + session?.addInput(input!) + } + } else { + print("camera input error: \(String(describing: error))") + } + + prevLayer = AVCaptureVideoPreviewLayer(session: session!) + let del = UIApplication.shared.delegate as? AppDelegate + prevLayer?.frame = (del?.window?.frame)! + prevLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill + DispatchQueue.global().async { + self.session?.startRunning() + } + } +} diff --git a/iOS12_Sampler/ios12 Sampler/Body Detection with AR/Sitting Posture/ARPostureDetection+ARSessionDelegate.swift b/iOS12_Sampler/ios12 Sampler/Body Detection with AR/Sitting Posture/ARPostureDetection+ARSessionDelegate.swift new file mode 100644 index 0000000..532f14b --- /dev/null +++ b/iOS12_Sampler/ios12 Sampler/Body Detection with AR/Sitting Posture/ARPostureDetection+ARSessionDelegate.swift @@ -0,0 +1,59 @@ +// +// ARPostureDetection+ARSessionDelegate.swift +// ios12 Sampler +// +// Created by Dhruvil Vora on 26/03/24. +// Copyright © 2024 Testing. All rights reserved. +// + +import RealityKit +import ARKit + +// MARK: ARSessionDelegate +extension ARPostureDetection: ARSessionDelegate { + func session(_ session: ARSession, didUpdate anchors: [ARAnchor]) { + for anchor in anchors { + /// Return if there is no body det3ected + guard let bodyAnchor = anchor as? 
ARBodyAnchor else { return } + /// Fetch the skeleton from the body anchor, which provides the transforms for all joints + let bodyAnchorTransform = Transform(matrix: bodyAnchor.transform).translation + meshAnchor.position = bodyAnchorTransform + let skeleton = bodyAnchor.skeleton + /// For this use case we only have to consider "left_leg_joint" & "spine_7_joint", as we have to find the sitting posture angle correctly + guard let leftLegJointTransform = skeleton.modelTransform(for: ARSkeleton.JointName(rawValue: "left_leg_joint")), + let spine7JointJointTransform = skeleton.modelTransform(for: ARSkeleton.JointName(rawValue: "spine_7_joint")) + else { return } + /// Then find the angle between the 2 positions + let abc = bodyAnchorTransform + Transform(matrix: leftLegJointTransform).translation + let xyz = bodyAnchorTransform + Transform(matrix: spine7JointJointTransform).translation + let angle = SIMD3.angleBetween(v1: abc, v2: xyz) + // let angle1 = Transform(matrix: leftLegJointTransform).translation + // .angle(v: Transform(matrix: spine7JointJointTransform).translation) + /// Then convert radians to degrees and update the label + let radToDeg = angle * 180.0 / Float.pi + DispatchQueue.main.async { + self.labelView.text = "Angle for posture :- \(Int(radToDeg))" + } + + /// Align the anchor's orientation with the bodyAnchor's rotation + meshAnchor.orientation = Transform(matrix: bodyAnchor.transform).rotation + /// Find out towards which side the face is facing + let isFacingleft = meshAnchor.orientation.imag.y.isNegative + /// Reset all orientation to its default + meshAnchor.setOrientation(simd_quatf(real: 1, imag: [0,0,0]), relativeTo: nil) + + /// If the meshEntity does not have a parent, it means meshEntity is not yet added to the anchor, + /// so we need to create an entity and add it to the ARAnchor + if meshEntity?.parent == nil { + guard let mesh = MeshResource.createSemiCircleMeshForAngle(angle: (radToDeg), isFacingLeft: isFacingleft) else { return
} + meshEntity = generateEntityFromMesh(mesh: mesh, angle: Int(radToDeg)) + meshAnchor.addChild(meshEntity!) + } + /// so now instead of creating whole 2d shape again and again we will just update its mesh + /// according to the newly updated angle + else { + updateSemiCircleMesh(forAngle: Double(radToDeg), modelEntity: meshEntity, isFacingLeft: isFacingleft) + } + } + } +} diff --git a/iOS12_Sampler/ios12 Sampler/Body Detection with AR/Sitting Posture/ARPostureDetection.swift b/iOS12_Sampler/ios12 Sampler/Body Detection with AR/Sitting Posture/ARPostureDetection.swift new file mode 100644 index 0000000..ec7382c --- /dev/null +++ b/iOS12_Sampler/ios12 Sampler/Body Detection with AR/Sitting Posture/ARPostureDetection.swift @@ -0,0 +1,80 @@ +// +// ARPostureDetection.swift +// ios12 Sampler +// +// Created by Dhruvil Vora on 28/02/24. +// Copyright © 2024 Testing. All rights reserved. +// + +import UIKit +import RealityKit +import ARKit +import Combine +import SwiftUI + +class ARPostureDetection: UIViewController { + + @IBOutlet var arView: ARView! + + // The 3D character to display. + var meshEntity: ModelEntity? + var meshAnchor = AnchorEntity() + var character: BodyTrackedEntity? + + let labelView: UILabel = { + let label = UILabel() + label.text = "Angle" + label.font = UIFont.systemFont(ofSize: 20, weight: .semibold) + label.textColor = UIColor.green + return label + }() + + override func viewDidLoad() { + loadLabel() + } + + override func viewDidAppear(_ animated: Bool) { + super.viewDidAppear(animated) + arView.session.delegate = self + + // If the iOS device doesn't support body tracking, raise a developer error for + // this unhandled case. + guard ARBodyTrackingConfiguration.isSupported else { + fatalError("This feature is only supported on devices with an A12 chip") + } + + // Run a body tracking configration. 
+ let configuration = ARBodyTrackingConfiguration() + arView.session.run(configuration) + self.arView.scene.addAnchor(meshAnchor) + } + + private func loadLabel() { + arView.addSubview(labelView) + labelView.translatesAutoresizingMaskIntoConstraints = false + labelView.bottomAnchor.constraint(equalTo: arView.bottomAnchor, constant: -20).isActive = true + labelView.leadingAnchor.constraint(equalTo: arView.leadingAnchor, constant: 0).isActive = true + labelView.trailingAnchor.constraint(equalTo: arView.trailingAnchor, constant: 0).isActive = true + labelView.textAlignment = .center + } +} + +/// Used to Generate/Update mesh +extension ARPostureDetection { + func updateSemiCircleMesh(forAngle: Double, modelEntity: ModelEntity?, isFacingLeft: Bool) { + guard let entity = modelEntity, + let mesh = MeshResource.createSemiCircleMeshForAngle(angle: Float(forAngle), + isFacingLeft: isFacingLeft) else { return } + entity.model?.materials[0] = SimpleMaterial(color: Int(forAngle).postureIntensityColor(), + isMetallic: false) + entity.model?.mesh = mesh + } + + func generateEntityFromMesh(mesh: MeshResource, angle: Int) -> ModelEntity { + let material = SimpleMaterial(color: angle.postureIntensityColor(), isMetallic: false) + let entity = ModelEntity(mesh: mesh, materials: [material]) + entity.setScale([0.35, 0.35, 0.35], relativeTo: nil) + return entity + } +} + diff --git a/iOS12_Sampler/ios12 Sampler/Body Detection with AR/Standing Posture/Skeleton/Enums/Bones.swift b/iOS12_Sampler/ios12 Sampler/Body Detection with AR/Standing Posture/Skeleton/Enums/Bones.swift new file mode 100644 index 0000000..5c852dd --- /dev/null +++ b/iOS12_Sampler/ios12 Sampler/Body Detection with AR/Standing Posture/Skeleton/Enums/Bones.swift @@ -0,0 +1,204 @@ +// +// Bones.swift +// BodyDetection +// +// Created by Dhruvil Vora on 08/03/24. +// Copyright © 2024 Apple. All rights reserved. 
+// + +import Foundation + +enum Bones: CaseIterable { + + case neck1ToNeck2 + case neck2ToNeck3 + case neck3ToNeck4 + case neck4ToHead + case headToNose + case headToRightEye + case headToLeftEye + + case neckToLeftShoulder + case leftShoulderToLeftArm + case leftArmToLeftForeArm + case leftForeArmToLeftHand + + case neckToRightShoulder + case rightShoulderToLeftArm + case rightArmToLeftForeArm + case rightForeArmToLeftHand + + case neckToSpine7 + case spine7ToSpine6 + case spine6ToSpine5 + case spine5ToSpine4 + case spine4ToSpine3 + case spine3ToSpine2 + case spine2ToSpine1 + + case spineToleftUpLeg + case spineToRightUpLeg + + case leftUpLegToLeftLegJoint + case leftLegJointToLeftFootJoint + case leftFootJointToLeftToesJoint + case leftToesJointToLeftToesEndJoint + + case rightUpLegToLeftLegJoint + case rightLegJointToLeftFootJoint + case rightFootJointToLeftToesJoint + case rightToesJointToLeftToesEndJoint + + var name: String { + return "\(jointFromName)-\(jointToName)" + } + + var jointFromName: String { + switch self { + case .neck1ToNeck2: + return "neck_1_joint" + case .neck2ToNeck3: + return "neck_2_joint" + case .neck3ToNeck4: + return "neck_3_joint" + case .neck4ToHead: + return "neck_4_joint" + case .headToNose: + return "head_joint" + case .headToRightEye: + return "head_joint" + case .headToLeftEye: + return "head_joint" + + case .neckToLeftShoulder: + return "neck_1_joint" + case .leftShoulderToLeftArm: + return "left_shoulder_1_joint" + case .leftArmToLeftForeArm: + return "left_arm_joint" + case .leftForeArmToLeftHand: + return "left_forearm_joint" + + case .neckToRightShoulder: + return "neck_1_joint" + case .rightShoulderToLeftArm: + return "right_shoulder_1_joint" + case .rightArmToLeftForeArm: + return "right_arm_joint" + case .rightForeArmToLeftHand: + return "right_forearm_joint" + + case .neckToSpine7: + return "neck_1_joint" + case .spine7ToSpine6: + return "spine_7_joint" + case .spine6ToSpine5: + return "spine_6_joint" + case 
.spine5ToSpine4: + return "spine_5_joint" + case .spine4ToSpine3: + return "spine_4_joint" + case .spine3ToSpine2: + return "spine_3_joint" + case .spine2ToSpine1: + return "spine_2_joint" + + case .spineToleftUpLeg: + return "spine_1_joint" + case .spineToRightUpLeg: + return "spine_1_joint" + + case .leftUpLegToLeftLegJoint: + return "left_upLeg_joint" + case .leftLegJointToLeftFootJoint: + return "left_leg_joint" + case .leftFootJointToLeftToesJoint: + return "left_foot_joint" + case .leftToesJointToLeftToesEndJoint: + return "left_toes_joint" + case .rightUpLegToLeftLegJoint: + return "right_upLeg_joint" + case .rightLegJointToLeftFootJoint: + return "right_leg_joint" + case .rightFootJointToLeftToesJoint: + return "right_foot_joint" + case .rightToesJointToLeftToesEndJoint: + return "right_toes_joint" + } + } + + var jointToName: String { + switch self { + case .neck1ToNeck2: + return "neck_2_joint" + case .neck2ToNeck3: + return "neck_3_joint" + case .neck3ToNeck4: + return "neck_4_joint" + case .neck4ToHead: + return "head_joint" + case .headToNose: + return "nose_joint" + case .headToRightEye: + return "right_eye_joint" + case .headToLeftEye: + return "left_eye_joint" + + + case .neckToLeftShoulder: + return "left_shoulder_1_joint" + case .leftShoulderToLeftArm: + return "left_arm_joint" + case .leftArmToLeftForeArm: + return "left_forearm_joint" + case .leftForeArmToLeftHand: + return "left_hand_joint" + + case .neckToRightShoulder: + return "right_shoulder_1_joint" + case .rightShoulderToLeftArm: + return "right_arm_joint" + case .rightArmToLeftForeArm: + return "right_forearm_joint" + case .rightForeArmToLeftHand: + return "right_hand_joint" + + case .neckToSpine7: + return "spine_7_joint" + case .spine7ToSpine6: + return "spine_6_joint" + case .spine6ToSpine5: + return "spine_5_joint" + case .spine5ToSpine4: + return "spine_4_joint" + case .spine4ToSpine3: + return "spine_3_joint" + case .spine3ToSpine2: + return "spine_2_joint" + case .spine2ToSpine1: 
+ return "spine_1_joint" + + case .spineToleftUpLeg: + return "left_upLeg_joint" + case .spineToRightUpLeg: + return "right_upLeg_joint" + + case .leftUpLegToLeftLegJoint: + return "left_leg_joint" + case .leftLegJointToLeftFootJoint: + return "left_foot_joint" + case .leftFootJointToLeftToesJoint: + return "left_toes_joint" + case .leftToesJointToLeftToesEndJoint: + return "left_toesEnd_joint" + case .rightUpLegToLeftLegJoint: + return "right_leg_joint" + case .rightLegJointToLeftFootJoint: + return "right_foot_joint" + case .rightFootJointToLeftToesJoint: + return "right_toes_joint" + case .rightToesJointToLeftToesEndJoint: + return "right_toesEnd_joint" + } + } +} diff --git a/iOS12_Sampler/ios12 Sampler/Body Detection with AR/Standing Posture/Skeleton/Enums/JointAngles.swift b/iOS12_Sampler/ios12 Sampler/Body Detection with AR/Standing Posture/Skeleton/Enums/JointAngles.swift new file mode 100644 index 0000000..49d6565 --- /dev/null +++ b/iOS12_Sampler/ios12 Sampler/Body Detection with AR/Standing Posture/Skeleton/Enums/JointAngles.swift @@ -0,0 +1,74 @@ +// +// JointAngles.swift +// BodyDetection +// +// Created by Dhruvil Vora on 13/03/24. +// Copyright © 2024 Apple. All rights reserved. 
+// + +import Foundation +enum AngleInBetween: String, CaseIterable { + case left_upLeg_joint + case left_leg_joint + case right_upLeg_joint + case right_leg_joint + case left_arm_joint + case left_forearm_joint + case right_arm_joint + case right_forearm_joint + /// TODO :- Need to find an accurate way to get foot joint + // case left_foot_joint + // case right_foot_joint + + var angleFrom: String { + switch self { + case .left_upLeg_joint: + "left_shoulder_1_joint" + case .left_leg_joint: //knee + "left_upLeg_joint" + case .right_upLeg_joint: + "right_shoulder_1_joint" + case .right_leg_joint: //knee + "right_upLeg_joint" + case .left_arm_joint: // shoulder + "left_forearm_joint"//"left_handMidEnd_joint" + case .left_forearm_joint: // elbow + "left_arm_joint" + case .right_arm_joint: // shoulder + "right_forearm_joint" // "right_handMidEnd_joint" + case .right_forearm_joint: // elbow + "right_arm_joint" + /// TODO :- Need to find an accurate way to get foot joint +// case .left_foot_joint: +// "left_leg_joint" +// case .right_foot_joint: +// "right_leg_joint"//"right_upLeg_joint" + } + } + + var angleTo: String { + switch self { + case .left_upLeg_joint: + "left_leg_joint" + case .left_leg_joint: //knee + "left_foot_joint" + case .right_upLeg_joint: + "right_leg_joint" + case .right_leg_joint: //knee + "right_foot_joint" + case .left_arm_joint: // shoulder + "left_upLeg_joint" //"left_arm_joint" //"spine_1_joint" + case .left_forearm_joint: // elbow + "left_hand_joint" + case .right_arm_joint: // shoulder + "right_upLeg_joint" //"right_arm_joint" //"spine_1_joint" + case .right_forearm_joint: // elbow + "right_hand_joint" + /// TODO :- Need to find an accurate way to get foot joint +// case .left_foot_joint: +// "left_toesEnd_joint" +// case .right_foot_joint: +// "right_toesEnd_joint" + } + } +} diff --git a/iOS12_Sampler/ios12 Sampler/Body Detection with AR/Standing Posture/Skeleton/SkeletonBone.swift b/iOS12_Sampler/ios12 Sampler/Body Detection with 
AR/Standing Posture/Skeleton/SkeletonBone.swift new file mode 100644 index 0000000..30aae70 --- /dev/null +++ b/iOS12_Sampler/ios12 Sampler/Body Detection with AR/Standing Posture/Skeleton/SkeletonBone.swift @@ -0,0 +1,24 @@ +// +// SkeletonBone.swift +// BodyDetection +// +// Created by Dhruvil Vora on 08/03/24. +// Copyright © 2024 Apple. All rights reserved. +// + +import Foundation +import RealityKit + +struct SkeletonBone { + var fromJoint: SkeletonJoint + var toJoint: SkeletonJoint + + var centerPoint: SIMD3 { + [((fromJoint.position.x + toJoint.position.x) / 2), ((fromJoint.position.y + toJoint.position.y) / 2), + ((fromJoint.position.z + toJoint.position.z) / 2)] + } + + var length: Float { + simd_distance(fromJoint.position, toJoint.position) + } +} diff --git a/iOS12_Sampler/ios12 Sampler/Body Detection with AR/Standing Posture/Skeleton/SkeletonJoint.swift b/iOS12_Sampler/ios12 Sampler/Body Detection with AR/Standing Posture/Skeleton/SkeletonJoint.swift new file mode 100644 index 0000000..0850c1b --- /dev/null +++ b/iOS12_Sampler/ios12 Sampler/Body Detection with AR/Standing Posture/Skeleton/SkeletonJoint.swift @@ -0,0 +1,14 @@ +// +// SkeletonJoint.swift +// SkeletonJoint +// +// Created by Dhruvil Vora on 08/03/24. +// Copyright © 2024 Apple. All rights reserved. +// + +import Foundation + +struct SkeletonJoint { + let name: String + var position: SIMD3 +} diff --git a/iOS12_Sampler/ios12 Sampler/Body Detection with AR/Standing Posture/SkeletonHelper/BodySkeleton+AngleDetection.swift b/iOS12_Sampler/ios12 Sampler/Body Detection with AR/Standing Posture/SkeletonHelper/BodySkeleton+AngleDetection.swift new file mode 100644 index 0000000..d7cb09d --- /dev/null +++ b/iOS12_Sampler/ios12 Sampler/Body Detection with AR/Standing Posture/SkeletonHelper/BodySkeleton+AngleDetection.swift @@ -0,0 +1,55 @@ +// +// BodySkeleton+AngleDetection.swift +// BodyDetection +// +// Created by Dhruvil Vora on 22/03/24. +// Copyright © 2024 Apple. All rights reserved. 
+// + +import RealityKit +import ARKit + +extension BodySkeleton { + + func calculateRespectedJointAngles(jointName: String, parentEntity: Entity, bodyEntity: ARBodyAnchor) -> Int { + guard let name = AngleInBetween(rawValue: jointName) else { return 0 } + guard let (fromAngle, toAngle, angleToName) = getJointsTransform(bodyEntity: bodyEntity, angleFromName: name.angleFrom, angleToName: name.angleTo, angleForJoint: jointName) else { return 0 } + return getAngleBetweenTwoVectors(with: fromAngle, to: toAngle, for: (name, angleToName)) + } + + func getAngleBetweenTwoVectors(with fromAngle: SIMD3, to toAngle: SIMD3, for forAngle: (AngleInBetween, SIMD3)) -> Int { + var angle : Float = 0.0 +// if forAngle.0 == .left_arm_joint || forAngle.0 == .right_arm_joint { +// angle = SIMD3.angleForArms(v1: fromAngle, v2: toAngle) +// } else { + angle = SIMD3.getAngleBetween(fromAngle: fromAngle, toAngle: toAngle, forAngle: forAngle.1) +// } + return angle.isNaN ? 0 : Int(angle) + } + + static func getBodyJointsTransform(from vector1: SIMD4, to vector2: SIMD4) -> Float { + /// In order to find the angle between two vectors we can use the formula of the dot product + /// + /// Step 1:- Find the dot product of the two vectors (sum of the component-wise products of vector1 and vector2) + let dotProduct = (vector1.x * vector2.x) + (vector1.y * vector2.y) + (vector1.z * vector2.z) + + /// Now we need to find the magnitudes of both the vectors (each magnitude from its own vector) + let magForVector1 = sqrtf(powf(vector1.x, 2) + powf(vector1.y, 2) + powf(vector1.z, 2)) + let magForVector2 = sqrtf(powf(vector2.x, 2) + powf(vector2.y, 2) + powf(vector2.z, 2)) + + /// calculating the cosine rule for dot and magnitude: angle = acos(dot / (|v1| * |v2|)) + let angleInRadian = acosf(dotProduct / (magForVector1 * magForVector2)) + return angleInRadian * (180 / .pi) + } + + private func getJointsTransform(bodyEntity: ARBodyAnchor, angleFromName: String, angleToName: String, angleForJoint: String) -> (angleFromTransform: SIMD3, angleMidTransform: SIMD3, angleToTransform: SIMD3)?{ + guard let fromJointModelTransform =
bodyEntity.skeleton.modelTransform(for: ARSkeleton.JointName(rawValue: angleFromName)), + let toJointModelTransform = bodyEntity.skeleton.modelTransform(for: ARSkeleton.JointName(rawValue: angleToName)), + let midJointModelTransform = bodyEntity.skeleton.modelTransform(for: ARSkeleton.JointName(rawValue: angleForJoint)) + else { return nil } + let fromJointOffset = Transform(matrix: fromJointModelTransform).translation + let toJointOffset = Transform(matrix: toJointModelTransform).translation + let midJointOffset = Transform(matrix: midJointModelTransform).translation + return ((fromJointOffset), (toJointOffset), (midJointOffset)) + } +} diff --git a/iOS12_Sampler/ios12 Sampler/Body Detection with AR/Standing Posture/SkeletonHelper/BodySkeleton+Entity.swift b/iOS12_Sampler/ios12 Sampler/Body Detection with AR/Standing Posture/SkeletonHelper/BodySkeleton+Entity.swift new file mode 100644 index 0000000..f6f105a --- /dev/null +++ b/iOS12_Sampler/ios12 Sampler/Body Detection with AR/Standing Posture/SkeletonHelper/BodySkeleton+Entity.swift @@ -0,0 +1,89 @@ +// +// BodySkeleton+Entity.swift +// BodyDetection +// +// Created by Dhruvil Vora on 14/03/24. +// Copyright © 2024 Apple. All rights reserved. +// + +import Foundation +import ARKit +import RealityKit + +// Use to create Joints and Bones +extension BodySkeleton { + + func constructSkeletonBone(customBone: Bones, bodyEntity: ARBodyAnchor) -> SkeletonBone? 
{ + let hipPosition = Transform(matrix: bodyEntity.transform).translation + let fromBone = customBone.jointFromName + let toBone = customBone.jointToName + + guard let fromJointModelTransform = bodyEntity.skeleton.modelTransform(for: ARSkeleton.JointName(rawValue: fromBone)), + let toJointModelTransform = bodyEntity.skeleton.modelTransform(for: ARSkeleton.JointName(rawValue: toBone)) else { return nil } + let fromJointOffset = Transform(matrix: fromJointModelTransform).translation + hipPosition + let toJointOffset = Transform(matrix: toJointModelTransform).translation + hipPosition + return SkeletonBone(fromJoint: SkeletonJoint(name: fromBone, position: fromJointOffset), + toJoint: SkeletonJoint(name: toBone, position: toJointOffset)) + } + + func makeSphereEntity(radius: Float, color: UIColor, position: SIMD3, showAngle: Bool, jointName: String, bodyEntity: ARBodyAnchor) -> ModelEntity { + let sphereEntity = MeshResource.generateSphere(radius: radius) + let material = SimpleMaterial(color: color, roughness: 0.3, isMetallic: false) +// material.baseColor = try! 
.texture(TextureResource.load(named: "texture.png")) + let dummyEntity = ModelEntity(mesh: sphereEntity, materials: [material]) + dummyEntity.transform.translation = position + if showAngle { + addLabelEntityToJoints(jointName: jointName, parentEntity: dummyEntity, bodyEntity: bodyEntity) + } + return dummyEntity + } + + func makeBoneEntity(skeletonBone: SkeletonBone, diameter: Float = 0.04, color: UIColor = .white) -> ModelEntity { + let cylinderEntity = MeshResource.generateBox(width: diameter, height: diameter, depth: skeletonBone.length, cornerRadius: diameter) + let material = SimpleMaterial(color: color, roughness: 0.3, isMetallic: false) + let entity = ModelEntity(mesh: cylinderEntity, materials: [material]) + return entity + } + + private func addLabelEntityToJoints(jointName: String, parentEntity: Entity, bodyEntity: ARBodyAnchor) { + let angle = calculateRespectedJointAngles(jointName: jointName, parentEntity: parentEntity, bodyEntity: bodyEntity) + let labelEntity = makeLabelEntity(parentEntity: parentEntity, text: "\(angle)") + parentEntity.addChild(labelEntity) + customLabels[jointName] = labelEntity + } + + private func makeLabelEntity(parentEntity: Entity, text: String = "Data", color: UIColor = .red) -> ModelEntity { + let labelEntity = MeshResource.generateText(text) + let material = SimpleMaterial(color: color, roughness: 0.3, isMetallic: false) + let entity = ModelEntity(mesh: labelEntity, materials: [material]) + entity.setPosition(parentEntity.position, relativeTo: nil) + entity.setScale([0.01, 0.01, 0.01], relativeTo: nil) + return entity + } + + func setMeshEntityAccordingToJoints(jointName: String, jointRadius: Float) -> (radius: Float, color: UIColor){ + var updatedJointRadius: Float = 0.03 + var updatedJointColor: UIColor = .green + switch jointName { + case "neck_1_joint", "neck_2_joint", "neck_3_joint", "neck_4_joint", "head_joint", "left_shoulder_1_joint", "right_shoulder_1_joint" : + updatedJointRadius *= 0.5 + case "jaw_joint", 
"chin_joint", "left_eye_joint", "left_eyeLowerLid_joint", "left_eyeUpperLid_joint", "left_eyeball_joint", "nose_joint", "right_eye_joint", "right_eyeLowerLid_joint", "right_eyeUpperLid_joint", "right_eyeball_joint" : + updatedJointRadius *= 0.2 + updatedJointColor = .yellow + case _ where jointName.hasPrefix("spine_"): + updatedJointRadius *= 0.75 + case _ where jointName.hasPrefix("left_hand") || jointName.hasPrefix("right_hand"): + updatedJointRadius *= 0.5 + updatedJointColor = .yellow + case _ where jointName.hasPrefix("left_toes") || jointName.hasPrefix("right_toes"): + updatedJointRadius *= 0.25 + updatedJointColor = .yellow + case "left_hand_joint", "right_hand_joint": + updatedJointRadius *= 1 + updatedJointColor = .green + default: + break + } + return (updatedJointRadius, updatedJointColor) + } +} diff --git a/iOS12_Sampler/ios12 Sampler/Body Detection with AR/Standing Posture/SkeletonHelper/BodySkeleton.swift b/iOS12_Sampler/ios12 Sampler/Body Detection with AR/Standing Posture/SkeletonHelper/BodySkeleton.swift new file mode 100644 index 0000000..40f0b59 --- /dev/null +++ b/iOS12_Sampler/ios12 Sampler/Body Detection with AR/Standing Posture/SkeletonHelper/BodySkeleton.swift @@ -0,0 +1,96 @@ +// +// BodySkeleton.swift +// BodyDetection +// +// Created by Dhruvil Vora on 04/03/24. +// Copyright © 2024 Apple. All rights reserved. 
+// + +import Foundation +import RealityKit +import ARKit + +class BodySkeleton: Entity { + + private var joints: [String: Entity] = [:] + private var customBones: [String: Entity] = [:] + var customLabels: [String: ModelEntity] = [:] + + required init(bodyEntity: ARBodyAnchor) { + super.init() + /// get the position of root(hip) joint + let hipPosition = Transform(matrix: bodyEntity.transform).translation + /// Loop for creating joints + for jointName in ARSkeletonDefinition.defaultBody3D.jointNames { + /// get the position of joint + let jointModelTransform = bodyEntity.skeleton.modelTransform(for: ARSkeleton.JointName(rawValue: jointName))! +// let bodyOrientation = Transform(matrix: jointModelTransform).rotation + /// Now here when we get the translation of the joint it's actually is not the world position of that joint + /// It's actually the offset of that joint from the root joint + let jointModelOffset = Transform(matrix: jointModelTransform).translation + /// So inorder to get the actual world position of the joint we need to add hip's position & joint's offset + let jointPosition = hipPosition + jointModelOffset +// let rotatedJointTransform = bodyOrientation.act(jointPosition) + /// We can now create and place entity in the required position + let updatedRadiiAndColor = setMeshEntityAccordingToJoints(jointName: jointName, jointRadius: 0.03) + let sphereEntity = makeSphereEntity(radius: updatedRadiiAndColor.radius, color: updatedRadiiAndColor.color, + position: jointPosition, showAngle: jointName.isJointFromLegOrhand, + jointName: jointName, bodyEntity: bodyEntity) + self.addChild(sphereEntity) + /// Add joints to the dict as we afterwards also wants to update them + joints[jointName] = sphereEntity + } + /// Loop for creating Bones + for bone in Bones.allCases { + constructBoneEntity(bodyEntity: bodyEntity, customBone: bone) + } + } + + @MainActor required init() { + fatalError("init() has not been implemented") + } + + private func 
constructBoneEntity(bodyEntity: ARBodyAnchor, customBone: Bones) { + guard let jointsBone = constructSkeletonBone(customBone: customBone, bodyEntity: bodyEntity) else { return } + let boneEntity = makeBoneEntity(skeletonBone: jointsBone) + self.addChild(boneEntity) + boneEntity.look(at: jointsBone.toJoint.position, from: jointsBone.centerPoint, relativeTo: nil) + /// Add joints to the dict as we afterwards also wants to update them + customBones[customBone.name] = boneEntity + } + + func updateTrackedBodyAnchor(bodyEntity: ARBodyAnchor) { + /// get the position of root(hip) joint + let hipPosition = Transform(matrix: bodyEntity.transform).translation + for jointName in ARSkeletonDefinition.defaultBody3D.jointNames { + + /// get the position of joint + let jointModelTransform = bodyEntity.skeleton.modelTransform(for: ARSkeleton.JointName(rawValue: jointName))! + + let bodyOrientation = Transform(matrix: jointModelTransform).rotation + /// Now here when we get the translation of the joint it's actually is not the world position of that joint + /// It's actually the offset of that joint from the root joint + let jointModelOffset = Transform(matrix: jointModelTransform).translation + /// So inorder to get the actual world position of the joint we need to add hip's position & joint's offset + let jointPosition = hipPosition + jointModelOffset // bodyOrientation.act(jointModelOffset) +// let rotatedJointTransform = bodyOrientation.act(jointPosition) + guard let jointToUpdate = joints[jointName] else { continue } + jointToUpdate.transform.translation = jointPosition + guard let jointLabelToUpdate = customLabels[jointName] else { continue } + let angle = calculateRespectedJointAngles(jointName: jointName, parentEntity: jointToUpdate, bodyEntity: bodyEntity) + jointLabelToUpdate.model?.mesh = MeshResource.generateText("\(angle)") + } + updateCustomBones(bodyEntity: bodyEntity) + } + + private func updateCustomBones(bodyEntity: ARBodyAnchor) { + for customBone in 
Bones.allCases { + guard let jointsBone = constructSkeletonBone(customBone: customBone, bodyEntity: bodyEntity), + let boneEntity = customBones[customBone.name] else { return } + boneEntity.transform.translation = jointsBone.centerPoint + boneEntity.look(at: jointsBone.toJoint.position, from: jointsBone.centerPoint, relativeTo: nil) + /// Add joints to the dict as we afterwards also wants to update them according to the ARBodyAnchor + customBones[customBone.name] = boneEntity + } + } +} diff --git a/iOS12_Sampler/ios12 Sampler/Body Detection with AR/Standing Posture/StandingPostureVC.swift b/iOS12_Sampler/ios12 Sampler/Body Detection with AR/Standing Posture/StandingPostureVC.swift new file mode 100644 index 0000000..b35a76f --- /dev/null +++ b/iOS12_Sampler/ios12 Sampler/Body Detection with AR/Standing Posture/StandingPostureVC.swift @@ -0,0 +1,62 @@ +// +// StandingPostureVC.swift +// BodyDetection +// +// Created by Dhruvil Vora on 04/03/24. +// Copyright © 2024 Apple. All rights reserved. +// + +import Foundation +import UIKit +import RealityKit +import ARKit +import Combine + +class StandingPostureVC: UIViewController { + + // MARK: IBOutlets + @IBOutlet var arView: ARView! + + // MARK: Variables + private var bodyEntity: BodySkeleton? + private var character: Entity? + private var anchorSkeletonEntity = AnchorEntity() + private var cancellable: AnyCancellable? = nil + + override func viewDidAppear(_ animated: Bool) { + super.viewDidAppear(animated) + + // If the iOS device doesn't support body tracking, raise a developer error for + // this unhandled case. 
+ guard ARBodyTrackingConfiguration.isSupported else { + fatalError("This feature is only supported on devices with an A12 chip") + } + configureARBodyTracking() + } +} + +extension StandingPostureVC { + func configureARBodyTracking() { + let configuration = ARBodyTrackingConfiguration() + arView.session.run(configuration) + arView.session.delegate = self + arView.scene.addAnchor(anchorSkeletonEntity) + } +} + +extension StandingPostureVC: ARSessionDelegate { + func session(_ session: ARSession, didUpdate anchors: [ARAnchor]) { + for anchor in anchors { + guard let bodyAnchor = anchor as? ARBodyAnchor else { continue } + if let entity = bodyEntity { + entity.updateTrackedBodyAnchor(bodyEntity: bodyAnchor) +// let position = Transform(matrix: bodyAnchor.transform).translation +// entity.look(at: position, from: position, relativeTo: nil) + } else { + bodyEntity = BodySkeleton(bodyEntity: bodyAnchor) + guard let bodyTrackedEntity = bodyEntity else { continue } + anchorSkeletonEntity.addChild(bodyTrackedEntity) + } + } + } +} diff --git a/iOS12_Sampler/ios12 Sampler/Detecting Images in AR/ARImageLocator+ARSessionDelegate.swift b/iOS12_Sampler/ios12 Sampler/Detecting Images in AR/ARImageLocator+ARSessionDelegate.swift deleted file mode 100644 index 5d65fa0..0000000 --- a/iOS12_Sampler/ios12 Sampler/Detecting Images in AR/ARImageLocator+ARSessionDelegate.swift +++ /dev/null @@ -1,26 +0,0 @@ -// -// ARImageLocator+ARSessionDelegate.swift -// ios12 Sampler -// -// Created by Dhruvil Vora on 11/01/24. -// Copyright © 2024 Testing. All rights reserved. 
-// - -import ARKit - -extension ARImageLocator: ARSessionDelegate { - - func session(_ session: ARSession, cameraDidChangeTrackingState camera: ARCamera) { - /// Notify users about current tracking camera quality - statusViewController.showCameraQualityInfo(trackingState: camera.trackingState, autoHide: true) - - switch camera.trackingState { - case .notAvailable: - statusViewController.showRecommendationForCameraQuality(trackingState: camera.trackingState, - duration: 3, autoHide: false) - default: - break - } - } - -} diff --git a/iOS12_Sampler/ios12 Sampler/Detecting Images in AR/ARImageLocator.swift b/iOS12_Sampler/ios12 Sampler/Detecting Images in AR/ARImageLocator.swift deleted file mode 100644 index 0a35dcf..0000000 --- a/iOS12_Sampler/ios12 Sampler/Detecting Images in AR/ARImageLocator.swift +++ /dev/null @@ -1,173 +0,0 @@ -// -// ARImageLocator.swift -// ios12 Sampler -// -// Created by Dhruvil Vora on 10/01/24. -// Copyright © 2024 Testing. All rights reserved. -// - -import Foundation -import ARKit - -class ARImageLocator: UIViewController { - - @IBOutlet weak var sceneView: ARSCNView! - - @IBOutlet weak var blurVIew: UIVisualEffectView! - - lazy var statusViewController: StatusViewController = { - children.lazy.compactMap({ $0 as? StatusViewController }).first! - }() - - /// Need to create a serial queue for thread safety, when modifying scenekit node graph - let serialQueue = DispatchQueue(label: "\(Bundle.main.bundleIdentifier ?? "") + .serialScenekitQueue") - - /// Session accessor which is hold by sceneview - var session: ARSession { - sceneView.session - } - - var isRestartAvailable = true - - // MARK: View life cycle - override func viewDidLoad() { - super.viewDidLoad() - - sceneView.delegate = self - - // Create a new scene - let scene = SCNScene(named: "art.scnassets/ship.scn")! - sceneView.scene = scene - - sceneView.session.delegate = self - - // Hook up status view controller callback(s). 
- statusViewController.restartExperienceHandler = { [unowned self] in - self.restartExperience() - } - } - - override func viewDidAppear(_ animated: Bool) { - super.viewDidAppear(animated) - /// Prevent screen from being dimmed after it's let ideal for sometime - UIApplication.shared.isIdleTimerDisabled = true - statusViewController.scheduleGenericMessage(genericMsg: "Look for an image", duration: 7.5, - autoHide: true, messageType: .cameraQualityInfo) - resetTracking() - } - - override func viewDidDisappear(_ animated: Bool) { - super.viewDidDisappear(animated) - session.pause() - } - - /// Create a new arconfig to run on a `session` - func resetTracking() { - - guard let referenceImages = ARReferenceImage.referenceImages(inGroupNamed: "AR Resources", bundle: nil) else { - fatalError("Didn't found any resourcses") - } - let config = ARWorldTrackingConfiguration() - /// Need to provide detection images - config.detectionImages = referenceImages - session.run(config, options: [.resetTracking, .removeExistingAnchors]) - } - - private func restartExperience() { - guard isRestartAvailable else { return } - isRestartAvailable = false - statusViewController.showHideResetButton(isHidden: isRestartAvailable) - statusViewController.removeAllTimers() - - DispatchQueue.main.asyncAfter(deadline: .now() + 5.0) { [weak self] in - guard let self else { return } - self.isRestartAvailable = true - self.statusViewController.showHideResetButton(isHidden: self.isRestartAvailable) - } - } -} - -extension ARImageLocator: ARSCNViewDelegate { - func renderer(_ renderer: SCNSceneRenderer, didAdd node: SCNNode, for anchor: ARAnchor) { - guard let imageAnchor = anchor as? 
ARImageAnchor else { return } - serialQueue.async { - // first create a plane from the added anchor - let plane = SCNPlane(width: imageAnchor.referenceImage.physicalSize.width, - height: imageAnchor.referenceImage.physicalSize.height) - let planeNode = SCNNode(geometry: plane) - planeNode.opacity = 0.25 - - // As SCNPlane is a 2d it is vertically oriented and the ARImageAnchor is horizontally oriented - /// So by default SCNPlane is in a 2D format whose orientation is vertical & image anchor being 3D format its - /// horizontally align so inorder to matchb with imageanchor we need to rotate the plane's angle - planeNode.eulerAngles.x = -.pi / 2 - - node.addChildNode(planeNode) - } - DispatchQueue.main.async { - let name = imageAnchor.referenceImage.name - print("Image Name :- ",name) - self.animateObject(node) - } - } - - func animateObject(_ node: SCNNode) { - - guard let nodeToAnimate = sceneView.scene.rootNode.childNode(withName: "ship", recursively: true) else { - return - } - - // let forwardAction = SCNAction.moveBy(x: 0, y: 0, z: 5, duration: 1.0) - // let rotateAction1 = SCNAction.rotateBy(x: -.pi/2,y: 0, z: 0, duration: 2.0) - // let backwardAction = SCNAction.moveBy(x: 0, y: 0, z: -5, duration: 1.0) - // let rotateAction2 = SCNAction.rotateBy(x: -.pi/2,y: 0, z: 0, duration: 2.0) - // let abc = SCNAction.group([backwardAction, rotateAction1]) - // let sequenceAction = SCNAction.sequence([forwardAction, rotateAction1, backwardAction, rotateAction2]) - - let forwardAction = SCNAction.moveBy(x: 0, y: 0, z: 5, duration: 1.0) - let rotateAction1 = SCNAction.rotateBy(x: (-.pi),y: 0, z: 0, duration: 5.0) - let backwardAction = SCNAction.moveBy(x: 0, y: 0, z: -5, duration: 1.0) - let rotateAction2 = SCNAction.rotateBy(x: .pi/2,y: 0, z: 0, duration: 2.0) - let rotation = SCNAction.rotateBy(x: 0,y: 0, z: .pi, duration: 2.0) - var verticalPosition0: CGFloat = 0.0 - // Create a custom action to update the position based on a parabolic function - let parabolicAction = 
SCNAction.customAction(duration: 2) { (node, elapsedTime) in - // Calculate the vertical position using a parabolic function - verticalPosition0 = 0.5 * 9.8 * pow(elapsedTime, 1) - print("vertical Pos :- ", verticalPosition0) - // Update the node's position based on the parabolic function - node.position = SCNVector3(nodeToAnimate.position.x, - Float(verticalPosition0) > 4.9 ? (9.8 - Float(verticalPosition0)) : Float(verticalPosition0), - -Float(verticalPosition0)) - } - - let parabolicBackAction = SCNAction.customAction(duration: 2) { (node, elapsedTime) in - // Calculate the vertical position using a parabolic function - let verticalPosition = -0.5 * 9.8 * pow(elapsedTime, 1) - print("vertical Pos :- ", verticalPosition) - // Update the node's position based on the parabolic function - node.position = SCNVector3(Float(node.position.x), Float(verticalPosition), node.position.z) - } - // let sequenceAction = SCNAction.sequence([forwardAction, rotateAction1, backwardAction, rotateAction1]) - - let groupAction1 = SCNAction.group([forwardAction, rotation, parabolicAction]) - let groupAction2 = SCNAction.group([rotateAction1, backwardAction]) - let sequenceAction = SCNAction.sequence([groupAction1]) - - let repeatAction = SCNAction.repeatForever(parabolicAction.reversed()) - - nodeToAnimate.runAction(repeatAction) - } - - var imageHighlightAction: SCNAction { - return .sequence([ - .wait(duration: 0.25), - .fadeOpacity(to: 0.85, duration: 0.25), - .fadeOpacity(to: 0.15, duration: 0.25), - .fadeOpacity(to: 0.85, duration: 0.25), - .fadeOut(duration: 0.5), - .removeFromParentNode() - ]) - } -} - diff --git a/iOS12_Sampler/ios12 Sampler/Extensions/Float+Extension.swift b/iOS12_Sampler/ios12 Sampler/Extensions/Float+Extension.swift new file mode 100644 index 0000000..f92a00d --- /dev/null +++ b/iOS12_Sampler/ios12 Sampler/Extensions/Float+Extension.swift @@ -0,0 +1,26 @@ +// +// Float+Extension.swift +// ios12 Sampler +// +// Created by Dhruvil Vora on 04/03/24. 
+// Copyright © 2024 Testing. All rights reserved. +// + +import Foundation + +extension Float { + /// A boolean value indicating whether a number is negative or not + var isNegative: Bool { + return self < 0 + } + + var toDegree : Float { + get { + self * 180 / .pi + } + } + + var toPoints: Self { + return (self * 2835) + } +} diff --git a/iOS12_Sampler/ios12 Sampler/Extensions/Int+Extension.swift b/iOS12_Sampler/ios12 Sampler/Extensions/Int+Extension.swift new file mode 100644 index 0000000..a2a3748 --- /dev/null +++ b/iOS12_Sampler/ios12 Sampler/Extensions/Int+Extension.swift @@ -0,0 +1,32 @@ +// +// Int+Extension.swift +// ios12 Sampler +// +// Created by Dhruvil Vora on 04/03/24. +// Copyright © 2024 Testing. All rights reserved. +// + +import UIKit + +extension Int { + /// Provides a array of UInt32 from the given count + func returnArrCountInUInt32() -> [UInt32]{ + var arr = [UInt32]() + for x in 0...(self-1) { + arr.append(UInt32(x)) + } + return arr + } + + /// Provides a predefined colour based on detected angle from body posture + func postureIntensityColor() -> UIColor{ + if self <= 70 { + return .red.withAlphaComponent(0.7) + } else if (self > 70 && self <= 110) { + return .orange.withAlphaComponent(0.7) + } else if self > 110 { + return .green.withAlphaComponent(0.7) + } + return .black.withAlphaComponent(0.7) + } +} diff --git a/iOS12_Sampler/ios12 Sampler/Extensions/MeshResource+Extension.swift b/iOS12_Sampler/ios12 Sampler/Extensions/MeshResource+Extension.swift new file mode 100644 index 0000000..5b0fa8b --- /dev/null +++ b/iOS12_Sampler/ios12 Sampler/Extensions/MeshResource+Extension.swift @@ -0,0 +1,39 @@ +// +// MeshResource+Extension.swift +// ios12 Sampler +// +// Created by Dhruvil Vora on 04/03/24. +// Copyright © 2024 Testing. All rights reserved. +// + +import Foundation +import RealityKit + +extension MeshResource { + static func createSemiCircleMeshForAngle(angle: Float, isFacingLeft: Bool = false) -> MeshResource? 
{ + guard angle > 15.0 else { return nil } + let angleCount = Int(angle/6) + var fixedAngleCount = 0 + var fixedPostitions: [SIMD3] = [] + var positions: [SIMD3] = SIMD3.getSemiCircleMesh + + if isFacingLeft { + let removalPosCount = 29 - angleCount + positions.remove(atOffsets: IndexSet(0...(removalPosCount-1))) + fixedPostitions.append(contentsOf: positions) + } else { + fixedPostitions.append(contentsOf: positions[0...(angleCount-1)]) + } + + fixedPostitions.append(.zero) + fixedAngleCount = fixedPostitions.count + + let counts: [UInt8] = [UInt8(fixedAngleCount)] + let indices: [UInt32] = fixedAngleCount.returnArrCountInUInt32() + + var meshDescriptor = MeshDescriptor() + meshDescriptor.positions = .init(fixedPostitions) + meshDescriptor.primitives = .polygons(counts, indices) + return try! MeshResource.generate(from: [meshDescriptor]) + } +} diff --git a/iOS12_Sampler/ios12 Sampler/Extensions/SCNVector3+Extension.swift b/iOS12_Sampler/ios12 Sampler/Extensions/SCNVector3+Extension.swift new file mode 100644 index 0000000..e6631e0 --- /dev/null +++ b/iOS12_Sampler/ios12 Sampler/Extensions/SCNVector3+Extension.swift @@ -0,0 +1,249 @@ +// +// SCNVector3+Extension.swift +// ios12 Sampler +// +// Created by Dhruvil Vora on 04/03/24. +// Copyright © 2024 Testing. All rights reserved. +// + +import Foundation +import CoreGraphics +import SceneKit + +extension SCNVector3 { + + // Vector Length is Zero + func isZero() -> Bool { + if self.x == 0 && self.y == 0 && self.z == 0 { + return true + } + + return false + } + + /** + Inverts vector + */ + mutating func invert() -> SCNVector3 { + self * -1 + return self + } + + /** + Calculates vector length based on Pythagoras theorem + */ + var length:Float { + get { + return sqrtf(x*x + y*y + z*z) + } + set { + self = self.unit * newValue + } + } + + /** + Calculate Length Squared of Vector + - Used to determine Longest/Shortest Vector. 
Faster than using v.length + */ + var lengthSquared:Float { + get { + return self.x * self.x + self.y * self.y + self.z * self.z; + } + } + + /** + Returns unit vector (aka Normalized Vector) + - v.length = 1.0 + */ + var unit:SCNVector3 { + get { + return self / self.length + } + } + + /** + Normalizes vector + - v.Length = 1.0 + */ + mutating func normalize() { + self = self.unit + } + + /** + Calculates distance to vector + */ + func distance(toVector: SCNVector3) -> Float { + return (self - toVector).length + } + + + /** + Calculates dot product to vector + */ + func dot(toVector: SCNVector3) -> Float { + return x * toVector.x + y * toVector.y + z * toVector.z + } + + /** + Calculates cross product to vector + */ + func cross(toVector: SCNVector3) -> SCNVector3 { + return SCNVector3Make(y * toVector.z - z * toVector.y, z * toVector.x - x * toVector.z, x * toVector.y - y * toVector.x) + } + + /** + Returns lerp from Vector to Vector + */ + func lerp(toVector: SCNVector3, t: Float) -> SCNVector3 { + return SCNVector3Make( + self.x + ((toVector.x - self.x) * t), + self.y + ((toVector.y - self.y) * t), + self.z + ((toVector.z - self.z) * t)) + } + + /** + Project onto Vector + */ + func project(ontoVector: SCNVector3) -> SCNVector3 { + let scale: Float = dotBetweenVectors(v1: ontoVector, v2: self) / dotBetweenVectors(v1: ontoVector, v2: ontoVector) + let v: SCNVector3 = ontoVector * scale + return v + } + + /// Get/Set Angle of Vector + mutating func rotate(angle:Float) { + let length = self.length + self.x = cos(angle) * length + self.y = sin(angle) * length + } + + + func toCGVector() -> CGVector { + return CGVector(dx: CGFloat(self.x), dy: CGFloat(self.y)) + } + +} + +/** + v1 = v2 + v3 +*/ +func +(left: SCNVector3, right: SCNVector3) -> SCNVector3 { + return SCNVector3Make(left.x + right.x, left.y + right.y, left.z + right.z) +} + +/** + v1 += v2 +*/ +func +=( left: inout SCNVector3, right: SCNVector3) { + left = left + right +} + +/** + v1 = v2 - v3 +*/ +func 
-(left: SCNVector3, right: SCNVector3) -> SCNVector3 { + return SCNVector3Make(left.x - right.x, left.y - right.y, left.z - right.z) +} + +/** + v1 -= v2 +*/ +func -=( left: inout SCNVector3, right: SCNVector3) { + left = left - right +} + +/** + v1 = v2 * v3 +*/ +func *(left: SCNVector3, right: SCNVector3) -> SCNVector3 { + return SCNVector3Make(left.x * right.x, left.y * right.y, left.z * right.z) +} + +/** + v1 *= v2 +*/ +func *=( left: inout SCNVector3, right: SCNVector3) { + left = left * right +} + +/** + v1 = v2 * x +*/ +func *(left: SCNVector3, right: Float) -> SCNVector3 { + return SCNVector3Make(left.x * right, left.y * right, left.z * right) +} + +/** + v *= x +*/ +func *=( left: inout SCNVector3, right: Float) { + left = SCNVector3Make(left.x * right, left.y * right, left.z * right) +} + +/** + v1 = v2 / v3 +*/ +func /(left: SCNVector3, right: SCNVector3) -> SCNVector3 { + return SCNVector3Make(left.x / right.x, left.y / right.y, left.z / right.z) +} + +/** + v1 /= v2 +*/ +func /=( left: inout SCNVector3, right: SCNVector3) { + left = SCNVector3Make(left.x / right.x, left.y / right.y, left.z / right.z) +} + +/** + v1 = v2 / x +*/ +func /(left: SCNVector3, right: Float) -> SCNVector3 { + return SCNVector3Make(left.x / right, left.y / right, left.z / right) +} + +/** + v /= x +*/ +func /=( left: inout SCNVector3, right: Float) { + left = SCNVector3Make(left.x / right, left.y / right, left.z / right) +} + +/** + v = -v +*/ +prefix func -(v: SCNVector3) -> SCNVector3 { + return v * -1 +} + +/** + Returns distance between two vectors +*/ +func distanceBetweenVectors(v1: SCNVector3, v2: SCNVector3) -> Float { + return (v2 - v1).length +} + +/** + Returns dot product between two vectors +*/ +func dotBetweenVectors(v1: SCNVector3, v2: SCNVector3) -> Float { + return v1.x * v2.x + v1.y * v2.y + v1.z * v2.z +} + +/** + Returns cross product between two vectors +*/ +func crossBetweenVectors(v1: SCNVector3, v2: SCNVector3) -> SCNVector3 { + return 
SCNVector3Make(v1.y * v2.z - v1.z * v2.y, v1.z * v2.x - v1.x * v2.z, v1.x * v2.y - v1.y * v2.x) +} + +/** + Generate a Random Vector +*/ +func randomSCNVector3(rangeX:Float, rangeY:Float, rangeZ:Float) -> SCNVector3 { + + return SCNVector3( + x: Float(arc4random()%UInt32(rangeX)), + y: Float(arc4random()%UInt32(rangeY)), + z: Float(arc4random()%UInt32(rangeZ))) +} + diff --git a/iOS12_Sampler/ios12 Sampler/Extensions/Simd3+Extension.swift b/iOS12_Sampler/ios12 Sampler/Extensions/Simd3+Extension.swift new file mode 100644 index 0000000..6fbc07c --- /dev/null +++ b/iOS12_Sampler/ios12 Sampler/Extensions/Simd3+Extension.swift @@ -0,0 +1,88 @@ +// +// Simd3+Extension.swift +// ios12 Sampler +// +// Created by Dhruvil Vora on 04/03/24. +// Copyright © 2024 Testing. All rights reserved. +// + +import Foundation +import SceneKit + +extension SIMD3 { + + static var getSemiCircleMesh: [SIMD3] { + [ + [0.8, 0.1, 0], [0.79, 0.18, 0], [0.78, 0.26, 0], [0.75, 0.43, 0], [0.725, 0.53, 0], + [0.7, 0.6, 0], [0.65, 0.7, 0], [0.6, 0.78, 0], [0.55, 0.83, 0], [0.45, 0.9, 0], + [0.4, 0.92, 0], [0.3, 0.95, 0], [0.2, 0.98, 0], [0.1, 1, 0], [0, 1, 0], + [-0.1, 1, 0], [-0.2, 0.98, 0], [-0.3, 0.95, 0], [-0.4, 0.92, 0], [-0.45, 0.9, 0], + [-0.55, 0.83, 0], [-0.6, 0.78, 0], [-0.65, 0.7, 0], [-0.7, 0.6, 0], [-0.725, 0.53, 0], + [-0.75, 0.43, 0], [-0.78, 0.26, 0], [-0.79, 0.18, 0], [-0.8, 0.1, 0] + ] + } + + var length:Float { + get { + return sqrtf(x*x + y*y + z*z) + } + } + + static func dotProduct(v1: SIMD3, v2: SIMD3) -> Float{ + return v1.x*v2.x + v1.y*v2.y + v1.z*v2.z + } + + static func angleBetween(v1: SIMD3, v2: SIMD3) -> Float{ + let cosinus = dotProduct(v1: v1, v2: v2) / v1.length / v2.length + let angle = acos(cosinus) + return angle + } + + // Return the angle between this vector and the specified vector v + func angle(v: SIMD3) -> Float + { + // angle between 3d vectors P and Q is equal to the arc cos of their dot products over the product of + // their magnitudes (lengths). 
+ // theta = arccos( (P • Q) / (|P||Q|) ) + let dp = dot(v) // dot product + let magProduct = length * v.length // product of lengths (magnitudes) + return acos(dp / magProduct) // DONE + } + + func dot(_ vec: SIMD3) -> Float { + return (self.x * vec.x) + (self.y * vec.y) + (self.z * vec.z) + } + + static func angleForArms(v1: SIMD3, v2: SIMD3) -> Float{ + let vectorFromShoulderToElbow = (v2 - v1) + // We can perform further calculations to get the angle from this vector + // For example, we can also use atan2 or acos to get the angle in radians or degrees + let angleInRadians = atan2(vectorFromShoulderToElbow.x, vectorFromShoulderToElbow.y) + let angleInDegrees = abs(angleInRadians.toDegree) + print("NEw angle :- ",angleInDegrees) + return angleInDegrees + } + + static func getAngleBetween(fromAngle: SIMD3, toAngle: SIMD3, forAngle: SIMD3) -> Float { + let vector_a = (fromAngle - forAngle) + let vector_b = (toAngle - forAngle) + let degres = SIMD3.angle(vector1: vector_a, vector2: vector_b) + return degres + } + + /// Calculates the **angles** between two vectors. 
+ static func angle(vector1: SIMD3, vector2: SIMD3) -> Float{ + /// In order to find the angle between two vectors we can use the formula of dot product + /// + /// Step 1:- Now we need to find the magnitudes of both the vectors + let mag_vector1 = powf(vector1.x, 2) + powf(vector1.y, 2) + powf(vector1.z, 2) // a^2 + b^2 + c^2 (helpful to calculate magnitude) + let mag_vector2 = powf(vector2.x, 2) + powf(vector2.y, 2) + powf(vector2.z, 2) // a^2 + b^2 + c^2 (helpful to calculate magnitude) + let magnitudeForVectors = sqrtf(mag_vector1) * sqrtf(mag_vector2) // Magnitude calculations + /// Step 2:- Need to find dot product on basis of two vectors + let dot_product = (vector1.x * vector2.x) + (vector1.y * vector2.y) + (vector1.z * vector2.z) // Dot product + /// Step 3:- Calculating the cosine rule for dot and magnitude + return acosf(dot_product / magnitudeForVectors).toDegree + } + +} + diff --git a/iOS12_Sampler/ios12 Sampler/Extensions/String+Extension.swift b/iOS12_Sampler/ios12 Sampler/Extensions/String+Extension.swift new file mode 100644 index 0000000..d30f2ab --- /dev/null +++ b/iOS12_Sampler/ios12 Sampler/Extensions/String+Extension.swift @@ -0,0 +1,22 @@ +// +// String+Extension.swift +// ios12 Sampler +// +// Created by Dhruvil Vora on 26/03/24. +// Copyright © 2024 Testing. All rights reserved. 
+// + +import Foundation + +extension String { + var isJointFromLegOrhand: Bool { + ( + self == "left_upLeg_joint" || self == "left_leg_joint" || + self == "right_upLeg_joint" || self == "right_leg_joint" || + self == "left_arm_joint" || self == "left_forearm_joint" || + self == "right_arm_joint" || self == "right_forearm_joint" + /// TODO :- Need to find an accurate way to get foot joint +// || self == "left_foot_joint" || self == "right_foot_joint" + ) + } +} diff --git a/iOS12_Sampler/ios12 Sampler/Face Detection with AR/ARFaceDetection.swift b/iOS12_Sampler/ios12 Sampler/Face Detection with AR/ARFaceDetection.swift new file mode 100644 index 0000000..9cae82a --- /dev/null +++ b/iOS12_Sampler/ios12 Sampler/Face Detection with AR/ARFaceDetection.swift @@ -0,0 +1,171 @@ +// +// ARFaceDetection.swift +// ios12 Sampler +// +// Created by Dhruvil Vora on 01/05/24. +// Copyright © 2024 Testing. All rights reserved. +// + +import RealityKit +import Vision +import ARKit +// Comment + +class ARFaceDetection: UIViewController { + + // MARK: IBOutlets + @IBOutlet var arview: ARView! + @IBOutlet weak var circleVw: UIView! + @IBOutlet weak var modelCollectionView: UICollectionView! + + // MARK: Variables + var timer: Timer? + var modelWidth: Float = 0.0 + var deviceWidth: Float = 0.0 + var currentLoadedEntity: Entity! + var parentAnchorEntity: AnchorEntity? 
+    var models: [String] = ["Neon", "Heart", "Star", "Swag", "Glasses", "Animoji", "Cyclops"]
+
+    // MARK: ViewDidLoad
+    override func viewDidLoad() {
+        super.viewDidLoad()
+        setupARConfiguration()
+        setupCircleView()
+        setUpCollectionView()
+        loadModel(withModelIndex: 0)
+    }
+
+    private func setupARConfiguration() {
+        let configuaration = ARFaceTrackingConfiguration()
+        configuaration.isLightEstimationEnabled = true
+        arview.session.run(configuaration, options: [.resetTracking, .removeExistingAnchors])
+        arview.session.delegate = self
+    }
+
+    private func setupCircleView() {
+        deviceWidth = Float(UIScreen.main.bounds.size.width) + 100
+        circleVw.layer.cornerRadius = (circleVw.layer.frame.width / 2)
+        circleVw.layer.borderWidth = 10
+        circleVw.layer.borderColor = UIColor.green.cgColor
+    }
+
+    private func setUpCollectionView() {
+        let layout: UICollectionViewFlowLayout = UICollectionViewFlowLayout()
+        layout.sectionInset = UIEdgeInsets(top: 0, left: ((UIScreen.main.bounds.width) / 2) - 50,
+                                           bottom: 0, right: ((UIScreen.main.bounds.width) / 2) - 50)
+        layout.scrollDirection = .horizontal
+        modelCollectionView.collectionViewLayout = layout
+        modelCollectionView.delegate = self
+        modelCollectionView.dataSource = self
+        modelCollectionView.reloadData()
+    }
+
+    // Calculate the scaling of the model using certain params
+    private func newCalculateScalingModel(modelWidth: Float) -> Float {
+        // Convert Models width to points
+        let modelWidthInPts = modelWidth.toPoints
+        // Then need to divide device width with the converted width in points
+        // to get the ratio
+        let convertedRatio = deviceWidth/modelWidthInPts
+        return convertedRatio
+    }
+
+    private func loadModel(withModelIndex index: Int) {
+        // Need to remove existing model if any is placed in ARView
+        parentAnchorEntity?.removeChild(currentLoadedEntity)
+        // load the model
+        let entity = try!
ModelEntity.load(named: models[index]) + // Get the bounds of loaded model + let entityBounds = entity.visualBounds(relativeTo: nil) + // Get the width of model from bounding box of entity + modelWidth = entityBounds.extents.x + currentLoadedEntity = entity + // Calculate and scale the model + let scaledModelCalculation = newCalculateScalingModel(modelWidth: modelWidth) + entity.setScale([scaledModelCalculation, scaledModelCalculation, scaledModelCalculation], relativeTo: nil) + + // create a AnchorEntity tracking face + if let anchorEntity = parentAnchorEntity { + parentAnchorEntity = anchorEntity + } else { + parentAnchorEntity = AnchorEntity(.face) + } + parentAnchorEntity?.addChild(entity) + arview.scene.anchors.append(parentAnchorEntity!) + } +} + +// MARK: UIScrollViewDelegate +extension ARFaceDetection { + func scrollViewDidEndDragging(_ scrollView: UIScrollView, willDecelerate decelerate: Bool) { + if !decelerate { + stopScrolling(scrollView: scrollView) + } + } + + func scrollViewDidEndDecelerating(_ scrollView: UIScrollView) { + stopScrolling(scrollView: scrollView) + } + + private func stopScrolling(scrollView: UIScrollView) { + let modularCount = (Float(scrollView.contentOffset.x) / 95) + guard Int(round(modularCount)) <= models.count else { return } + scrollAndLoadModel(with: Int(modularCount)) + } + + private func scrollAndLoadModel(with indexToScroll: Int) { + modelCollectionView.scrollToItem(at: IndexPath(item: indexToScroll, section: 0), at: .centeredHorizontally, animated: true) + loadModel(withModelIndex: indexToScroll) + } +} + +// MARK: ARSessionDelegate +extension ARFaceDetection: ARSessionDelegate { + func session(_ session: ARSession, didUpdate anchors: [ARAnchor]) { + for anchor in anchors { + // Track anchor of Face only & For Animoji only + guard let anchor = anchor as? 
ARFaceAnchor, currentLoadedEntity.findEntity(named: "Animoji") != nil else { continue } + // Get Eyebrows and Jaws displacement value + guard let browOuterUpLeftShape = anchor.blendShapes[.browOuterUpLeft]?.floatValue, + let browOuterUpRightShape = anchor.blendShapes[.browOuterUpRight]?.floatValue, + let jawShape = anchor.blendShapes[.jawOpen]?.floatValue else { continue } + + // Then find that particular entity from Animoji model + guard let leftBrowEntity = currentLoadedEntity?.findEntity(named: "left_eyebrow"), + let rightBrowEntity = currentLoadedEntity?.findEntity(named: "right_eyebrow"), + let jawEntity = currentLoadedEntity?.findEntity(named: "jaw") else { continue } + + // Change the position of that entity realtime to reflect user's expression + leftBrowEntity.position.y = 0.082 + (browOuterUpLeftShape / 10) + rightBrowEntity.position.y = 0.082 + (browOuterUpRightShape / 10) + jawEntity.position.y = -0.067 - (jawShape / 10) + } + } +} + +// MARK: UICollectionViewDataSource +extension ARFaceDetection: UICollectionViewDataSource { + func collectionView(_ collectionView: UICollectionView, numberOfItemsInSection section: Int) -> Int { + return models.count + } + + func collectionView(_ collectionView: UICollectionView, cellForItemAt indexPath: IndexPath) -> UICollectionViewCell { + guard let collectionViewCell: ModelCollectionCell = collectionView.dequeueReusableCell(withReuseIdentifier: "ModelCollectionCell", for: indexPath) as? 
ModelCollectionCell else { return UICollectionViewCell() } + collectionViewCell.configureCell(indexPath: indexPath.item) + return collectionViewCell + } +} + +// MARK: UICollectionViewDelegate +extension ARFaceDetection: UICollectionViewDelegate { + func collectionView(_ collectionView: UICollectionView, didSelectItemAt indexPath: IndexPath) { + scrollAndLoadModel(with: indexPath.item) + } +} + +// MARK: UICollectionViewDelegateFlowLayout +extension ARFaceDetection: UICollectionViewDelegateFlowLayout { + func collectionView(_ collectionView: UICollectionView, layout collectionViewLayout: UICollectionViewLayout, sizeForItemAt indexPath: IndexPath) -> CGSize { + CGSize(width: 100, height: 100) + } +} diff --git a/iOS12_Sampler/ios12 Sampler/Face Detection with AR/Cell/ModelCollectionCell.swift b/iOS12_Sampler/ios12 Sampler/Face Detection with AR/Cell/ModelCollectionCell.swift new file mode 100644 index 0000000..94bee33 --- /dev/null +++ b/iOS12_Sampler/ios12 Sampler/Face Detection with AR/Cell/ModelCollectionCell.swift @@ -0,0 +1,21 @@ +// +// ModelCollectionCell.swift +// ios12 Sampler +// +// Created by Dhruvil Vora on 01/05/24. +// Copyright © 2024 Testing. All rights reserved. +// + +import UIKit + +class ModelCollectionCell: UICollectionViewCell { + + var modelImgs: [String] = ["Neon", "Heart", "Star", "Swag", "Glasses", "Robo", "Cyclops"] + + @IBOutlet weak var modelImgVw: UIImageView! 
+ + func configureCell(indexPath: Int) { + modelImgVw.image = UIImage(named: modelImgs[indexPath]) + modelImgVw.layer.cornerRadius = (modelImgVw.layer.frame.width / 2) + } +} diff --git a/iOS12_Sampler/ios12 Sampler/Tracking and altering images/Utilities/ImageTrackingUtility.swift b/iOS12_Sampler/ios12 Sampler/Tracking and altering images/Utilities/ImageTrackingUtility.swift index d051083..07f0289 100644 --- a/iOS12_Sampler/ios12 Sampler/Tracking and altering images/Utilities/ImageTrackingUtility.swift +++ b/iOS12_Sampler/ios12 Sampler/Tracking and altering images/Utilities/ImageTrackingUtility.swift @@ -91,7 +91,7 @@ func createPlaneNode(size: CGSize, rotation: Float, content: Any?) -> SCNNode { } func getFilterData() -> [FilterModel] { - return [FilterModel(filterDummyImage: UIImage(named: "random")!, filterName: "Random", isSelected: true, selectedFilterStyle: .randomStyle), + return [FilterModel(filterDummyImage: UIImage(named: "Random")!, filterName: "Random", isSelected: true, selectedFilterStyle: .randomStyle), FilterModel(filterDummyImage: UIImage(named: "style1")!, filterName: "Style1", isSelected: false), FilterModel(filterDummyImage: UIImage(named: "style2")!, filterName: "Style2", isSelected: false), FilterModel(filterDummyImage: UIImage(named: "style3")!, filterName: "Style3", isSelected: false), diff --git a/iOS12_Sampler/ios12 Sampler/USDZ/Face Models/Animoji.usdz b/iOS12_Sampler/ios12 Sampler/USDZ/Face Models/Animoji.usdz new file mode 100644 index 0000000..470c45d Binary files /dev/null and b/iOS12_Sampler/ios12 Sampler/USDZ/Face Models/Animoji.usdz differ diff --git a/iOS12_Sampler/ios12 Sampler/USDZ/Face Models/Cyclops.usdz b/iOS12_Sampler/ios12 Sampler/USDZ/Face Models/Cyclops.usdz new file mode 100644 index 0000000..32caf7f Binary files /dev/null and b/iOS12_Sampler/ios12 Sampler/USDZ/Face Models/Cyclops.usdz differ diff --git a/iOS12_Sampler/ios12 Sampler/USDZ/Face Models/Glasses.usdz b/iOS12_Sampler/ios12 Sampler/USDZ/Face Models/Glasses.usdz 
new file mode 100644 index 0000000..64ca484 Binary files /dev/null and b/iOS12_Sampler/ios12 Sampler/USDZ/Face Models/Glasses.usdz differ diff --git a/iOS12_Sampler/ios12 Sampler/USDZ/Face Models/Heart.usdz b/iOS12_Sampler/ios12 Sampler/USDZ/Face Models/Heart.usdz new file mode 100644 index 0000000..909ceeb Binary files /dev/null and b/iOS12_Sampler/ios12 Sampler/USDZ/Face Models/Heart.usdz differ diff --git a/iOS12_Sampler/ios12 Sampler/USDZ/Face Models/Metal_Round_Glasses.usdz b/iOS12_Sampler/ios12 Sampler/USDZ/Face Models/Metal_Round_Glasses.usdz new file mode 100644 index 0000000..f7d5cc9 Binary files /dev/null and b/iOS12_Sampler/ios12 Sampler/USDZ/Face Models/Metal_Round_Glasses.usdz differ diff --git a/iOS12_Sampler/ios12 Sampler/USDZ/Face Models/Neon.usdz b/iOS12_Sampler/ios12 Sampler/USDZ/Face Models/Neon.usdz new file mode 100644 index 0000000..6463af4 Binary files /dev/null and b/iOS12_Sampler/ios12 Sampler/USDZ/Face Models/Neon.usdz differ diff --git a/iOS12_Sampler/ios12 Sampler/USDZ/Face Models/Star.usdz b/iOS12_Sampler/ios12 Sampler/USDZ/Face Models/Star.usdz new file mode 100644 index 0000000..b2049de Binary files /dev/null and b/iOS12_Sampler/ios12 Sampler/USDZ/Face Models/Star.usdz differ diff --git a/iOS12_Sampler/ios12 Sampler/USDZ/Face Models/Swag.usdz b/iOS12_Sampler/ios12 Sampler/USDZ/Face Models/Swag.usdz new file mode 100644 index 0000000..a859572 Binary files /dev/null and b/iOS12_Sampler/ios12 Sampler/USDZ/Face Models/Swag.usdz differ diff --git a/iOS12_Sampler/ios12 Sampler/USDZ/Glasses/80s_Sunglasses_that_look_litt_it_you_ask_me.usdz b/iOS12_Sampler/ios12 Sampler/USDZ/Glasses/80s_Sunglasses_that_look_litt_it_you_ask_me.usdz new file mode 100644 index 0000000..986917a Binary files /dev/null and b/iOS12_Sampler/ios12 Sampler/USDZ/Glasses/80s_Sunglasses_that_look_litt_it_you_ask_me.usdz differ diff --git a/iOS12_Sampler/ios12 Sampler/USDZ/Glasses/Cheese_Sunglasses.usdz b/iOS12_Sampler/ios12 Sampler/USDZ/Glasses/Cheese_Sunglasses.usdz new 
file mode 100644 index 0000000..b69099c Binary files /dev/null and b/iOS12_Sampler/ios12 Sampler/USDZ/Glasses/Cheese_Sunglasses.usdz differ diff --git a/iOS12_Sampler/ios12 Sampler/USDZ/Glasses/Cyclops_sunglasses.usdz b/iOS12_Sampler/ios12 Sampler/USDZ/Glasses/Cyclops_sunglasses.usdz new file mode 100644 index 0000000..facfbe9 Binary files /dev/null and b/iOS12_Sampler/ios12 Sampler/USDZ/Glasses/Cyclops_sunglasses.usdz differ diff --git a/iOS12_Sampler/ios12 Sampler/USDZ/Glasses/Deal_With_It_Sunglasses.usdz b/iOS12_Sampler/ios12 Sampler/USDZ/Glasses/Deal_With_It_Sunglasses.usdz new file mode 100644 index 0000000..5f3ad52 Binary files /dev/null and b/iOS12_Sampler/ios12 Sampler/USDZ/Glasses/Deal_With_It_Sunglasses.usdz differ diff --git a/iOS12_Sampler/ios12 Sampler/USDZ/Glasses/Montgomery_Gators_Star_Sunglasses.usdz b/iOS12_Sampler/ios12 Sampler/USDZ/Glasses/Montgomery_Gators_Star_Sunglasses.usdz new file mode 100644 index 0000000..1f357f0 Binary files /dev/null and b/iOS12_Sampler/ios12 Sampler/USDZ/Glasses/Montgomery_Gators_Star_Sunglasses.usdz differ diff --git a/iOS12_Sampler/ios12 Sampler/USDZ/Glasses/Neon_Party_Glasses.usdz b/iOS12_Sampler/ios12 Sampler/USDZ/Glasses/Neon_Party_Glasses.usdz new file mode 100644 index 0000000..6797f36 Binary files /dev/null and b/iOS12_Sampler/ios12 Sampler/USDZ/Glasses/Neon_Party_Glasses.usdz differ diff --git a/iOS12_Sampler/ios12 Sampler/USDZ/Glasses/Occhiale_Goccia__Sunglasses.usdz b/iOS12_Sampler/ios12 Sampler/USDZ/Glasses/Occhiale_Goccia__Sunglasses.usdz new file mode 100644 index 0000000..6fb165f Binary files /dev/null and b/iOS12_Sampler/ios12 Sampler/USDZ/Glasses/Occhiale_Goccia__Sunglasses.usdz differ diff --git a/iOS12_Sampler/ios12 Sampler/USDZ/Glasses/Party_Glasses_Heart_type.usdz b/iOS12_Sampler/ios12 Sampler/USDZ/Glasses/Party_Glasses_Heart_type.usdz new file mode 100644 index 0000000..dd50a13 Binary files /dev/null and b/iOS12_Sampler/ios12 Sampler/USDZ/Glasses/Party_Glasses_Heart_type.usdz differ diff --git 
a/iOS12_Sampler/ios12 Sampler/USDZ/Glasses/Sunglasses.usdz b/iOS12_Sampler/ios12 Sampler/USDZ/Glasses/Sunglasses.usdz new file mode 100644 index 0000000..8073135 Binary files /dev/null and b/iOS12_Sampler/ios12 Sampler/USDZ/Glasses/Sunglasses.usdz differ diff --git a/iOS12_Sampler/ios12 Sampler/USDZ/biped_robot.usdz b/iOS12_Sampler/ios12 Sampler/USDZ/biped_robot.usdz new file mode 100644 index 0000000..a4ffb6b Binary files /dev/null and b/iOS12_Sampler/ios12 Sampler/USDZ/biped_robot.usdz differ diff --git a/iOS12_Sampler/ios12 Sampler/USDZ/robot_walk_idle.usdz b/iOS12_Sampler/ios12 Sampler/USDZ/robot_walk_idle.usdz new file mode 100644 index 0000000..1c6b1c0 Binary files /dev/null and b/iOS12_Sampler/ios12 Sampler/USDZ/robot_walk_idle.usdz differ diff --git a/iOS12_Sampler/ios12 Sampler/art.scnassets/robot_walk_idle.scn b/iOS12_Sampler/ios12 Sampler/art.scnassets/robot_walk_idle.scn new file mode 100644 index 0000000..d8b784b Binary files /dev/null and b/iOS12_Sampler/ios12 Sampler/art.scnassets/robot_walk_idle.scn differ diff --git a/iOS12_Sampler/ios12 Sampler/art.scnassets/ship.scn b/iOS12_Sampler/ios12 Sampler/art.scnassets/ship.scn index 89cc818..5539d8a 100644 Binary files a/iOS12_Sampler/ios12 Sampler/art.scnassets/ship.scn and b/iOS12_Sampler/ios12 Sampler/art.scnassets/ship.scn differ