diff --git a/examples/face_landmarker/ios/FaceLandmarker.xcodeproj/project.pbxproj b/examples/face_landmarker/ios/FaceLandmarker.xcodeproj/project.pbxproj new file mode 100644 index 00000000..5b5cd6ba --- /dev/null +++ b/examples/face_landmarker/ios/FaceLandmarker.xcodeproj/project.pbxproj @@ -0,0 +1,753 @@ +// !$*UTF8*$! +{ + archiveVersion = 1; + classes = { + }; + objectVersion = 56; + objects = { + +/* Begin PBXBuildFile section */ + BFD91ED62A74F2F300FD4649 /* face_landmarker.task in Resources */ = {isa = PBXBuildFile; fileRef = BFD91ED52A74F2F300FD4649 /* face_landmarker.task */; }; + BFD91ED92A7889DF00FD4649 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = BFD91ED82A7889DF00FD4649 /* Assets.xcassets */; }; + BFEE41EB2A45351E00BA1A35 /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = BFEE41EA2A45351E00BA1A35 /* AppDelegate.swift */; }; + BFEE41ED2A45351E00BA1A35 /* SceneDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = BFEE41EC2A45351E00BA1A35 /* SceneDelegate.swift */; }; + BFEE41F72A45351F00BA1A35 /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = BFEE41F52A45351F00BA1A35 /* LaunchScreen.storyboard */; }; + BFEE420C2A45351F00BA1A35 /* FaceLandmarkerUITests.swift in Sources */ = {isa = PBXBuildFile; fileRef = BFEE420B2A45351F00BA1A35 /* FaceLandmarkerUITests.swift */; }; + BFEE420E2A45351F00BA1A35 /* FaceLandmarkerUITestsLaunchTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = BFEE420D2A45351F00BA1A35 /* FaceLandmarkerUITestsLaunchTests.swift */; }; + BFEE42202A45394C00BA1A35 /* FaceLandmarkerHelper.swift in Sources */ = {isa = PBXBuildFile; fileRef = BFEE421B2A45394C00BA1A35 /* FaceLandmarkerHelper.swift */; }; + BFEE42212A45394C00BA1A35 /* PreviewView.swift in Sources */ = {isa = PBXBuildFile; fileRef = BFEE421C2A45394C00BA1A35 /* PreviewView.swift */; }; + BFEE42222A45394C00BA1A35 /* OverlayView.swift in Sources */ = {isa = PBXBuildFile; fileRef = BFEE421D2A45394C00BA1A35 /* 
OverlayView.swift */; }; + BFEE42232A45394C00BA1A35 /* CameraFeedManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = BFEE421E2A45394C00BA1A35 /* CameraFeedManager.swift */; }; + BFEE42282A45395400BA1A35 /* ViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = BFEE42262A45395400BA1A35 /* ViewController.swift */; }; + BFEE42292A45395400BA1A35 /* InferenceViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = BFEE42272A45395400BA1A35 /* InferenceViewController.swift */; }; + BFEE42302A4542B400BA1A35 /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = BFEE422E2A4542B400BA1A35 /* Main.storyboard */; }; + CC9DBE1CC48C4DB446229F58 /* Pods_FaceLandmarker.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = BEB757D4516FCE33E7F93E0B /* Pods_FaceLandmarker.framework */; }; + CDB9E639CD84F1DEE7F622D5 /* libPods-FaceLandmarkerTests.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 5DC907E142B2A4A5ED6AD68C /* libPods-FaceLandmarkerTests.a */; }; +/* End PBXBuildFile section */ + +/* Begin PBXContainerItemProxy section */ + BFEE41FE2A45351F00BA1A35 /* PBXContainerItemProxy */ = { + isa = PBXContainerItemProxy; + containerPortal = BFEE41DF2A45351E00BA1A35 /* Project object */; + proxyType = 1; + remoteGlobalIDString = BFEE41E62A45351E00BA1A35; + remoteInfo = FaceLandmarker; + }; + BFEE42082A45351F00BA1A35 /* PBXContainerItemProxy */ = { + isa = PBXContainerItemProxy; + containerPortal = BFEE41DF2A45351E00BA1A35 /* Project object */; + proxyType = 1; + remoteGlobalIDString = BFEE41E62A45351E00BA1A35; + remoteInfo = FaceLandmarker; + }; +/* End PBXContainerItemProxy section */ + +/* Begin PBXFileReference section */ + 40B88274C9701B9197F0726E /* Pods-FaceLandmarkerTests.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-FaceLandmarkerTests.release.xcconfig"; path = "Target Support Files/Pods-FaceLandmarkerTests/Pods-FaceLandmarkerTests.release.xcconfig"; 
sourceTree = ""; }; + 5DC907E142B2A4A5ED6AD68C /* libPods-FaceLandmarkerTests.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = "libPods-FaceLandmarkerTests.a"; sourceTree = BUILT_PRODUCTS_DIR; }; + 7D098A775C42220DE7F02EC0 /* Pods-FaceLandmarkerTests.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-FaceLandmarkerTests.debug.xcconfig"; path = "Target Support Files/Pods-FaceLandmarkerTests/Pods-FaceLandmarkerTests.debug.xcconfig"; sourceTree = ""; }; + BEB757D4516FCE33E7F93E0B /* Pods_FaceLandmarker.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_FaceLandmarker.framework; sourceTree = BUILT_PRODUCTS_DIR; }; + BFD91ED52A74F2F300FD4649 /* face_landmarker.task */ = {isa = PBXFileReference; lastKnownFileType = file; path = face_landmarker.task; sourceTree = ""; }; + BFD91ED82A7889DF00FD4649 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; + BFEE41E72A45351E00BA1A35 /* FaceLandmarker.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = FaceLandmarker.app; sourceTree = BUILT_PRODUCTS_DIR; }; + BFEE41EA2A45351E00BA1A35 /* AppDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = ""; }; + BFEE41EC2A45351E00BA1A35 /* SceneDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SceneDelegate.swift; sourceTree = ""; }; + BFEE41F62A45351F00BA1A35 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = ""; }; + BFEE41F82A45351F00BA1A35 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; + BFEE41FD2A45351F00BA1A35 /* 
FaceLandmarkerTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = FaceLandmarkerTests.xctest; sourceTree = BUILT_PRODUCTS_DIR; }; + BFEE42072A45351F00BA1A35 /* FaceLandmarkerUITests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = FaceLandmarkerUITests.xctest; sourceTree = BUILT_PRODUCTS_DIR; }; + BFEE420B2A45351F00BA1A35 /* FaceLandmarkerUITests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FaceLandmarkerUITests.swift; sourceTree = ""; }; + BFEE420D2A45351F00BA1A35 /* FaceLandmarkerUITestsLaunchTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FaceLandmarkerUITestsLaunchTests.swift; sourceTree = ""; }; + BFEE421B2A45394C00BA1A35 /* FaceLandmarkerHelper.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = FaceLandmarkerHelper.swift; sourceTree = ""; }; + BFEE421C2A45394C00BA1A35 /* PreviewView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = PreviewView.swift; sourceTree = ""; }; + BFEE421D2A45394C00BA1A35 /* OverlayView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = OverlayView.swift; sourceTree = ""; }; + BFEE421E2A45394C00BA1A35 /* CameraFeedManager.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CameraFeedManager.swift; sourceTree = ""; }; + BFEE42262A45395400BA1A35 /* ViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ViewController.swift; sourceTree = ""; }; + BFEE42272A45395400BA1A35 /* InferenceViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = InferenceViewController.swift; sourceTree = ""; }; + BFEE422F2A4542B400BA1A35 /* Base */ = {isa = PBXFileReference; 
lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = ""; }; + E926CBA36E9A76BEFC7FB7DF /* Pods-FaceLandmarker.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-FaceLandmarker.release.xcconfig"; path = "Target Support Files/Pods-FaceLandmarker/Pods-FaceLandmarker.release.xcconfig"; sourceTree = ""; }; + FD9F1289B507CF7CF326D7DB /* Pods-FaceLandmarker.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-FaceLandmarker.debug.xcconfig"; path = "Target Support Files/Pods-FaceLandmarker/Pods-FaceLandmarker.debug.xcconfig"; sourceTree = ""; }; +/* End PBXFileReference section */ + +/* Begin PBXFrameworksBuildPhase section */ + BFEE41E42A45351E00BA1A35 /* Frameworks */ = { + isa = PBXFrameworksBuildPhase; + buildActionMask = 2147483647; + files = ( + CC9DBE1CC48C4DB446229F58 /* Pods_FaceLandmarker.framework in Frameworks */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; + BFEE41FA2A45351F00BA1A35 /* Frameworks */ = { + isa = PBXFrameworksBuildPhase; + buildActionMask = 2147483647; + files = ( + CDB9E639CD84F1DEE7F622D5 /* libPods-FaceLandmarkerTests.a in Frameworks */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; + BFEE42042A45351F00BA1A35 /* Frameworks */ = { + isa = PBXFrameworksBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXFrameworksBuildPhase section */ + +/* Begin PBXGroup section */ + 227AD33CCBBBA0C0E45E8889 /* Pods */ = { + isa = PBXGroup; + children = ( + FD9F1289B507CF7CF326D7DB /* Pods-FaceLandmarker.debug.xcconfig */, + E926CBA36E9A76BEFC7FB7DF /* Pods-FaceLandmarker.release.xcconfig */, + 7D098A775C42220DE7F02EC0 /* Pods-FaceLandmarkerTests.debug.xcconfig */, + 40B88274C9701B9197F0726E /* Pods-FaceLandmarkerTests.release.xcconfig */, + ); + path = Pods; + sourceTree = ""; + }; + 287163CB4E8FDF046324D157 /* 
Frameworks */ = { + isa = PBXGroup; + children = ( + BEB757D4516FCE33E7F93E0B /* Pods_FaceLandmarker.framework */, + 5DC907E142B2A4A5ED6AD68C /* libPods-FaceLandmarkerTests.a */, + ); + name = Frameworks; + sourceTree = ""; + }; + BFEE41DE2A45351E00BA1A35 = { + isa = PBXGroup; + children = ( + BFEE41E92A45351E00BA1A35 /* FaceLandmarker */, + BFEE42002A45351F00BA1A35 /* FaceLandmarkerTests */, + BFEE420A2A45351F00BA1A35 /* FaceLandmarkerUITests */, + BFEE41E82A45351E00BA1A35 /* Products */, + 227AD33CCBBBA0C0E45E8889 /* Pods */, + 287163CB4E8FDF046324D157 /* Frameworks */, + ); + sourceTree = ""; + }; + BFEE41E82A45351E00BA1A35 /* Products */ = { + isa = PBXGroup; + children = ( + BFEE41E72A45351E00BA1A35 /* FaceLandmarker.app */, + BFEE41FD2A45351F00BA1A35 /* FaceLandmarkerTests.xctest */, + BFEE42072A45351F00BA1A35 /* FaceLandmarkerUITests.xctest */, + ); + name = Products; + sourceTree = ""; + }; + BFEE41E92A45351E00BA1A35 /* FaceLandmarker */ = { + isa = PBXGroup; + children = ( + BFD91ED52A74F2F300FD4649 /* face_landmarker.task */, + BFEE42252A45395400BA1A35 /* ViewContoller */, + BFEE421A2A45394C00BA1A35 /* Helper */, + BFEE41EA2A45351E00BA1A35 /* AppDelegate.swift */, + BFEE41EC2A45351E00BA1A35 /* SceneDelegate.swift */, + BFEE41F52A45351F00BA1A35 /* LaunchScreen.storyboard */, + BFD91ED82A7889DF00FD4649 /* Assets.xcassets */, + BFEE422E2A4542B400BA1A35 /* Main.storyboard */, + BFEE41F82A45351F00BA1A35 /* Info.plist */, + ); + path = FaceLandmarker; + sourceTree = ""; + }; + BFEE42002A45351F00BA1A35 /* FaceLandmarkerTests */ = { + isa = PBXGroup; + children = ( + ); + path = FaceLandmarkerTests; + sourceTree = ""; + }; + BFEE420A2A45351F00BA1A35 /* FaceLandmarkerUITests */ = { + isa = PBXGroup; + children = ( + BFEE420B2A45351F00BA1A35 /* FaceLandmarkerUITests.swift */, + BFEE420D2A45351F00BA1A35 /* FaceLandmarkerUITestsLaunchTests.swift */, + ); + path = FaceLandmarkerUITests; + sourceTree = ""; + }; + BFEE421A2A45394C00BA1A35 /* Helper */ = { + isa = 
PBXGroup; + children = ( + BFEE421B2A45394C00BA1A35 /* FaceLandmarkerHelper.swift */, + BFEE421C2A45394C00BA1A35 /* PreviewView.swift */, + BFEE421D2A45394C00BA1A35 /* OverlayView.swift */, + BFEE421E2A45394C00BA1A35 /* CameraFeedManager.swift */, + ); + path = Helper; + sourceTree = ""; + }; + BFEE42252A45395400BA1A35 /* ViewContoller */ = { + isa = PBXGroup; + children = ( + BFEE42262A45395400BA1A35 /* ViewController.swift */, + BFEE42272A45395400BA1A35 /* InferenceViewController.swift */, + ); + path = ViewContoller; + sourceTree = ""; + }; +/* End PBXGroup section */ + +/* Begin PBXNativeTarget section */ + BFEE41E62A45351E00BA1A35 /* FaceLandmarker */ = { + isa = PBXNativeTarget; + buildConfigurationList = BFEE42112A45351F00BA1A35 /* Build configuration list for PBXNativeTarget "FaceLandmarker" */; + buildPhases = ( + F01E952EE51699A6F761D123 /* [CP] Check Pods Manifest.lock */, + BFD91ED72A74F31600FD4649 /* Download models */, + BFEE41E32A45351E00BA1A35 /* Sources */, + BFEE41E42A45351E00BA1A35 /* Frameworks */, + BFEE41E52A45351E00BA1A35 /* Resources */, + ); + buildRules = ( + ); + dependencies = ( + ); + name = FaceLandmarker; + productName = FaceLandmarker; + productReference = BFEE41E72A45351E00BA1A35 /* FaceLandmarker.app */; + productType = "com.apple.product-type.application"; + }; + BFEE41FC2A45351F00BA1A35 /* FaceLandmarkerTests */ = { + isa = PBXNativeTarget; + buildConfigurationList = BFEE42142A45351F00BA1A35 /* Build configuration list for PBXNativeTarget "FaceLandmarkerTests" */; + buildPhases = ( + 8262D92BE4902008C2E5E89A /* [CP] Check Pods Manifest.lock */, + BFEE41F92A45351F00BA1A35 /* Sources */, + BFEE41FA2A45351F00BA1A35 /* Frameworks */, + BFEE41FB2A45351F00BA1A35 /* Resources */, + ); + buildRules = ( + ); + dependencies = ( + BFEE41FF2A45351F00BA1A35 /* PBXTargetDependency */, + ); + name = FaceLandmarkerTests; + productName = FaceLandmarkerTests; + productReference = BFEE41FD2A45351F00BA1A35 /* FaceLandmarkerTests.xctest */; + 
productType = "com.apple.product-type.bundle.unit-test"; + }; + BFEE42062A45351F00BA1A35 /* FaceLandmarkerUITests */ = { + isa = PBXNativeTarget; + buildConfigurationList = BFEE42172A45351F00BA1A35 /* Build configuration list for PBXNativeTarget "FaceLandmarkerUITests" */; + buildPhases = ( + BFEE42032A45351F00BA1A35 /* Sources */, + BFEE42042A45351F00BA1A35 /* Frameworks */, + BFEE42052A45351F00BA1A35 /* Resources */, + ); + buildRules = ( + ); + dependencies = ( + BFEE42092A45351F00BA1A35 /* PBXTargetDependency */, + ); + name = FaceLandmarkerUITests; + productName = FaceLandmarkerUITests; + productReference = BFEE42072A45351F00BA1A35 /* FaceLandmarkerUITests.xctest */; + productType = "com.apple.product-type.bundle.ui-testing"; + }; +/* End PBXNativeTarget section */ + +/* Begin PBXProject section */ + BFEE41DF2A45351E00BA1A35 /* Project object */ = { + isa = PBXProject; + attributes = { + BuildIndependentTargetsInParallel = 1; + LastSwiftUpdateCheck = 1430; + LastUpgradeCheck = 1430; + TargetAttributes = { + BFEE41E62A45351E00BA1A35 = { + CreatedOnToolsVersion = 14.3; + }; + BFEE41FC2A45351F00BA1A35 = { + CreatedOnToolsVersion = 14.3; + TestTargetID = BFEE41E62A45351E00BA1A35; + }; + BFEE42062A45351F00BA1A35 = { + CreatedOnToolsVersion = 14.3; + TestTargetID = BFEE41E62A45351E00BA1A35; + }; + }; + }; + buildConfigurationList = BFEE41E22A45351E00BA1A35 /* Build configuration list for PBXProject "FaceLandmarker" */; + compatibilityVersion = "Xcode 14.0"; + developmentRegion = en; + hasScannedForEncodings = 0; + knownRegions = ( + en, + Base, + ); + mainGroup = BFEE41DE2A45351E00BA1A35; + productRefGroup = BFEE41E82A45351E00BA1A35 /* Products */; + projectDirPath = ""; + projectRoot = ""; + targets = ( + BFEE41E62A45351E00BA1A35 /* FaceLandmarker */, + BFEE41FC2A45351F00BA1A35 /* FaceLandmarkerTests */, + BFEE42062A45351F00BA1A35 /* FaceLandmarkerUITests */, + ); + }; +/* End PBXProject section */ + +/* Begin PBXResourcesBuildPhase section */ + 
BFEE41E52A45351E00BA1A35 /* Resources */ = { + isa = PBXResourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + BFD91ED62A74F2F300FD4649 /* face_landmarker.task in Resources */, + BFEE42302A4542B400BA1A35 /* Main.storyboard in Resources */, + BFD91ED92A7889DF00FD4649 /* Assets.xcassets in Resources */, + BFEE41F72A45351F00BA1A35 /* LaunchScreen.storyboard in Resources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; + BFEE41FB2A45351F00BA1A35 /* Resources */ = { + isa = PBXResourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + runOnlyForDeploymentPostprocessing = 0; + }; + BFEE42052A45351F00BA1A35 /* Resources */ = { + isa = PBXResourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXResourcesBuildPhase section */ + +/* Begin PBXShellScriptBuildPhase section */ + 8262D92BE4902008C2E5E89A /* [CP] Check Pods Manifest.lock */ = { + isa = PBXShellScriptBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + inputFileListPaths = ( + ); + inputPaths = ( + "${PODS_PODFILE_DIR_PATH}/Podfile.lock", + "${PODS_ROOT}/Manifest.lock", + ); + name = "[CP] Check Pods Manifest.lock"; + outputFileListPaths = ( + ); + outputPaths = ( + "$(DERIVED_FILE_DIR)/Pods-FaceLandmarkerTests-checkManifestLockResult.txt", + ); + runOnlyForDeploymentPostprocessing = 0; + shellPath = /bin/sh; + shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. 
Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n"; + showEnvVarsInLog = 0; + }; + BFD91ED72A74F31600FD4649 /* Download models */ = { + isa = PBXShellScriptBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + inputFileListPaths = ( + ); + inputPaths = ( + ); + name = "Download models"; + outputFileListPaths = ( + ); + outputPaths = ( + ); + runOnlyForDeploymentPostprocessing = 0; + shellPath = /bin/sh; + shellScript = "\"$SRCROOT/RunScripts/download_models.sh\"\n"; + }; + F01E952EE51699A6F761D123 /* [CP] Check Pods Manifest.lock */ = { + isa = PBXShellScriptBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + inputFileListPaths = ( + ); + inputPaths = ( + "${PODS_PODFILE_DIR_PATH}/Podfile.lock", + "${PODS_ROOT}/Manifest.lock", + ); + name = "[CP] Check Pods Manifest.lock"; + outputFileListPaths = ( + ); + outputPaths = ( + "$(DERIVED_FILE_DIR)/Pods-FaceLandmarker-checkManifestLockResult.txt", + ); + runOnlyForDeploymentPostprocessing = 0; + shellPath = /bin/sh; + shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. 
Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n"; + showEnvVarsInLog = 0; + }; +/* End PBXShellScriptBuildPhase section */ + +/* Begin PBXSourcesBuildPhase section */ + BFEE41E32A45351E00BA1A35 /* Sources */ = { + isa = PBXSourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + BFEE42212A45394C00BA1A35 /* PreviewView.swift in Sources */, + BFEE42282A45395400BA1A35 /* ViewController.swift in Sources */, + BFEE42232A45394C00BA1A35 /* CameraFeedManager.swift in Sources */, + BFEE41EB2A45351E00BA1A35 /* AppDelegate.swift in Sources */, + BFEE42292A45395400BA1A35 /* InferenceViewController.swift in Sources */, + BFEE42222A45394C00BA1A35 /* OverlayView.swift in Sources */, + BFEE42202A45394C00BA1A35 /* FaceLandmarkerHelper.swift in Sources */, + BFEE41ED2A45351E00BA1A35 /* SceneDelegate.swift in Sources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; + BFEE41F92A45351F00BA1A35 /* Sources */ = { + isa = PBXSourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + runOnlyForDeploymentPostprocessing = 0; + }; + BFEE42032A45351F00BA1A35 /* Sources */ = { + isa = PBXSourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + BFEE420E2A45351F00BA1A35 /* FaceLandmarkerUITestsLaunchTests.swift in Sources */, + BFEE420C2A45351F00BA1A35 /* FaceLandmarkerUITests.swift in Sources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXSourcesBuildPhase section */ + +/* Begin PBXTargetDependency section */ + BFEE41FF2A45351F00BA1A35 /* PBXTargetDependency */ = { + isa = PBXTargetDependency; + target = BFEE41E62A45351E00BA1A35 /* FaceLandmarker */; + targetProxy = BFEE41FE2A45351F00BA1A35 /* PBXContainerItemProxy */; + }; + BFEE42092A45351F00BA1A35 /* PBXTargetDependency */ = { + isa = PBXTargetDependency; + target = BFEE41E62A45351E00BA1A35 /* FaceLandmarker */; + targetProxy = 
BFEE42082A45351F00BA1A35 /* PBXContainerItemProxy */; + }; +/* End PBXTargetDependency section */ + +/* Begin PBXVariantGroup section */ + BFEE41F52A45351F00BA1A35 /* LaunchScreen.storyboard */ = { + isa = PBXVariantGroup; + children = ( + BFEE41F62A45351F00BA1A35 /* Base */, + ); + name = LaunchScreen.storyboard; + sourceTree = ""; + }; + BFEE422E2A4542B400BA1A35 /* Main.storyboard */ = { + isa = PBXVariantGroup; + children = ( + BFEE422F2A4542B400BA1A35 /* Base */, + ); + name = Main.storyboard; + sourceTree = ""; + }; +/* End PBXVariantGroup section */ + +/* Begin XCBuildConfiguration section */ + BFEE420F2A45351F00BA1A35 /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++20"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_ENABLE_OBJC_WEAK = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_COMMA = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; + CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES; + CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + CLANG_WARN_STRICT_PROTOTYPES = YES; + CLANG_WARN_SUSPICIOUS_MOVE = YES; + CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; + CLANG_WARN_UNREACHABLE_CODE = YES; + CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; + COPY_PHASE_STRIP = NO; + DEBUG_INFORMATION_FORMAT = dwarf; + ENABLE_STRICT_OBJC_MSGSEND = YES; + ENABLE_TESTABILITY = 
YES; + GCC_C_LANGUAGE_STANDARD = gnu11; + GCC_DYNAMIC_NO_PIC = NO; + GCC_NO_COMMON_BLOCKS = YES; + GCC_OPTIMIZATION_LEVEL = 0; + GCC_PREPROCESSOR_DEFINITIONS = ( + "DEBUG=1", + "$(inherited)", + ); + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNDECLARED_SELECTOR = YES; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 15.0; + MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE; + MTL_FAST_MATH = YES; + ONLY_ACTIVE_ARCH = YES; + SDKROOT = iphoneos; + SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG; + SWIFT_OPTIMIZATION_LEVEL = "-Onone"; + }; + name = Debug; + }; + BFEE42102A45351F00BA1A35 /* Release */ = { + isa = XCBuildConfiguration; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++20"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_ENABLE_OBJC_WEAK = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_COMMA = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; + CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES; + CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + CLANG_WARN_STRICT_PROTOTYPES = YES; + CLANG_WARN_SUSPICIOUS_MOVE = YES; + CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; + CLANG_WARN_UNREACHABLE_CODE = YES; + CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; + COPY_PHASE_STRIP = NO; + 
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; + ENABLE_NS_ASSERTIONS = NO; + ENABLE_STRICT_OBJC_MSGSEND = YES; + GCC_C_LANGUAGE_STANDARD = gnu11; + GCC_NO_COMMON_BLOCKS = YES; + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNDECLARED_SELECTOR = YES; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 15.0; + MTL_ENABLE_DEBUG_INFO = NO; + MTL_FAST_MATH = YES; + SDKROOT = iphoneos; + SWIFT_COMPILATION_MODE = wholemodule; + SWIFT_OPTIMIZATION_LEVEL = "-O"; + VALIDATE_PRODUCT = YES; + }; + name = Release; + }; + BFEE42122A45351F00BA1A35 /* Debug */ = { + isa = XCBuildConfiguration; + baseConfigurationReference = FD9F1289B507CF7CF326D7DB /* Pods-FaceLandmarker.debug.xcconfig */; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; + CODE_SIGN_IDENTITY = "Apple Development"; + CODE_SIGN_STYLE = Automatic; + CURRENT_PROJECT_VERSION = 1; + DEVELOPMENT_TEAM = ""; + GENERATE_INFOPLIST_FILE = YES; + INFOPLIST_FILE = FaceLandmarker/Info.plist; + INFOPLIST_KEY_NSCameraUsageDescription = "This app uses camera to get face landmarks that appear in the camera feed."; + INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES; + INFOPLIST_KEY_UILaunchStoryboardName = LaunchScreen; + INFOPLIST_KEY_UIMainStoryboardFile = Main; + INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; + INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; + INFOPLIST_KEY_UIUserInterfaceStyle = Light; + IPHONEOS_DEPLOYMENT_TARGET = 15; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + "@executable_path/Frameworks", + ); + 
MARKETING_VERSION = 1.0; + PRODUCT_BUNDLE_IDENTIFIER = com.google.mediapipe.examples.facelandmarker; + PRODUCT_NAME = "$(TARGET_NAME)"; + PROVISIONING_PROFILE_SPECIFIER = ""; + SWIFT_EMIT_LOC_STRINGS = YES; + SWIFT_VERSION = 5.0; + TARGETED_DEVICE_FAMILY = "1,2"; + }; + name = Debug; + }; + BFEE42132A45351F00BA1A35 /* Release */ = { + isa = XCBuildConfiguration; + baseConfigurationReference = E926CBA36E9A76BEFC7FB7DF /* Pods-FaceLandmarker.release.xcconfig */; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; + CODE_SIGN_IDENTITY = "Apple Development"; + CODE_SIGN_STYLE = Automatic; + CURRENT_PROJECT_VERSION = 1; + DEVELOPMENT_TEAM = ""; + GENERATE_INFOPLIST_FILE = YES; + INFOPLIST_FILE = FaceLandmarker/Info.plist; + INFOPLIST_KEY_NSCameraUsageDescription = "This app uses camera to get face landmarks that appear in the camera feed."; + INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES; + INFOPLIST_KEY_UILaunchStoryboardName = LaunchScreen; + INFOPLIST_KEY_UIMainStoryboardFile = Main; + INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; + INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; + INFOPLIST_KEY_UIUserInterfaceStyle = Light; + IPHONEOS_DEPLOYMENT_TARGET = 15; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + "@executable_path/Frameworks", + ); + MARKETING_VERSION = 1.0; + PRODUCT_BUNDLE_IDENTIFIER = com.google.mediapipe.examples.facelandmarker; + PRODUCT_NAME = "$(TARGET_NAME)"; + PROVISIONING_PROFILE_SPECIFIER = ""; + SWIFT_EMIT_LOC_STRINGS = YES; + SWIFT_VERSION = 5.0; + TARGETED_DEVICE_FAMILY = "1,2"; + }; + name = Release; + }; + BFEE42152A45351F00BA1A35 /* Debug */ = { + isa = XCBuildConfiguration; + 
baseConfigurationReference = 7D098A775C42220DE7F02EC0 /* Pods-FaceLandmarkerTests.debug.xcconfig */; + buildSettings = { + ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES; + BUNDLE_LOADER = "$(TEST_HOST)"; + CODE_SIGN_STYLE = Automatic; + CURRENT_PROJECT_VERSION = 1; + GENERATE_INFOPLIST_FILE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 16.4; + MARKETING_VERSION = 1.0; + PRODUCT_BUNDLE_IDENTIFIER = com.example.FaceLandmarkerTests; + PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_EMIT_LOC_STRINGS = NO; + SWIFT_VERSION = 5.0; + TARGETED_DEVICE_FAMILY = "1,2"; + TEST_HOST = "$(BUILT_PRODUCTS_DIR)/FaceLandmarker.app/$(BUNDLE_EXECUTABLE_FOLDER_PATH)/FaceLandmarker"; + }; + name = Debug; + }; + BFEE42162A45351F00BA1A35 /* Release */ = { + isa = XCBuildConfiguration; + baseConfigurationReference = 40B88274C9701B9197F0726E /* Pods-FaceLandmarkerTests.release.xcconfig */; + buildSettings = { + ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES; + BUNDLE_LOADER = "$(TEST_HOST)"; + CODE_SIGN_STYLE = Automatic; + CURRENT_PROJECT_VERSION = 1; + GENERATE_INFOPLIST_FILE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 16.4; + MARKETING_VERSION = 1.0; + PRODUCT_BUNDLE_IDENTIFIER = com.example.FaceLandmarkerTests; + PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_EMIT_LOC_STRINGS = NO; + SWIFT_VERSION = 5.0; + TARGETED_DEVICE_FAMILY = "1,2"; + TEST_HOST = "$(BUILT_PRODUCTS_DIR)/FaceLandmarker.app/$(BUNDLE_EXECUTABLE_FOLDER_PATH)/FaceLandmarker"; + }; + name = Release; + }; + BFEE42182A45351F00BA1A35 /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES; + CODE_SIGN_STYLE = Automatic; + CURRENT_PROJECT_VERSION = 1; + GENERATE_INFOPLIST_FILE = YES; + MARKETING_VERSION = 1.0; + PRODUCT_BUNDLE_IDENTIFIER = com.example.FaceLandmarkerUITests; + PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_EMIT_LOC_STRINGS = NO; + SWIFT_VERSION = 5.0; + TARGETED_DEVICE_FAMILY = "1,2"; + TEST_TARGET_NAME = FaceLandmarker; + }; + name = Debug; + }; + BFEE42192A45351F00BA1A35 /* Release */ = { + 
isa = XCBuildConfiguration; + buildSettings = { + ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES; + CODE_SIGN_STYLE = Automatic; + CURRENT_PROJECT_VERSION = 1; + GENERATE_INFOPLIST_FILE = YES; + MARKETING_VERSION = 1.0; + PRODUCT_BUNDLE_IDENTIFIER = com.example.FaceLandmarkerUITests; + PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_EMIT_LOC_STRINGS = NO; + SWIFT_VERSION = 5.0; + TARGETED_DEVICE_FAMILY = "1,2"; + TEST_TARGET_NAME = FaceLandmarker; + }; + name = Release; + }; +/* End XCBuildConfiguration section */ + +/* Begin XCConfigurationList section */ + BFEE41E22A45351E00BA1A35 /* Build configuration list for PBXProject "FaceLandmarker" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + BFEE420F2A45351F00BA1A35 /* Debug */, + BFEE42102A45351F00BA1A35 /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; + BFEE42112A45351F00BA1A35 /* Build configuration list for PBXNativeTarget "FaceLandmarker" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + BFEE42122A45351F00BA1A35 /* Debug */, + BFEE42132A45351F00BA1A35 /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; + BFEE42142A45351F00BA1A35 /* Build configuration list for PBXNativeTarget "FaceLandmarkerTests" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + BFEE42152A45351F00BA1A35 /* Debug */, + BFEE42162A45351F00BA1A35 /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; + BFEE42172A45351F00BA1A35 /* Build configuration list for PBXNativeTarget "FaceLandmarkerUITests" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + BFEE42182A45351F00BA1A35 /* Debug */, + BFEE42192A45351F00BA1A35 /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; +/* End XCConfigurationList section */ + }; + rootObject = BFEE41DF2A45351E00BA1A35 /* Project object */; +} diff --git 
a/examples/face_landmarker/ios/FaceLandmarker/AppDelegate.swift b/examples/face_landmarker/ios/FaceLandmarker/AppDelegate.swift new file mode 100644 index 00000000..0fa39757 --- /dev/null +++ b/examples/face_landmarker/ios/FaceLandmarker/AppDelegate.swift @@ -0,0 +1,43 @@ +// Copyright 2023 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import UIKit + +@main +class AppDelegate: UIResponder, UIApplicationDelegate { + + + + func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool { + // Override point for customization after application launch. + return true + } + + // MARK: UISceneSession Lifecycle + + func application(_ application: UIApplication, configurationForConnecting connectingSceneSession: UISceneSession, options: UIScene.ConnectionOptions) -> UISceneConfiguration { + // Called when a new scene session is being created. + // Use this method to select a configuration to create the new scene with. + return UISceneConfiguration(name: "Default Configuration", sessionRole: connectingSceneSession.role) + } + + func application(_ application: UIApplication, didDiscardSceneSessions sceneSessions: Set) { + // Called when the user discards a scene session. + // If any sessions were discarded while the application was not running, this will be called shortly after application:didFinishLaunchingWithOptions. 
+ // Use this method to release any resources that were specific to the discarded scenes, as they will not return. + } + + +} + diff --git a/examples/face_landmarker/ios/FaceLandmarker/Assets.xcassets/AccentColor.colorset/Contents.json b/examples/face_landmarker/ios/FaceLandmarker/Assets.xcassets/AccentColor.colorset/Contents.json new file mode 100644 index 00000000..eb878970 --- /dev/null +++ b/examples/face_landmarker/ios/FaceLandmarker/Assets.xcassets/AccentColor.colorset/Contents.json @@ -0,0 +1,11 @@ +{ + "colors" : [ + { + "idiom" : "universal" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/examples/face_landmarker/ios/FaceLandmarker/Assets.xcassets/AppIcon.appiconset/Contents.json b/examples/face_landmarker/ios/FaceLandmarker/Assets.xcassets/AppIcon.appiconset/Contents.json new file mode 100644 index 00000000..e730c9ca --- /dev/null +++ b/examples/face_landmarker/ios/FaceLandmarker/Assets.xcassets/AppIcon.appiconset/Contents.json @@ -0,0 +1,14 @@ +{ + "images" : [ + { + "filename" : "ic_launcher_1024.png", + "idiom" : "universal", + "platform" : "ios", + "size" : "1024x1024" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/examples/face_landmarker/ios/FaceLandmarker/Assets.xcassets/AppIcon.appiconset/ic_launcher_1024.png b/examples/face_landmarker/ios/FaceLandmarker/Assets.xcassets/AppIcon.appiconset/ic_launcher_1024.png new file mode 100644 index 00000000..bcda9bb5 Binary files /dev/null and b/examples/face_landmarker/ios/FaceLandmarker/Assets.xcassets/AppIcon.appiconset/ic_launcher_1024.png differ diff --git a/examples/face_landmarker/ios/FaceLandmarker/Assets.xcassets/Contents.json b/examples/face_landmarker/ios/FaceLandmarker/Assets.xcassets/Contents.json new file mode 100644 index 00000000..73c00596 --- /dev/null +++ b/examples/face_landmarker/ios/FaceLandmarker/Assets.xcassets/Contents.json @@ -0,0 +1,6 @@ +{ + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git 
a/examples/face_landmarker/ios/FaceLandmarker/Assets.xcassets/ic_add.imageset/Contents.json b/examples/face_landmarker/ios/FaceLandmarker/Assets.xcassets/ic_add.imageset/Contents.json new file mode 100644 index 00000000..283b7ca6 --- /dev/null +++ b/examples/face_landmarker/ios/FaceLandmarker/Assets.xcassets/ic_add.imageset/Contents.json @@ -0,0 +1,21 @@ +{ + "images" : [ + { + "idiom" : "universal", + "scale" : "1x" + }, + { + "filename" : "ic_add.png", + "idiom" : "universal", + "scale" : "2x" + }, + { + "idiom" : "universal", + "scale" : "3x" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/examples/face_landmarker/ios/FaceLandmarker/Assets.xcassets/ic_add.imageset/ic_add.png b/examples/face_landmarker/ios/FaceLandmarker/Assets.xcassets/ic_add.imageset/ic_add.png new file mode 100644 index 00000000..8ca20248 Binary files /dev/null and b/examples/face_landmarker/ios/FaceLandmarker/Assets.xcassets/ic_add.imageset/ic_add.png differ diff --git a/examples/face_landmarker/ios/FaceLandmarker/Assets.xcassets/ic_camera.imageset/Contents.json b/examples/face_landmarker/ios/FaceLandmarker/Assets.xcassets/ic_camera.imageset/Contents.json new file mode 100644 index 00000000..e6dcb1d7 --- /dev/null +++ b/examples/face_landmarker/ios/FaceLandmarker/Assets.xcassets/ic_camera.imageset/Contents.json @@ -0,0 +1,21 @@ +{ + "images" : [ + { + "idiom" : "universal", + "scale" : "1x" + }, + { + "idiom" : "universal", + "scale" : "2x" + }, + { + "filename" : "ic_camera.png", + "idiom" : "universal", + "scale" : "3x" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/examples/face_landmarker/ios/FaceLandmarker/Assets.xcassets/ic_camera.imageset/ic_camera.png b/examples/face_landmarker/ios/FaceLandmarker/Assets.xcassets/ic_camera.imageset/ic_camera.png new file mode 100644 index 00000000..fead90f1 Binary files /dev/null and b/examples/face_landmarker/ios/FaceLandmarker/Assets.xcassets/ic_camera.imageset/ic_camera.png differ diff 
--git a/examples/face_landmarker/ios/FaceLandmarker/Assets.xcassets/ic_expand_down.imageset/Contents.json b/examples/face_landmarker/ios/FaceLandmarker/Assets.xcassets/ic_expand_down.imageset/Contents.json new file mode 100644 index 00000000..291ebf91 --- /dev/null +++ b/examples/face_landmarker/ios/FaceLandmarker/Assets.xcassets/ic_expand_down.imageset/Contents.json @@ -0,0 +1,21 @@ +{ + "images" : [ + { + "idiom" : "universal", + "scale" : "1x" + }, + { + "filename" : "ic_expand_down.png", + "idiom" : "universal", + "scale" : "2x" + }, + { + "idiom" : "universal", + "scale" : "3x" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/examples/face_landmarker/ios/FaceLandmarker/Assets.xcassets/ic_expand_down.imageset/ic_expand_down.png b/examples/face_landmarker/ios/FaceLandmarker/Assets.xcassets/ic_expand_down.imageset/ic_expand_down.png new file mode 100644 index 00000000..0443030e Binary files /dev/null and b/examples/face_landmarker/ios/FaceLandmarker/Assets.xcassets/ic_expand_down.imageset/ic_expand_down.png differ diff --git a/examples/face_landmarker/ios/FaceLandmarker/Assets.xcassets/ic_expand_up.imageset/Contents.json b/examples/face_landmarker/ios/FaceLandmarker/Assets.xcassets/ic_expand_up.imageset/Contents.json new file mode 100644 index 00000000..6eda6253 --- /dev/null +++ b/examples/face_landmarker/ios/FaceLandmarker/Assets.xcassets/ic_expand_up.imageset/Contents.json @@ -0,0 +1,21 @@ +{ + "images" : [ + { + "idiom" : "universal", + "scale" : "1x" + }, + { + "filename" : "ic_expand_up.png", + "idiom" : "universal", + "scale" : "2x" + }, + { + "idiom" : "universal", + "scale" : "3x" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/examples/face_landmarker/ios/FaceLandmarker/Assets.xcassets/ic_expand_up.imageset/ic_expand_up.png b/examples/face_landmarker/ios/FaceLandmarker/Assets.xcassets/ic_expand_up.imageset/ic_expand_up.png new file mode 100644 index 00000000..d0251860 Binary files /dev/null 
and b/examples/face_landmarker/ios/FaceLandmarker/Assets.xcassets/ic_expand_up.imageset/ic_expand_up.png differ diff --git a/examples/face_landmarker/ios/FaceLandmarker/Assets.xcassets/ic_library.imageset/Contents.json b/examples/face_landmarker/ios/FaceLandmarker/Assets.xcassets/ic_library.imageset/Contents.json new file mode 100644 index 00000000..5abf40ed --- /dev/null +++ b/examples/face_landmarker/ios/FaceLandmarker/Assets.xcassets/ic_library.imageset/Contents.json @@ -0,0 +1,21 @@ +{ + "images" : [ + { + "idiom" : "universal", + "scale" : "1x" + }, + { + "idiom" : "universal", + "scale" : "2x" + }, + { + "filename" : "ic_library.png", + "idiom" : "universal", + "scale" : "3x" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/examples/face_landmarker/ios/FaceLandmarker/Assets.xcassets/ic_library.imageset/ic_library.png b/examples/face_landmarker/ios/FaceLandmarker/Assets.xcassets/ic_library.imageset/ic_library.png new file mode 100644 index 00000000..7845f28d Binary files /dev/null and b/examples/face_landmarker/ios/FaceLandmarker/Assets.xcassets/ic_library.imageset/ic_library.png differ diff --git a/examples/face_landmarker/ios/FaceLandmarker/Assets.xcassets/ic_mediapipe.imageset/Contents.json b/examples/face_landmarker/ios/FaceLandmarker/Assets.xcassets/ic_mediapipe.imageset/Contents.json new file mode 100644 index 00000000..01f67d24 --- /dev/null +++ b/examples/face_landmarker/ios/FaceLandmarker/Assets.xcassets/ic_mediapipe.imageset/Contents.json @@ -0,0 +1,21 @@ +{ + "images" : [ + { + "idiom" : "universal", + "scale" : "1x" + }, + { + "filename" : "ic_launcher_adaptive_fore.png", + "idiom" : "universal", + "scale" : "2x" + }, + { + "idiom" : "universal", + "scale" : "3x" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/examples/face_landmarker/ios/FaceLandmarker/Assets.xcassets/ic_mediapipe.imageset/ic_launcher_adaptive_fore.png 
b/examples/face_landmarker/ios/FaceLandmarker/Assets.xcassets/ic_mediapipe.imageset/ic_launcher_adaptive_fore.png new file mode 100644 index 00000000..5d5d7d14 Binary files /dev/null and b/examples/face_landmarker/ios/FaceLandmarker/Assets.xcassets/ic_mediapipe.imageset/ic_launcher_adaptive_fore.png differ diff --git a/examples/face_landmarker/ios/FaceLandmarker/Base.lproj/LaunchScreen.storyboard b/examples/face_landmarker/ios/FaceLandmarker/Base.lproj/LaunchScreen.storyboard new file mode 100644 index 00000000..865e9329 --- /dev/null +++ b/examples/face_landmarker/ios/FaceLandmarker/Base.lproj/LaunchScreen.storyboard @@ -0,0 +1,25 @@ + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/examples/face_landmarker/ios/FaceLandmarker/Base.lproj/Main.storyboard b/examples/face_landmarker/ios/FaceLandmarker/Base.lproj/Main.storyboard new file mode 100644 index 00000000..c726054d --- /dev/null +++ b/examples/face_landmarker/ios/FaceLandmarker/Base.lproj/Main.storyboard @@ -0,0 +1,403 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/examples/face_landmarker/ios/FaceLandmarker/Helper/CameraFeedManager.swift b/examples/face_landmarker/ios/FaceLandmarker/Helper/CameraFeedManager.swift new file mode 100644 index 00000000..211c8bef --- /dev/null +++ b/examples/face_landmarker/ios/FaceLandmarker/Helper/CameraFeedManager.swift @@ -0,0 +1,394 @@ +// Copyright 2023 The MediaPipe Authors. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import UIKit +import AVFoundation + +// MARK: CameraFeedManagerDelegate Declaration +protocol CameraFeedManagerDelegate: AnyObject { + + /** + This method delivers the pixel buffer of the current frame seen by the device's camera. + */ + func didOutput(sampleBuffer: CMSampleBuffer, orientation: UIImage.Orientation) + + /** + This method indicates that the camera permissions have been denied. + */ + func presentCameraPermissionsDeniedAlert() + + /** + This method indicates that there was an error in video configuration. + */ + func presentVideoConfigurationErrorAlert() + + /** + This method indicates that a session runtime error occurred. + */ + func sessionRunTimeErrorOccured() + + /** + This method indicates that the session was interrupted. + */ + func sessionWasInterrupted(canResumeManually resumeManually: Bool) + + /** + This method indicates that the session interruption has ended. + */ + func sessionInterruptionEnded() + +} + +/** + This enum holds the state of the camera initialization.
+ */ +enum CameraConfiguration { + + case success + case failed + case permissionDenied +} + +/** + This class manages all camera related functionality + */ +class CameraFeedManager: NSObject { + + // MARK: Camera Related Instance Variables + private let session: AVCaptureSession = AVCaptureSession() + private let previewView: PreviewView + private let sessionQueue = DispatchQueue(label: "sessionQueue") + private var cameraConfiguration: CameraConfiguration = .failed + private lazy var videoDataOutput = AVCaptureVideoDataOutput() + private var isSessionRunning = false + private var coreImageContext: CIContext + private var needCalculationSize = true + + var cameraPosition: AVCaptureDevice.Position = .front + var orientation: UIImage.Orientation { + get { + switch UIDevice.current.orientation { + case .landscapeLeft: + return .left + case .landscapeRight: + return .right + default: + return .up + } + } + } + var videoFrameSize: CGSize = .zero + + // MARK: CameraFeedManagerDelegate + weak var delegate: CameraFeedManagerDelegate? 
+ + // MARK: Initializer + init(previewView: PreviewView) { + self.previewView = previewView + if let metalDevice = MTLCreateSystemDefaultDevice() { + coreImageContext = CIContext(mtlDevice: metalDevice) + } else { + coreImageContext = CIContext(options: nil) + } + super.init() + + // Initializes the session + session.sessionPreset = .high + self.previewView.session = session + self.previewView.previewLayer.connection?.videoOrientation = .portrait + self.previewView.previewLayer.videoGravity = .resizeAspectFill + self.attemptToConfigureSession() + NotificationCenter.default.addObserver(self, selector: #selector(orientationChanged), name: UIDevice.orientationDidChangeNotification, object: nil) + } + + deinit { + NotificationCenter.default.removeObserver(self) + } + + // MARK: notification methods + @objc func orientationChanged(notification: Notification) { + needCalculationSize = true + switch orientation { + case .up: + previewView.previewLayer.connection?.videoOrientation = .portrait + case .left: + previewView.previewLayer.connection?.videoOrientation = .landscapeRight + case .right: + previewView.previewLayer.connection?.videoOrientation = .landscapeLeft + default: + break + } + } + + // MARK: Session Start and End methods + + /** + This method starts an AVCaptureSession based on whether the camera configuration was successful. + */ + + func checkCameraConfigurationAndStartSession() { + sessionQueue.async { + switch self.cameraConfiguration { + case .success: + self.addObservers() + self.startSession() + case .failed: + DispatchQueue.main.async { + self.delegate?.presentVideoConfigurationErrorAlert() + } + case .permissionDenied: + DispatchQueue.main.async { + self.delegate?.presentCameraPermissionsDeniedAlert() + } + } + } + } + + /** + This method stops a running an AVCaptureSession. 
+ */ + func stopSession() { + self.removeObservers() + sessionQueue.async { + if self.session.isRunning { + self.session.stopRunning() + self.isSessionRunning = self.session.isRunning + } + } + + } + + /** + This method resumes an interrupted AVCaptureSession. + */ + func resumeInterruptedSession(withCompletion completion: @escaping (Bool) -> ()) { + + sessionQueue.async { + self.startSession() + + DispatchQueue.main.async { + completion(self.isSessionRunning) + } + } + } + + /** + This method starts the AVCaptureSession + **/ + private func startSession() { + self.session.startRunning() + self.isSessionRunning = self.session.isRunning + } + + // MARK: Session Configuration Methods. + /** + This method requests for camera permissions and handles the configuration of the session and stores the result of configuration. + */ + private func attemptToConfigureSession() { + switch AVCaptureDevice.authorizationStatus(for: .video) { + case .authorized: + self.cameraConfiguration = .success + case .notDetermined: + self.sessionQueue.suspend() + self.requestCameraAccess(completion: { (granted) in + self.sessionQueue.resume() + }) + case .denied: + self.cameraConfiguration = .permissionDenied + default: + break + } + + self.sessionQueue.async { + self.configureSession() + } + } + + /** + This method requests for camera permissions. + */ + private func requestCameraAccess(completion: @escaping (Bool) -> ()) { + AVCaptureDevice.requestAccess(for: .video) { (granted) in + if !granted { + self.cameraConfiguration = .permissionDenied + } + else { + self.cameraConfiguration = .success + } + completion(granted) + } + } + + + /** + This method handles all the steps to configure an AVCaptureSession. + */ + private func configureSession() { + + guard cameraConfiguration == .success else { + return + } + session.beginConfiguration() + + // Tries to add an AVCaptureDeviceInput. 
+ guard addVideoDeviceInput() == true else { + self.session.commitConfiguration() + self.cameraConfiguration = .failed + return + } + + // Tries to add an AVCaptureVideoDataOutput. + guard addVideoDataOutput() else { + self.session.commitConfiguration() + self.cameraConfiguration = .failed + return + } + + session.commitConfiguration() + self.cameraConfiguration = .success + } + + /** + This method tries to add an AVCaptureDeviceInput to the current AVCaptureSession. + */ + private func addVideoDeviceInput() -> Bool { + + /**Tries to get the default back camera. + */ + guard let camera = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: cameraPosition) else { + return false + } + + do { + let videoDeviceInput = try AVCaptureDeviceInput(device: camera) + if session.canAddInput(videoDeviceInput) { + session.addInput(videoDeviceInput) + return true + } + else { + return false + } + } + catch { + fatalError("Cannot create video device input") + } + } + + /** + This method tries to add an AVCaptureVideoDataOutput to the current AVCaptureSession.
+ */ + private func addVideoDataOutput() -> Bool { + + let sampleBufferQueue = DispatchQueue(label: "sampleBufferQueue") + videoDataOutput.setSampleBufferDelegate(self, queue: sampleBufferQueue) + videoDataOutput.alwaysDiscardsLateVideoFrames = true + videoDataOutput.videoSettings = [ String(kCVPixelBufferPixelFormatTypeKey) : kCMPixelFormat_32BGRA] + + if session.canAddOutput(videoDataOutput) { + session.addOutput(videoDataOutput) + videoDataOutput.connection(with: .video)?.videoOrientation = .portrait + if videoDataOutput.connection(with: .video)?.isVideoOrientationSupported == true + && cameraPosition == .front { + videoDataOutput.connection(with: .video)?.isVideoMirrored = true + } + return true + } + return false + } + + // MARK: Notification Observer Handling + private func addObservers() { + NotificationCenter.default.addObserver(self, selector: #selector(CameraFeedManager.sessionRuntimeErrorOccured(notification:)), name: NSNotification.Name.AVCaptureSessionRuntimeError, object: session) + NotificationCenter.default.addObserver(self, selector: #selector(CameraFeedManager.sessionWasInterrupted(notification:)), name: NSNotification.Name.AVCaptureSessionWasInterrupted, object: session) + NotificationCenter.default.addObserver(self, selector: #selector(CameraFeedManager.sessionInterruptionEnded), name: NSNotification.Name.AVCaptureSessionInterruptionEnded, object: session) + } + + private func removeObservers() { + NotificationCenter.default.removeObserver(self, name: NSNotification.Name.AVCaptureSessionRuntimeError, object: session) + NotificationCenter.default.removeObserver(self, name: NSNotification.Name.AVCaptureSessionWasInterrupted, object: session) + NotificationCenter.default.removeObserver(self, name: NSNotification.Name.AVCaptureSessionInterruptionEnded, object: session) + } + + // MARK: Notification Observers + @objc func sessionWasInterrupted(notification: Notification) { + + if let userInfoValue = 
notification.userInfo?[AVCaptureSessionInterruptionReasonKey] as AnyObject?, + let reasonIntegerValue = userInfoValue.integerValue, + let reason = AVCaptureSession.InterruptionReason(rawValue: reasonIntegerValue) { + print("Capture session was interrupted with reason \(reason)") + + var canResumeManually = false + if reason == .videoDeviceInUseByAnotherClient { + canResumeManually = true + } else if reason == .videoDeviceNotAvailableWithMultipleForegroundApps { + canResumeManually = false + } + + self.delegate?.sessionWasInterrupted(canResumeManually: canResumeManually) + + } + } + + @objc func sessionInterruptionEnded(notification: Notification) { + + self.delegate?.sessionInterruptionEnded() + } + + @objc func sessionRuntimeErrorOccured(notification: Notification) { + guard let error = notification.userInfo?[AVCaptureSessionErrorKey] as? AVError else { + return + } + + print("Capture session runtime error: \(error)") + + if error.code == .mediaServicesWereReset { + sessionQueue.async { + if self.isSessionRunning { + self.startSession() + } else { + DispatchQueue.main.async { + self.delegate?.sessionRunTimeErrorOccured() + } + } + } + } else { + self.delegate?.sessionRunTimeErrorOccured() + + } + } +} + +/** + AVCaptureVideoDataOutputSampleBufferDelegate + */ +extension CameraFeedManager: AVCaptureVideoDataOutputSampleBufferDelegate { + + /** This method delegates the CVPixelBuffer of the frame seen by the camera currently. + */ + func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) { + if needCalculationSize { + let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)! 
+ switch orientation { + case .left, .right: + videoFrameSize = CGSize(width: CVPixelBufferGetHeight(imageBuffer), height: CVPixelBufferGetWidth(imageBuffer)) + default: + videoFrameSize = CGSize(width: CVPixelBufferGetWidth(imageBuffer), height: CVPixelBufferGetHeight(imageBuffer)) + } + needCalculationSize = false + print(videoFrameSize) + } + + delegate?.didOutput(sampleBuffer: sampleBuffer, orientation: orientation) + } +} diff --git a/examples/face_landmarker/ios/FaceLandmarker/Helper/FaceLandmarkerHelper.swift b/examples/face_landmarker/ios/FaceLandmarker/Helper/FaceLandmarkerHelper.swift new file mode 100644 index 00000000..e49c8be1 --- /dev/null +++ b/examples/face_landmarker/ios/FaceLandmarker/Helper/FaceLandmarkerHelper.swift @@ -0,0 +1,152 @@ +// Copyright 2023 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import UIKit +import MediaPipeTasksVision +import AVFoundation + +protocol FaceLandmarkerHelperDelegate: AnyObject { + func faceLandmarkerHelper(_ faceLandmarkerHelper: FaceLandmarkerHelper, + didFinishDetection result: ResultBundle?, + error: Error?) +} + +class FaceLandmarkerHelper: NSObject { + + weak var delegate: FaceLandmarkerHelperDelegate? + var faceLandmarker: FaceLandmarker? + + init(modelPath: String?, numFaces: Int, minFaceDetectionConfidence: Float, minFacePresenceConfidence: Float, minTrackingConfidence: Float, runningModel: RunningMode, delegate: FaceLandmarkerHelperDelegate?) 
{ + super.init() + guard let modelPath = modelPath else { return } + let faceLandmarkerOptions = FaceLandmarkerOptions() + faceLandmarkerOptions.runningMode = runningModel + faceLandmarkerOptions.numFaces = numFaces + faceLandmarkerOptions.minFaceDetectionConfidence = minFaceDetectionConfidence + faceLandmarkerOptions.minFacePresenceConfidence = minFacePresenceConfidence + faceLandmarkerOptions.minTrackingConfidence = minTrackingConfidence + faceLandmarkerOptions.faceLandmarkerLiveStreamDelegate = runningModel == .liveStream ? self : nil + self.delegate = delegate + faceLandmarkerOptions.baseOptions.modelAssetPath = modelPath + do { + faceLandmarker = try FaceLandmarker(options: faceLandmarkerOptions) + } catch { + print(error) + } + } + + /** + This method returns a FaceLandmarkerResult object and infrenceTime after receiving an image + **/ + func detect(image: UIImage) -> ResultBundle? { + guard let mpImage = try? MPImage(uiImage: image) else { return nil } + do { + let startDate = Date() + let result = try faceLandmarker?.detect(image: mpImage) + let inferenceTime = Date().timeIntervalSince(startDate) * 1000 + return ResultBundle(inferenceTime: inferenceTime, faceLandmarkerResults: [result]) + } catch { + print(error) + return nil + } + } + + /** + This method return FaceLandmarkerResult and infrenceTime when receive videoFrame + **/ + func detectAsync(videoFrame: CMSampleBuffer, orientation: UIImage.Orientation, timeStamps: Int) { + guard let faceLandmarker = faceLandmarker, + let image = try? MPImage(sampleBuffer: videoFrame, orientation: orientation) else { return } + do { + try faceLandmarker.detectAsync(image: image, timestampInMilliseconds: timeStamps) + } catch { + print(error) + } + } + + /** + This method returns a FaceLandmarkerResults object and infrenceTime when receiving videoUrl and inferenceIntervalMs + **/ + func detectVideoFile(url: URL, inferenceIntervalMs: Double) async -> ResultBundle? 
{ + guard let faceLandmarker = faceLandmarker else { return nil } + let asset: AVAsset = AVAsset(url: url) + guard let videoDurationMs = try? await asset.load(.duration).seconds * 1000 else { return nil } + + // Using AVAssetImageGenerator to produce images from video content + let generator = AVAssetImageGenerator(asset:asset) + generator.requestedTimeToleranceBefore = CMTimeMake(value: 1, timescale: 25) + generator.requestedTimeToleranceAfter = CMTimeMake(value: 1, timescale: 25) + generator.appliesPreferredTrackTransform = true + let frameCount = Int(videoDurationMs / inferenceIntervalMs) + var faceLandmarkerResults: [FaceLandmarkerResult?] = [] + var size: CGSize = .zero + let startDate = Date() + // Go through each frame and detect it + for i in 0 ..< frameCount { + let timestampMs = inferenceIntervalMs * Double(i) // ms + let time = CMTime(seconds: timestampMs / 1000, preferredTimescale: 600) + if let image = getImageFromVideo(generator, atTime: time) { + size = image.size + do { + let result = try faceLandmarker.detect(videoFrame: MPImage(uiImage: image), timestampInMilliseconds: Int(timestampMs)) + faceLandmarkerResults.append(result) + } catch { + print(error) + faceLandmarkerResults.append(nil) + } + } else { + faceLandmarkerResults.append(nil) + } + } + let inferenceTime = Date().timeIntervalSince(startDate) / Double(frameCount) * 1000 + return ResultBundle(inferenceTime: inferenceTime, faceLandmarkerResults: faceLandmarkerResults, imageSize: size) + } + + /** + This method returns an image object and when receiving assetImageGenerator and time + **/ + private func getImageFromVideo(_ generator: AVAssetImageGenerator, atTime time: CMTime) -> UIImage? { + let image:CGImage? 
+ do { + try image = generator.copyCGImage(at: time, actualTime:nil) + } catch { + print(error) + return nil + } + guard let image = image else { return nil } + return UIImage(cgImage: image) + } +} + +extension FaceLandmarkerHelper: FaceLandmarkerLiveStreamDelegate { + func faceLandmarker(_ faceLandmarker: FaceLandmarker, didFinishDetection result: FaceLandmarkerResult?, timestampInMilliseconds: Int, error: Error?) { + guard let result = result else { + delegate?.faceLandmarkerHelper(self, didFinishDetection: nil, error: error) + return + } + let resultBundle = ResultBundle( + inferenceTime: Date().timeIntervalSince1970 * 1000 - Double(timestampInMilliseconds), + faceLandmarkerResults: [result]) + delegate?.faceLandmarkerHelper(self, didFinishDetection: resultBundle, error: nil) + } + + +} + +/// A result from the `FaceLandmarkerHelper`. +struct ResultBundle { + let inferenceTime: Double + let faceLandmarkerResults: [FaceLandmarkerResult?] + var imageSize: CGSize = .zero +} diff --git a/examples/face_landmarker/ios/FaceLandmarker/Helper/OverlayView.swift b/examples/face_landmarker/ios/FaceLandmarker/Helper/OverlayView.swift new file mode 100644 index 00000000..0982c219 --- /dev/null +++ b/examples/face_landmarker/ios/FaceLandmarker/Helper/OverlayView.swift @@ -0,0 +1,176 @@ +// Copyright 2023 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import UIKit +import MediaPipeTasksVision + +/// A straight line. 
+struct Line { + let from: CGPoint + let to: CGPoint +} + +/// Line connection +struct LineConnection { + let color: UIColor + let lines: [Line] +} + +/** + This structure holds the display parameters for the overlay to be drawn on a detected object. + */ +struct ObjectOverlay { + let dots: [CGPoint] + let lineConnectios: [LineConnection] +} + +/// Custom view to visualize the face landmarks result on top of the input image. +class OverlayView: UIView { + + var objectOverlays: [ObjectOverlay] = [] + private let lineWidth: CGFloat = 3 + private let pointRadius: CGFloat = 3 + private let pointColor = UIColor.yellow + + override func draw(_ rect: CGRect) { + for objectOverlay in objectOverlays { + drawDots(objectOverlay.dots) + for lineConnection in objectOverlay.lineConnectios { + drawLines(lineConnection.lines, lineColor: lineConnection.color) + } + } + } + + /** + This method takes the landmarks, translates the points and lines to the current view, and draws them on top of the view. + */ + func drawLandmarks(_ landmarks: [[NormalizedLandmark]], orientation: UIImage.Orientation, withImageSize imageSize: CGSize) { + guard !landmarks.isEmpty else { + objectOverlays = [] + setNeedsDisplay() + return + } + + var viewWidth = bounds.size.width + var viewHeight = bounds.size.height + var originX: CGFloat = 0 + var originY: CGFloat = 0 + + if viewWidth / viewHeight > imageSize.width / imageSize.height { + viewHeight = imageSize.height / imageSize.width * bounds.size.width + originY = (bounds.size.height - viewHeight) / 2 + } else { + viewWidth = imageSize.width / imageSize.height * bounds.size.height + originX = (bounds.size.width - viewWidth) / 2 + } + + var objectOverlays: [ObjectOverlay] = [] + + for landmark in landmarks { + var transformedLandmark: [CGPoint]!
+ + switch orientation { + case .left: + transformedLandmark = landmark.map({CGPoint(x: CGFloat($0.y), y: 1 - CGFloat($0.x))}) + case .right: + transformedLandmark = landmark.map({CGPoint(x: 1 - CGFloat($0.y), y: CGFloat($0.x))}) + default: + transformedLandmark = landmark.map({CGPoint(x: CGFloat($0.x), y: CGFloat($0.y))}) + } + + let dots: [CGPoint] = transformedLandmark.map({CGPoint(x: CGFloat($0.x) * viewWidth + originX, y: CGFloat($0.y) * viewHeight + originY)}) + var lineConnections: [LineConnection] = [] + lineConnections.append(LineConnection( + color: UIColor(red: 0, green: 127/255.0, blue: 139/255.0, alpha: 1), + lines: FaceLandmarker.faceOvalConnections() + .map({ connection in + let start = transformedLandmark[Int(connection.start)] + let end = transformedLandmark[Int(connection.end)] + return Line(from: CGPoint(x: CGFloat(start.x) * viewWidth + originX, y: CGFloat(start.y) * viewHeight + originY), + to: CGPoint(x: CGFloat(end.x) * viewWidth + originX, y: CGFloat(end.y) * viewHeight + originY)) + }))) + lineConnections.append(LineConnection( + color: UIColor(red: 18/255.0, green: 181/255.0, blue: 203/255.0, alpha: 1), + lines: FaceLandmarker.rightEyebrowConnections() + .map({ connection in + let start = transformedLandmark[Int(connection.start)] + let end = transformedLandmark[Int(connection.end)] + return Line(from: CGPoint(x: CGFloat(start.x) * viewWidth + originX, y: CGFloat(start.y) * viewHeight + originY), + to: CGPoint(x: CGFloat(end.x) * viewWidth + originX, y: CGFloat(end.y) * viewHeight + originY)) + }))) + lineConnections.append(LineConnection( + color: UIColor(red: 18/255.0, green: 181/255.0, blue: 203/255.0, alpha: 1), + lines: FaceLandmarker.leftEyebrowConnections() + .map({ connection in + let start = transformedLandmark[Int(connection.start)] + let end = transformedLandmark[Int(connection.end)] + return Line(from: CGPoint(x: CGFloat(start.x) * viewWidth + originX, y: CGFloat(start.y) * viewHeight + originY), + to: CGPoint(x: CGFloat(end.x) 
* viewWidth + originX, y: CGFloat(end.y) * viewHeight + originY)) + }))) + lineConnections.append(LineConnection( + color: UIColor(red: 255/255.0, green: 171/255.0, blue: 0, alpha: 1), + lines: FaceLandmarker.rightEyeConnections() + .map({ connection in + let start = transformedLandmark[Int(connection.start)] + let end = transformedLandmark[Int(connection.end)] + return Line(from: CGPoint(x: CGFloat(start.x) * viewWidth + originX, y: CGFloat(start.y) * viewHeight + originY), + to: CGPoint(x: CGFloat(end.x) * viewWidth + originX, y: CGFloat(end.y) * viewHeight + originY)) + }))) + lineConnections.append(LineConnection( + color: UIColor(red: 255/255.0, green: 171/255.0, blue: 0, alpha: 1), + lines: FaceLandmarker.leftEyeConnections() + .map({ connection in + let start = transformedLandmark[Int(connection.start)] + let end = transformedLandmark[Int(connection.end)] + return Line(from: CGPoint(x: CGFloat(start.x) * viewWidth + originX, y: CGFloat(start.y) * viewHeight + originY), + to: CGPoint(x: CGFloat(end.x) * viewWidth + originX, y: CGFloat(end.y) * viewHeight + originY)) + }))) + lineConnections.append(LineConnection( + color: UIColor(red: 176/255.0, green: 0, blue: 32/255.0, alpha: 1), + lines: FaceLandmarker.lipsConnections() + .map({ connection in + let start = transformedLandmark[Int(connection.start)] + let end = transformedLandmark[Int(connection.end)] + return Line(from: CGPoint(x: CGFloat(start.x) * viewWidth + originX, y: CGFloat(start.y) * viewHeight + originY), + to: CGPoint(x: CGFloat(end.x) * viewWidth + originX, y: CGFloat(end.y) * viewHeight + originY)) + }))) + objectOverlays.append(ObjectOverlay(dots: dots, lineConnectios: lineConnections)) + } + + self.objectOverlays = objectOverlays + setNeedsDisplay() + } + + private func drawDots(_ dots: [CGPoint]) { + for dot in dots { + let dotRect = CGRect( + x: CGFloat(dot.x) - pointRadius / 2, y: CGFloat(dot.y) - pointRadius / 2, + width: pointRadius, height: pointRadius) + let path = UIBezierPath(ovalIn:
dotRect) + pointColor.setFill() + path.fill() + } + } + + private func drawLines(_ lines: [Line], lineColor: UIColor) { + let path = UIBezierPath() + for line in lines { + path.move(to: line.from) + path.addLine(to: line.to) + } + path.lineWidth = lineWidth + lineColor.setStroke() + path.stroke() + } +} diff --git a/examples/face_landmarker/ios/FaceLandmarker/Helper/PreviewView.swift b/examples/face_landmarker/ios/FaceLandmarker/Helper/PreviewView.swift new file mode 100644 index 00000000..58a247d4 --- /dev/null +++ b/examples/face_landmarker/ios/FaceLandmarker/Helper/PreviewView.swift @@ -0,0 +1,96 @@ +// Copyright 2023 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import UIKit +import AVFoundation + +/** + Displays a preview of the image being processed. By default, this uses the device's camera frame, + but will use a still image copied from clipboard if `shouldUseClipboardImage` is set to true. 
+ */ +class PreviewView: UIView { + + var shouldUseClipboardImage: Bool = false { + didSet { + if shouldUseClipboardImage { + if imageView.superview == nil { + addSubview(imageView) + let constraints = [ + NSLayoutConstraint(item: imageView, attribute: .top, + relatedBy: .equal, + toItem: self, attribute: .top, + multiplier: 1, constant: 0), + NSLayoutConstraint(item: imageView, attribute: .leading, + relatedBy: .equal, + toItem: self, attribute: .leading, + multiplier: 1, constant: 0), + NSLayoutConstraint(item: imageView, attribute: .trailing, + relatedBy: .equal, + toItem: self, attribute: .trailing, + multiplier: 1, constant: 0), + NSLayoutConstraint(item: imageView, attribute: .bottom, + relatedBy: .equal, + toItem: self, attribute: .bottom, + multiplier: 1, constant: 0), + ] + addConstraints(constraints) + layoutIfNeeded() + layoutSubviews() + } + + } else { + imageView.removeFromSuperview() + imageView.image = nil + } + } + } + + lazy private var imageView: UIImageView = { + let imageView = UIImageView() + imageView.backgroundColor = .black + imageView.contentMode = .scaleAspectFill + imageView.translatesAutoresizingMaskIntoConstraints = false + return imageView + }() + + var image: UIImage? { + get { + return imageView.image + } + set { + imageView.image = newValue + } + } + + var previewLayer: AVCaptureVideoPreviewLayer { + guard let layer = layer as? AVCaptureVideoPreviewLayer else { + fatalError("Layer expected is of type VideoPreviewLayer") + } + return layer + } + + var session: AVCaptureSession? 
{ + get { + return previewLayer.session + } + set { + previewLayer.session = newValue + } + } + + override class var layerClass: AnyClass { + return AVCaptureVideoPreviewLayer.self + } + +} diff --git a/examples/face_landmarker/ios/FaceLandmarker/Info.plist b/examples/face_landmarker/ios/FaceLandmarker/Info.plist new file mode 100644 index 00000000..dd3c9afd --- /dev/null +++ b/examples/face_landmarker/ios/FaceLandmarker/Info.plist @@ -0,0 +1,25 @@ + + + + + UIApplicationSceneManifest + + UIApplicationSupportsMultipleScenes + + UISceneConfigurations + + UIWindowSceneSessionRoleApplication + + + UISceneConfigurationName + Default Configuration + UISceneDelegateClassName + $(PRODUCT_MODULE_NAME).SceneDelegate + UISceneStoryboardFile + Main + + + + + + diff --git a/examples/face_landmarker/ios/FaceLandmarker/SceneDelegate.swift b/examples/face_landmarker/ios/FaceLandmarker/SceneDelegate.swift new file mode 100644 index 00000000..d259406c --- /dev/null +++ b/examples/face_landmarker/ios/FaceLandmarker/SceneDelegate.swift @@ -0,0 +1,59 @@ +// Copyright 2023 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import UIKit + +class SceneDelegate: UIResponder, UIWindowSceneDelegate { + + var window: UIWindow? 
+ + + func scene(_ scene: UIScene, willConnectTo session: UISceneSession, options connectionOptions: UIScene.ConnectionOptions) { + // Use this method to optionally configure and attach the UIWindow `window` to the provided UIWindowScene `scene`. + // If using a storyboard, the `window` property will automatically be initialized and attached to the scene. + // This delegate does not imply the connecting scene or session are new (see `application:configurationForConnectingSceneSession` instead). + guard let _ = (scene as? UIWindowScene) else { return } + } + + func sceneDidDisconnect(_ scene: UIScene) { + // Called as the scene is being released by the system. + // This occurs shortly after the scene enters the background, or when its session is discarded. + // Release any resources associated with this scene that can be re-created the next time the scene connects. + // The scene may re-connect later, as its session was not necessarily discarded (see `application:didDiscardSceneSessions` instead). + } + + func sceneDidBecomeActive(_ scene: UIScene) { + // Called when the scene has moved from an inactive state to an active state. + // Use this method to restart any tasks that were paused (or not yet started) when the scene was inactive. + } + + func sceneWillResignActive(_ scene: UIScene) { + // Called when the scene will move from an active state to an inactive state. + // This may occur due to temporary interruptions (ex. an incoming phone call). + } + + func sceneWillEnterForeground(_ scene: UIScene) { + // Called as the scene transitions from the background to the foreground. + // Use this method to undo the changes made on entering the background. + } + + func sceneDidEnterBackground(_ scene: UIScene) { + // Called as the scene transitions from the foreground to the background. + // Use this method to save data, release shared resources, and store enough scene-specific state information + // to restore the scene back to its current state. 
+ } + + +} + diff --git a/examples/face_landmarker/ios/FaceLandmarker/ViewContoller/InferenceViewController.swift b/examples/face_landmarker/ios/FaceLandmarker/ViewContoller/InferenceViewController.swift new file mode 100644 index 00000000..15e0dbaa --- /dev/null +++ b/examples/face_landmarker/ios/FaceLandmarker/ViewContoller/InferenceViewController.swift @@ -0,0 +1,122 @@ +// Copyright 2023 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import UIKit +import MediaPipeTasksVision + +protocol InferenceViewControllerDelegate { + + /** + This method is called when the user changes the value to update model used for inference. + **/ + func viewController( + _ viewController: InferenceViewController, + needPerformActions action: InferenceViewController.Action) +} + +class InferenceViewController: UIViewController { + + enum Action { + case changeNumFaces(Int) + case changeDetectionConfidence(Float) + case changePresenceConfidence(Float) + case changeTrackingConfidence(Float) + case changeBottomSheetViewBottomSpace(Bool) + } + + // MARK: Delegate + var delegate: InferenceViewControllerDelegate? + + // MARK: Storyboards Connections + @IBOutlet weak var infrenceTimeLabel: UILabel! + @IBOutlet weak var infrenceTimeTitleLabel: UILabel! + @IBOutlet weak var detectionConfidenceStepper: UIStepper! + @IBOutlet weak var detectionConfidenceValueLabel: UILabel! + + @IBOutlet weak var presenceConfidenceStepper: UIStepper! 
+ @IBOutlet weak var presenceConfidenceValueLabel: UILabel! + + @IBOutlet weak var minTrackingConfidenceStepper: UIStepper! + @IBOutlet weak var minTrackingConfidenceValueLabel: UILabel! + + @IBOutlet weak var numFacesStepper: UIStepper! + @IBOutlet weak var numFacestLabel: UILabel! + + // MARK: Instance Variables + var result: ResultBundle? = nil + var numFaces = DefaultConstants.numFaces + var minFaceDetectionConfidence = DefaultConstants.detectionConfidence + var minFacePresenceConfidence = DefaultConstants.presenceConfidence + var minTrackingConfidence = DefaultConstants.trackingConfidence + + override func viewDidLoad() { + super.viewDidLoad() + setupUI() + } + + // Private function + private func setupUI() { + + detectionConfidenceStepper.value = Double(minFaceDetectionConfidence) + detectionConfidenceValueLabel.text = "\(minFaceDetectionConfidence)" + + presenceConfidenceStepper.value = Double(minFacePresenceConfidence) + presenceConfidenceValueLabel.text = "\(minFacePresenceConfidence)" + + minTrackingConfidenceStepper.value = Double(minTrackingConfidence) + minTrackingConfidenceValueLabel.text = "\(minTrackingConfidence)" + + numFacesStepper.value = Double(numFaces) + numFacestLabel.text = "\(numFaces)" + } + + // Public function + func updateData() { + if let inferenceTime = result?.inferenceTime { + infrenceTimeLabel.text = String(format: "%.2fms", inferenceTime) + } + } + // MARK: IBAction + + @IBAction func expandButtonTouchUpInside(_ sender: UIButton) { + sender.isSelected.toggle() + infrenceTimeLabel.isHidden = !sender.isSelected + infrenceTimeTitleLabel.isHidden = !sender.isSelected + delegate?.viewController(self, needPerformActions: .changeBottomSheetViewBottomSpace(sender.isSelected)) + } + + @IBAction func detectionConfidenceStepperValueChanged(_ sender: UIStepper) { + minFaceDetectionConfidence = Float(sender.value) + delegate?.viewController(self, needPerformActions: .changeDetectionConfidence(minFaceDetectionConfidence)) + 
detectionConfidenceValueLabel.text = "\(minFaceDetectionConfidence)" + } + + @IBAction func presenceConfidenceStepperValueChanged(_ sender: UIStepper) { + minFacePresenceConfidence = Float(sender.value) + delegate?.viewController(self, needPerformActions: .changePresenceConfidence(minFacePresenceConfidence)) + presenceConfidenceValueLabel.text = "\(minFacePresenceConfidence)" + } + + @IBAction func minTrackingConfidenceStepperValueChanged(_ sender: UIStepper) { + minTrackingConfidence = Float(sender.value) + delegate?.viewController(self, needPerformActions: .changeTrackingConfidence(minTrackingConfidence)) + minTrackingConfidenceValueLabel.text = "\(minTrackingConfidence)" + } + + @IBAction func numFacesStepperValueChanged(_ sender: UIStepper) { + numFaces = Int(sender.value) + delegate?.viewController(self, needPerformActions: .changeNumFaces(numFaces)) + numFacestLabel.text = "\(numFaces)" + } +} diff --git a/examples/face_landmarker/ios/FaceLandmarker/ViewContoller/ViewController.swift b/examples/face_landmarker/ios/FaceLandmarker/ViewContoller/ViewController.swift new file mode 100644 index 00000000..851c2f53 --- /dev/null +++ b/examples/face_landmarker/ios/FaceLandmarker/ViewContoller/ViewController.swift @@ -0,0 +1,441 @@ +// Copyright 2023 The MediaPipe Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +import UIKit +import MediaPipeTasksVision +import UniformTypeIdentifiers +import AVKit + +class ViewController: UIViewController { + + // MARK: Storyboards Connections + @IBOutlet weak var previewView: PreviewView! + @IBOutlet weak var overlayView: OverlayView! + @IBOutlet weak var addImageButton: UIButton! + @IBOutlet weak var cameraUnavailableLabel: UILabel! + @IBOutlet weak var imageEmptyLabel: UILabel! + @IBOutlet weak var resumeButton: UIButton! + @IBOutlet weak var runningModelTabbar: UITabBar! + @IBOutlet weak var cameraTabbarItem: UITabBarItem! + @IBOutlet weak var photoTabbarItem: UITabBarItem! + @IBOutlet weak var bottomSheetViewBottomSpace: NSLayoutConstraint! + @IBOutlet weak var bottomViewHeightConstraint: NSLayoutConstraint! + + // MARK: Constants + private var videoDetectTimer: Timer? + private let inferenceIntervalMs: Double = 100 + private let inferenceBottomHeight = 220.0 + private let expandButtonHeight = 41.0 + private let edgeOffset: CGFloat = 2.0 + private let labelOffset: CGFloat = 10.0 + private let displayFont = UIFont.systemFont(ofSize: 14.0, weight: .medium) + private let playerViewController = AVPlayerViewController() + + // MARK: Instance Variables + private var numFaces = DefaultConstants.numFaces + private var detectionConfidence = DefaultConstants.detectionConfidence + private var presenceConfidence = DefaultConstants.presenceConfidence + private var trackingConfidence = DefaultConstants.trackingConfidence + private let modelPath = DefaultConstants.modelPath + private var runingModel: RunningMode = .liveStream { + didSet { + faceLandmarkerHelper = FaceLandmarkerHelper( + modelPath: modelPath, + numFaces: numFaces, + minFaceDetectionConfidence: detectionConfidence, + minFacePresenceConfidence: presenceConfidence, + minTrackingConfidence: trackingConfidence, + runningModel: runingModel, + delegate: self) + } + } + let backgroundQueue = DispatchQueue( + label: "com.google.mediapipe.examples.facelandmarker", + qos: .userInteractive + 
) + private var isProcess = false + + // MARK: Controllers that manage functionality + // Handles all the camera related functionality + private lazy var cameraCapture = CameraFeedManager(previewView: previewView) + + // Handles all data preprocessing and makes calls to run inference through the + // `FaceLandmarkerHelper`. + private var faceLandmarkerHelper: FaceLandmarkerHelper? + + // Handles the presenting of results on the screen + private var inferenceViewController: InferenceViewController? + + // MARK: View Handling Methods + override func viewDidLoad() { + super.viewDidLoad() + // Create object detector helper + faceLandmarkerHelper = FaceLandmarkerHelper( + modelPath: modelPath, + numFaces: numFaces, + minFaceDetectionConfidence: detectionConfidence, + minFacePresenceConfidence: presenceConfidence, + minTrackingConfidence: trackingConfidence, + runningModel: runingModel, + delegate: self) + + runningModelTabbar.selectedItem = cameraTabbarItem + runningModelTabbar.delegate = self + cameraCapture.delegate = self + overlayView.clearsContextBeforeDrawing = true + } + override func viewWillAppear(_ animated: Bool) { + super.viewWillAppear(animated) +#if !targetEnvironment(simulator) + if runingModel == .liveStream && runningModelTabbar.selectedItem == cameraTabbarItem { + cameraCapture.checkCameraConfigurationAndStartSession() + } +#endif + } + +#if !targetEnvironment(simulator) + override func viewWillDisappear(_ animated: Bool) { + super.viewWillDisappear(animated) + cameraCapture.stopSession() + } +#endif + + override var preferredStatusBarStyle: UIStatusBarStyle { + return .lightContent + } + + // MARK: Storyboard Segue Handlers + override func prepare(for segue: UIStoryboardSegue, sender: Any?) { + super.prepare(for: segue, sender: sender) + if segue.identifier == "EMBED" { + inferenceViewController = segue.destination as? 
InferenceViewController + inferenceViewController?.numFaces = numFaces + inferenceViewController?.delegate = self + bottomViewHeightConstraint.constant = inferenceBottomHeight + bottomSheetViewBottomSpace.constant = -inferenceBottomHeight + expandButtonHeight + view.layoutSubviews() + } + } + + // MARK: IBAction + + @IBAction func addPhotoButtonTouchUpInside(_ sender: Any) { + openImagePickerController() + } + // Resume camera session when click button resume + @IBAction func resumeButtonTouchUpInside(_ sender: Any) { + cameraCapture.resumeInterruptedSession { isSessionRunning in + if isSessionRunning { + self.resumeButton.isHidden = true + self.cameraUnavailableLabel.isHidden = true + } + } + } + // MARK: Private function + private func openImagePickerController() { + if UIImagePickerController.isSourceTypeAvailable(.savedPhotosAlbum) { + let imagePicker = UIImagePickerController() + imagePicker.delegate = self + imagePicker.sourceType = .savedPhotosAlbum + imagePicker.mediaTypes = [UTType.image.identifier, UTType.movie.identifier] + imagePicker.allowsEditing = false + DispatchQueue.main.async { + self.present(imagePicker, animated: true, completion: nil) + } + } + } + + private func removePlayerViewController() { + playerViewController.view.removeFromSuperview() + playerViewController.removeFromParent() + } + + private func processVideo(url: URL) { + backgroundQueue.async { [weak self] in + guard let weakSelf = self else { return } + let faceLandmarkerHelper = FaceLandmarkerHelper( + modelPath: weakSelf.modelPath, + numFaces: weakSelf.numFaces, + minFaceDetectionConfidence: weakSelf.detectionConfidence, + minFacePresenceConfidence: weakSelf.presenceConfidence, + minTrackingConfidence: weakSelf.trackingConfidence, + runningModel: .video, + delegate: nil) + Task { + let result = await faceLandmarkerHelper.detectVideoFile(url: url, inferenceIntervalMs: weakSelf.inferenceIntervalMs) + guard let result = result else { return } + DispatchQueue.main.async { + 
weakSelf.showResult(result, videoUrl: url) + } + } + } + } + + private func showResult(_ result: ResultBundle, videoUrl: URL) { + inferenceViewController?.result = result + inferenceViewController?.updateData() + let player = AVPlayer(url: videoUrl) + playerViewController.player = player + playerViewController.videoGravity = .resizeAspectFill + playerViewController.showsPlaybackControls = false + playerViewController.view.frame = previewView.bounds + previewView.addSubview(playerViewController.view) + addChild(playerViewController) + player.play() + NotificationCenter.default.removeObserver(self) + NotificationCenter.default + .addObserver(self, + selector: #selector(playerDidFinishPlaying), + name: .AVPlayerItemDidPlayToEndTime, + object: player.currentItem + ) + + videoDetectTimer?.invalidate() + videoDetectTimer = Timer.scheduledTimer( + withTimeInterval: inferenceIntervalMs / 1000, + repeats: true, block: { [weak self] _ in + guard let this = self else { return } + let currentTime: CMTime = player.currentTime() + let index = Int(currentTime.seconds * 1000 / this.inferenceIntervalMs) + guard index < result.faceLandmarkerResults.count, + let faceLandmarkerResult = result.faceLandmarkerResults[index] else { return } + DispatchQueue.main.async { + this.overlayView.drawLandmarks(faceLandmarkerResult.faceLandmarks, + orientation: .up, + withImageSize: result.imageSize) + } + }) + } + + @objc func playerDidFinishPlaying(note: NSNotification) { + videoDetectTimer?.invalidate() + videoDetectTimer = nil + } +} + +// MARK: UIImagePickerControllerDelegate, UINavigationControllerDelegate +extension ViewController: UIImagePickerControllerDelegate, UINavigationControllerDelegate { + + func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [UIImagePickerController.InfoKey : Any]) { + picker.dismiss(animated: true) + //Detect video and show result when mediaType is movie + if info[.mediaType] as? 
String == UTType.movie.identifier { + guard let mediaURL = info[.mediaURL] as? URL else { return } + imageEmptyLabel.isHidden = true + processVideo(url: mediaURL) + } else { + guard let image = info[.originalImage] as? UIImage else { return } + imageEmptyLabel.isHidden = true + if runingModel != .image { + runingModel = .image + } + removePlayerViewController() + previewView.image = image + // Pass the uiimage to mediapipe + let result = faceLandmarkerHelper?.detect(image: image) + // Display results by handing off to the InferenceViewController. + inferenceViewController?.result = result + DispatchQueue.main.async { + self.inferenceViewController?.updateData() + guard let result = result, + let faceLandmarkerResult = result.faceLandmarkerResults.first, + let faceLandmarkerResult = faceLandmarkerResult else { return } + self.overlayView.drawLandmarks(faceLandmarkerResult.faceLandmarks, + orientation: image.imageOrientation, + withImageSize: image.size) + } + } + } +} + +// MARK: CameraFeedManagerDelegate Methods +extension ViewController: CameraFeedManagerDelegate { + + func didOutput(sampleBuffer: CMSampleBuffer, orientation: UIImage.Orientation) { + let currentTimeMs = Date().timeIntervalSince1970 * 1000 + backgroundQueue.async { + self.faceLandmarkerHelper?.detectAsync(videoFrame: sampleBuffer, orientation: orientation, timeStamps: Int(currentTimeMs)) + } + } + + // Convert CIImage to UIImage + func convert(cmage: CIImage) -> UIImage { + let context = CIContext(options: nil) + let cgImage = context.createCGImage(cmage, from: cmage.extent)! + let image = UIImage(cgImage: cgImage) + return image + } + + // MARK: Session Handling Alerts + func sessionWasInterrupted(canResumeManually resumeManually: Bool) { + + // Updates the UI when session is interupted. 
+ if resumeManually { + self.resumeButton.isHidden = false + } else { + self.cameraUnavailableLabel.isHidden = false + } + } + + func sessionInterruptionEnded() { + // Updates UI once session interruption has ended. + if !self.cameraUnavailableLabel.isHidden { + self.cameraUnavailableLabel.isHidden = true + } + + if !self.resumeButton.isHidden { + self.resumeButton.isHidden = true + } + } + + func sessionRunTimeErrorOccured() { + // Handles session run time error by updating the UI and providing a button if session can be + // manually resumed. + self.resumeButton.isHidden = false + previewView.shouldUseClipboardImage = true + } + + func presentCameraPermissionsDeniedAlert() { + let alertController = UIAlertController( + title: "Camera Permissions Denied", + message: + "Camera permissions have been denied for this app. You can change this by going to Settings", + preferredStyle: .alert) + + let cancelAction = UIAlertAction(title: "Cancel", style: .cancel, handler: nil) + let settingsAction = UIAlertAction(title: "Settings", style: .default) { (action) in + UIApplication.shared.open( + URL(string: UIApplication.openSettingsURLString)!, options: [:], completionHandler: nil) + } + alertController.addAction(cancelAction) + alertController.addAction(settingsAction) + + present(alertController, animated: true, completion: nil) + + previewView.shouldUseClipboardImage = true + } + + func presentVideoConfigurationErrorAlert() { + let alert = UIAlertController( + title: "Camera Configuration Failed", message: "There was an error while configuring camera.", + preferredStyle: .alert) + alert.addAction(UIAlertAction(title: "OK", style: .default, handler: nil)) + + self.present(alert, animated: true) + previewView.shouldUseClipboardImage = true + } +} + +// MARK: InferenceViewControllerDelegate Methods +extension ViewController: InferenceViewControllerDelegate { + func viewController( + _ viewController: InferenceViewController, + needPerformActions action: 
InferenceViewController.Action + ) { + var isModelNeedsRefresh = false + switch action { + case .changeNumFaces(let numFaces): + if self.numFaces != numFaces { + isModelNeedsRefresh = true + } + self.numFaces = numFaces + case .changeDetectionConfidence(let detectionConfidence): + if self.detectionConfidence != detectionConfidence { + isModelNeedsRefresh = true + } + self.detectionConfidence = detectionConfidence + case .changePresenceConfidence(let presenceConfidence): + if self.presenceConfidence != presenceConfidence { + isModelNeedsRefresh = true + } + self.presenceConfidence = presenceConfidence + case .changeTrackingConfidence(let trackingConfidence): + if self.trackingConfidence != trackingConfidence { + isModelNeedsRefresh = true + } + self.trackingConfidence = trackingConfidence + case .changeBottomSheetViewBottomSpace(let isExpand): + bottomSheetViewBottomSpace.constant = isExpand ? 0 : -inferenceBottomHeight + expandButtonHeight + UIView.animate(withDuration: 0.3) { + self.view.layoutSubviews() + } + } + if isModelNeedsRefresh { + faceLandmarkerHelper = FaceLandmarkerHelper( + modelPath: modelPath, + numFaces: numFaces, + minFaceDetectionConfidence: detectionConfidence, + minFacePresenceConfidence: presenceConfidence, + minTrackingConfidence: trackingConfidence, + runningModel: runingModel, + delegate: self) + } + } +} + +extension ViewController: FaceLandmarkerHelperDelegate { + func faceLandmarkerHelper(_ faceLandmarkerHelper: FaceLandmarkerHelper, didFinishDetection result: ResultBundle?, error: Error?) 
{ + guard let result = result, + let faceLandmarkerResult = result.faceLandmarkerResults.first, + let faceLandmarkerResult = faceLandmarkerResult else { return } + DispatchQueue.main.async { + if self.runningModelTabbar.selectedItem != self.cameraTabbarItem { return } + self.overlayView.drawLandmarks(faceLandmarkerResult.faceLandmarks, + orientation: self.cameraCapture.orientation, + withImageSize: self.cameraCapture.videoFrameSize) + } + } +} + +// MARK: UITabBarDelegate +extension ViewController: UITabBarDelegate { + func tabBar(_ tabBar: UITabBar, didSelect item: UITabBarItem) { + switch item { + case cameraTabbarItem: + if runingModel != .liveStream { + runingModel = .liveStream + } + removePlayerViewController() +#if !targetEnvironment(simulator) + cameraCapture.checkCameraConfigurationAndStartSession() +#endif + previewView.shouldUseClipboardImage = false + addImageButton.isHidden = true + imageEmptyLabel.isHidden = true + case photoTabbarItem: +#if !targetEnvironment(simulator) + cameraCapture.stopSession() +#endif + previewView.shouldUseClipboardImage = true + addImageButton.isHidden = false + if previewView.image == nil || playerViewController.parent != self { + imageEmptyLabel.isHidden = false + } + default: + break + } + overlayView.objectOverlays = [] + overlayView.setNeedsDisplay() + } +} + +// MARK: Define default constants +enum DefaultConstants { + static let numFaces = 3 + static let detectionConfidence: Float = 0.5 + static let presenceConfidence: Float = 0.5 + static let trackingConfidence: Float = 0.5 + static let outputFaceBlendshapes: Bool = false + static let modelPath: String? 
= Bundle.main.path(forResource: "face_landmarker", ofType: "task") +} diff --git a/examples/face_landmarker/ios/FaceLandmarker/face_landmarker.task b/examples/face_landmarker/ios/FaceLandmarker/face_landmarker.task new file mode 100644 index 00000000..c50c845d Binary files /dev/null and b/examples/face_landmarker/ios/FaceLandmarker/face_landmarker.task differ diff --git a/examples/face_landmarker/ios/FaceLandmarkerUITests/FaceLandmarkerUITests.swift b/examples/face_landmarker/ios/FaceLandmarkerUITests/FaceLandmarkerUITests.swift new file mode 100644 index 00000000..b67a2c78 --- /dev/null +++ b/examples/face_landmarker/ios/FaceLandmarkerUITests/FaceLandmarkerUITests.swift @@ -0,0 +1,41 @@ +// +// FaceLandmarkerUITests.swift +// FaceLandmarkerUITests +// +// Created by MBA0077 on 6/23/23. +// + +import XCTest + +final class FaceLandmarkerUITests: XCTestCase { + + override func setUpWithError() throws { + // Put setup code here. This method is called before the invocation of each test method in the class. + + // In UI tests it is usually best to stop immediately when a failure occurs. + continueAfterFailure = false + + // In UI tests it’s important to set the initial state - such as interface orientation - required for your tests before they run. The setUp method is a good place to do this. + } + + override func tearDownWithError() throws { + // Put teardown code here. This method is called after the invocation of each test method in the class. + } + + func testExample() throws { + // UI tests must launch the application that they test. + let app = XCUIApplication() + app.launch() + + // Use XCTAssert and related functions to verify your tests produce the correct results. + } + + func testLaunchPerformance() throws { + if #available(macOS 10.15, iOS 13.0, tvOS 13.0, watchOS 7.0, *) { + // This measures how long it takes to launch your application. 
+ measure(metrics: [XCTApplicationLaunchMetric()]) { + XCUIApplication().launch() + } + } + } +} diff --git a/examples/face_landmarker/ios/FaceLandmarkerUITests/FaceLandmarkerUITestsLaunchTests.swift b/examples/face_landmarker/ios/FaceLandmarkerUITests/FaceLandmarkerUITestsLaunchTests.swift new file mode 100644 index 00000000..704c0d07 --- /dev/null +++ b/examples/face_landmarker/ios/FaceLandmarkerUITests/FaceLandmarkerUITestsLaunchTests.swift @@ -0,0 +1,32 @@ +// +// FaceLandmarkerUITestsLaunchTests.swift +// FaceLandmarkerUITests +// +// Created by MBA0077 on 6/23/23. +// + +import XCTest + +final class FaceLandmarkerUITestsLaunchTests: XCTestCase { + + override class var runsForEachTargetApplicationUIConfiguration: Bool { + true + } + + override func setUpWithError() throws { + continueAfterFailure = false + } + + func testLaunch() throws { + let app = XCUIApplication() + app.launch() + + // Insert steps here to perform after app launch but before taking a screenshot, + // such as logging into a test account or navigating somewhere in the app + + let attachment = XCTAttachment(screenshot: app.screenshot()) + attachment.name = "Launch Screen" + attachment.lifetime = .keepAlways + add(attachment) + } +} diff --git a/examples/face_landmarker/ios/Podfile b/examples/face_landmarker/ios/Podfile new file mode 100644 index 00000000..9593cff6 --- /dev/null +++ b/examples/face_landmarker/ios/Podfile @@ -0,0 +1,15 @@ +# Uncomment the next line to define a global platform for your project +# platform :ios, '9.0' + +target 'FaceLandmarker' do + # Comment the next line if you don't want to use dynamic frameworks + use_frameworks! + + pod 'MediaPipeTasksVision', '0.10.2-alpha-3' + +end + +target 'FaceLandmarkerTests' do + inherit! 
:search_paths + # Pods for testing +end diff --git a/examples/face_landmarker/ios/Podfile.lock b/examples/face_landmarker/ios/Podfile.lock new file mode 100644 index 00000000..c1716b2a --- /dev/null +++ b/examples/face_landmarker/ios/Podfile.lock @@ -0,0 +1,20 @@ +PODS: + - MediaPipeTasksCommon (0.10.2-alpha-3) + - MediaPipeTasksVision (0.10.2-alpha-3): + - MediaPipeTasksCommon (= 0.10.2-alpha-3) + +DEPENDENCIES: + - MediaPipeTasksVision (= 0.10.2-alpha-3) + +SPEC REPOS: + trunk: + - MediaPipeTasksCommon + - MediaPipeTasksVision + +SPEC CHECKSUMS: + MediaPipeTasksCommon: d2cb34b690555550a9576caa9ea16b6aaa55a6e9 + MediaPipeTasksVision: a60e150bdc7c0d8dda1e071a88228da7dbd526a4 + +PODFILE CHECKSUM: 482145949ad170e2e95d9187bc10591e87ac0fc7 + +COCOAPODS: 1.12.1 diff --git a/examples/face_landmarker/ios/RunScripts/download_models.sh b/examples/face_landmarker/ios/RunScripts/download_models.sh new file mode 100755 index 00000000..b397c870 --- /dev/null +++ b/examples/face_landmarker/ios/RunScripts/download_models.sh @@ -0,0 +1,24 @@ +#!/bin/bash +# Copyright 2023 The MediaPipe Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +# Download face_landmarker.task from the internet if it does not exist. +MODEL_FILE=./FaceLandmarker/face_landmarker.task +if test -f "$MODEL_FILE"; then + echo "INFO: face_landmarker.task existed. Skip downloading and use the local task." 
+else + curl -o ${MODEL_FILE} https://storage.googleapis.com/mediapipe-models/face_landmarker/face_landmarker/float16/1/face_landmarker.task + echo "INFO: Downloaded face_landmarker.task to $MODEL_FILE ." +fi