diff --git a/.github/workflows/firebaseai.yml b/.github/workflows/firebaseai.yml
index ea807fc9b..df503c87d 100644
--- a/.github/workflows/firebaseai.yml
+++ b/.github/workflows/firebaseai.yml
@@ -29,11 +29,13 @@ jobs:
           - os: macos-15
             xcode: "16.4"
             platform: iOS
-            device: iPhone 16
+            device: iPhone 16 Pro
+            ios_version: "18.6"
           - os: macos-26
             xcode: "26.0"
             platform: iOS
-            device: iPhone 16e
+            device: iPhone 17 Pro
+            ios_version: "26.1"
     runs-on: ${{ matrix.os }}
     env:
       SETUP: firebaseai
diff --git a/firebaseai/FirebaseAIExample.xcodeproj/project.pbxproj b/firebaseai/FirebaseAIExample.xcodeproj/project.pbxproj
index 89084e975..1a4d012df 100644
--- a/firebaseai/FirebaseAIExample.xcodeproj/project.pbxproj
+++ b/firebaseai/FirebaseAIExample.xcodeproj/project.pbxproj
@@ -7,30 +7,17 @@
 	objects = {
 
 /* Begin PBXBuildFile section */
-		869200B32B879C4F00482873 /* GoogleService-Info.plist in Resources */ = {isa = PBXBuildFile; fileRef = 869200B22B879C4F00482873 /* GoogleService-Info.plist */; };
-		86A67E8D2E9FECCF00EDFB8A /* cmark-gfm in Frameworks */ = {isa = PBXBuildFile; productRef = 86A67E8C2E9FECCF00EDFB8A /* cmark-gfm */; };
-		86A67E8F2E9FECCF00EDFB8A /* cmark-gfm-extensions in Frameworks */ = {isa = PBXBuildFile; productRef = 86A67E8E2E9FECCF00EDFB8A /* cmark-gfm-extensions */; };
-		86A67E912E9FED0600EDFB8A /* NetworkImage in Frameworks */ = {isa = PBXBuildFile; productRef = 86A67E902E9FED0600EDFB8A /* NetworkImage */; };
-		86A67E932E9FED1700EDFB8A /* NetworkImage in Frameworks */ = {isa = PBXBuildFile; productRef = 86A67E922E9FED1700EDFB8A /* NetworkImage */; };
-		86A67E952E9FED2200EDFB8A /* cmark-gfm in Frameworks */ = {isa = PBXBuildFile; productRef = 86A67E942E9FED2200EDFB8A /* cmark-gfm */; };
-		86A67E972E9FED2200EDFB8A /* cmark-gfm-extensions in Frameworks */ = {isa = PBXBuildFile; productRef = 86A67E962E9FED2200EDFB8A /* cmark-gfm-extensions */; };
-		86BB55FF2E8B2D6D0054B8B5 /* MarkdownUI in Frameworks */ = {isa = PBXBuildFile; productRef = 86BB55E42E8B2D6D0054B8B5 /* MarkdownUI */; };
-		86BB56002E8B2D6D0054B8B5 /* GenerativeAIUIComponents in Frameworks */ = {isa = PBXBuildFile; productRef = 86BB55E62E8B2D6D0054B8B5 /* GenerativeAIUIComponents */; };
-		86BB56042E8B2D6D0054B8B5 /* GoogleService-Info.plist in Resources */ = {isa = PBXBuildFile; fileRef = 869200B22B879C4F00482873 /* GoogleService-Info.plist */; };
-		886F95D82B17BA420036F07A /* MarkdownUI in Frameworks */ = {isa = PBXBuildFile; productRef = 886F95D72B17BA420036F07A /* MarkdownUI */; };
-		886F95E32B17D6630036F07A /* GenerativeAIUIComponents in Frameworks */ = {isa = PBXBuildFile; productRef = 886F95E22B17D6630036F07A /* GenerativeAIUIComponents */; };
-		DE26D95F2DBB3E9F007E6668 /* FirebaseAI in Frameworks */ = {isa = PBXBuildFile; productRef = DE26D95E2DBB3E9F007E6668 /* FirebaseAI */; };
+		88151ADC2EC9345700775CFB /* MarkdownUI in Frameworks */ = {isa = PBXBuildFile; productRef = 88151ADB2EC9345700775CFB /* MarkdownUI */; };
+		88779D902EC8AA920080D023 /* ConversationKit in Frameworks */ = {isa = PBXBuildFile; productRef = 88779D8F2EC8AA920080D023 /* ConversationKit */; };
+		88779D932EC8AC460080D023 /* FirebaseAILogic in Frameworks */ = {isa = PBXBuildFile; productRef = 88779D922EC8AC460080D023 /* FirebaseAILogic */; };
 /* End PBXBuildFile section */
 
 /* Begin PBXFileReference section */
-		869200B22B879C4F00482873 /* GoogleService-Info.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; path = "GoogleService-Info.plist"; sourceTree = "<group>"; };
-		86BB56082E8B2D6D0054B8B5 /* FirebaseAIExampleZip.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = FirebaseAIExampleZip.app; sourceTree = BUILT_PRODUCTS_DIR; };
-		8848C82F2B0D04BC007B434F /* FirebaseAIExample.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = FirebaseAIExample.app; sourceTree = BUILT_PRODUCTS_DIR; };
-		88B8A9352B0FCBA700424728 /* GenerativeAIUIComponents */ = {isa = PBXFileReference; lastKnownFileType = wrapper; path = GenerativeAIUIComponents; sourceTree = "<group>"; };
+		88779D352EC8A9CF0080D023 /* FirebaseAIExample.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = FirebaseAIExample.app; sourceTree = BUILT_PRODUCTS_DIR; };
 /* End PBXFileReference section */
 
 /* Begin PBXFileSystemSynchronizedRootGroup section */
-		863E95812EC7B70200BE4F4E /* FirebaseAIExample */ = {
+		88779D372EC8A9CF0080D023 /* FirebaseAIExample */ = {
 			isa = PBXFileSystemSynchronizedRootGroup;
 			path = FirebaseAIExample;
 			sourceTree = "<group>";
@@ -38,57 +25,31 @@
 /* End PBXFileSystemSynchronizedRootGroup section */
 
 /* Begin PBXFrameworksBuildPhase section */
-		86BB55FD2E8B2D6D0054B8B5 /* Frameworks */ = {
+		88779D322EC8A9CF0080D023 /* Frameworks */ = {
 			isa = PBXFrameworksBuildPhase;
 			buildActionMask = 2147483647;
 			files = (
-				86A67E932E9FED1700EDFB8A /* NetworkImage in Frameworks */,
-				86BB55FF2E8B2D6D0054B8B5 /* MarkdownUI in Frameworks */,
-				86A67E952E9FED2200EDFB8A /* cmark-gfm in Frameworks */,
-				86A67E972E9FED2200EDFB8A /* cmark-gfm-extensions in Frameworks */,
-				86BB56002E8B2D6D0054B8B5 /* GenerativeAIUIComponents in Frameworks */,
-			);
-			runOnlyForDeploymentPostprocessing = 0;
-		};
-		8848C82C2B0D04BC007B434F /* Frameworks */ = {
-			isa = PBXFrameworksBuildPhase;
-			buildActionMask = 2147483647;
-			files = (
-				DE26D95F2DBB3E9F007E6668 /* FirebaseAI in Frameworks */,
-				886F95D82B17BA420036F07A /* MarkdownUI in Frameworks */,
-				86A67E8D2E9FECCF00EDFB8A /* cmark-gfm in Frameworks */,
-				86A67E912E9FED0600EDFB8A /* NetworkImage in Frameworks */,
-				86A67E8F2E9FECCF00EDFB8A /* cmark-gfm-extensions in Frameworks */,
-				886F95E32B17D6630036F07A /* GenerativeAIUIComponents in Frameworks */,
+				88151ADC2EC9345700775CFB /* MarkdownUI in Frameworks */,
+				88779D932EC8AC460080D023 /* FirebaseAILogic in Frameworks */,
+				88779D902EC8AA920080D023 /* ConversationKit in Frameworks */,
 			);
 			runOnlyForDeploymentPostprocessing = 0;
 		};
 /* End PBXFrameworksBuildPhase section */
 
 /* Begin PBXGroup section */
-		88209C222B0FBE1700F64795 /* Frameworks */ = {
-			isa = PBXGroup;
-			children = (
-			);
-			name = Frameworks;
-			sourceTree = "<group>";
-		};
-		8848C8262B0D04BC007B434F = {
+		88779D2C2EC8A9CF0080D023 = {
 			isa = PBXGroup;
 			children = (
-				863E95812EC7B70200BE4F4E /* FirebaseAIExample */,
-				88B8A9352B0FCBA700424728 /* GenerativeAIUIComponents */,
-				869200B22B879C4F00482873 /* GoogleService-Info.plist */,
-				8848C8302B0D04BC007B434F /* Products */,
-				88209C222B0FBE1700F64795 /* Frameworks */,
+				88779D372EC8A9CF0080D023 /* FirebaseAIExample */,
+				88779D362EC8A9CF0080D023 /* Products */,
 			);
 			sourceTree = "<group>";
 		};
-		8848C8302B0D04BC007B434F /* Products */ = {
+		88779D362EC8A9CF0080D023 /* Products */ = {
 			isa = PBXGroup;
 			children = (
-				8848C82F2B0D04BC007B434F /* FirebaseAIExample.app */,
-				86BB56082E8B2D6D0054B8B5 /* FirebaseAIExampleZip.app */,
+				88779D352EC8A9CF0080D023 /* FirebaseAIExample.app */,
 			);
 			name = Products;
 			sourceTree = "<group>";
@@ -96,131 +57,82 @@
 /* End PBXGroup section */
 
 /* Begin PBXNativeTarget section */
-		86BB55E32E8B2D6D0054B8B5 /* FirebaseAIExampleZip */ = {
+		88779D342EC8A9CF0080D023 /* FirebaseAIExample */ = {
 			isa = PBXNativeTarget;
-			buildConfigurationList = 86BB56052E8B2D6D0054B8B5 /* Build configuration list for PBXNativeTarget "FirebaseAIExampleZip" */;
+			buildConfigurationList = 88779D402EC8A9CF0080D023 /* Build configuration list for PBXNativeTarget "FirebaseAIExample" */;
 			buildPhases = (
-				86BB55E92E8B2D6D0054B8B5 /* Sources */,
-				86BB55FD2E8B2D6D0054B8B5 /* Frameworks */,
-				86BB56012E8B2D6D0054B8B5 /* Resources */,
+				88779D312EC8A9CF0080D023 /* Sources */,
+				88779D322EC8A9CF0080D023 /* Frameworks */,
+				88779D332EC8A9CF0080D023 /* Resources */,
 			);
 			buildRules = (
 			);
 			dependencies = (
 			);
 			fileSystemSynchronizedGroups = (
-				863E95812EC7B70200BE4F4E /* FirebaseAIExample */,
-			);
-			name = FirebaseAIExampleZip;
-			packageProductDependencies = (
-				86BB55E42E8B2D6D0054B8B5 /* MarkdownUI */,
-				86BB55E62E8B2D6D0054B8B5 /* GenerativeAIUIComponents */,
-				86A67E922E9FED1700EDFB8A /* NetworkImage */,
-				86A67E942E9FED2200EDFB8A /* cmark-gfm */,
-				86A67E962E9FED2200EDFB8A /* cmark-gfm-extensions */,
-			);
-			productName = GenerativeAIExample;
-			productReference = 86BB56082E8B2D6D0054B8B5 /* FirebaseAIExampleZip.app */;
-			productType = "com.apple.product-type.application";
-		};
-		8848C82E2B0D04BC007B434F /* FirebaseAIExample */ = {
-			isa = PBXNativeTarget;
-			buildConfigurationList = 8848C83D2B0D04BD007B434F /* Build configuration list for PBXNativeTarget "FirebaseAIExample" */;
-			buildPhases = (
-				8848C82B2B0D04BC007B434F /* Sources */,
-				8848C82C2B0D04BC007B434F /* Frameworks */,
-				8848C82D2B0D04BC007B434F /* Resources */,
-			);
-			buildRules = (
-			);
-			dependencies = (
-			);
-			fileSystemSynchronizedGroups = (
-				863E95812EC7B70200BE4F4E /* FirebaseAIExample */,
+				88779D372EC8A9CF0080D023 /* FirebaseAIExample */,
 			);
 			name = FirebaseAIExample;
 			packageProductDependencies = (
-				886F95D72B17BA420036F07A /* MarkdownUI */,
-				886F95E22B17D6630036F07A /* GenerativeAIUIComponents */,
-				DE26D95E2DBB3E9F007E6668 /* FirebaseAI */,
-				86A67E8C2E9FECCF00EDFB8A /* cmark-gfm */,
-				86A67E8E2E9FECCF00EDFB8A /* cmark-gfm-extensions */,
-				86A67E902E9FED0600EDFB8A /* NetworkImage */,
+				88779D8F2EC8AA920080D023 /* ConversationKit */,
+				88779D922EC8AC460080D023 /* FirebaseAILogic */,
+				88151ADB2EC9345700775CFB /* MarkdownUI */,
 			);
-			productName = GenerativeAIExample;
-			productReference = 8848C82F2B0D04BC007B434F /* FirebaseAIExample.app */;
+			productName = FirebaseAIExample;
+			productReference = 88779D352EC8A9CF0080D023 /* FirebaseAIExample.app */;
 			productType = "com.apple.product-type.application";
 		};
 /* End PBXNativeTarget section */
 
 /* Begin PBXProject section */
-		8848C8272B0D04BC007B434F /* Project object */ = {
+		88779D2D2EC8A9CF0080D023 /* Project object */ = {
 			isa = PBXProject;
 			attributes = {
 				BuildIndependentTargetsInParallel = 1;
-				LastSwiftUpdateCheck = 1510;
-				LastUpgradeCheck = 1510;
-				ORGANIZATIONNAME = "Google LLC";
+				LastSwiftUpdateCheck = 2620;
+				LastUpgradeCheck = 2620;
 				TargetAttributes = {
-					8848C82E2B0D04BC007B434F = {
-						CreatedOnToolsVersion = 15.1;
+					88779D342EC8A9CF0080D023 = {
+						CreatedOnToolsVersion = 26.2;
 					};
 				};
 			};
-			buildConfigurationList = 8848C82A2B0D04BC007B434F /* Build configuration list for PBXProject "FirebaseAIExample" */;
+			buildConfigurationList = 88779D302EC8A9CF0080D023 /* Build configuration list for PBXProject "FirebaseAIExample" */;
 			developmentRegion = en;
 			hasScannedForEncodings = 0;
 			knownRegions = (
 				en,
 				Base,
 			);
-			mainGroup = 8848C8262B0D04BC007B434F;
+			mainGroup = 88779D2C2EC8A9CF0080D023;
 			minimizedProjectReferenceProxies = 1;
 			packageReferences = (
-				88209C212B0FBDF700F64795 /* XCRemoteSwiftPackageReference "swift-markdown-ui" */,
-				DEA09AC32B1FCE22001962D9 /* XCRemoteSwiftPackageReference "NetworkImage" */,
-				DEFECAAB2D7BB49700EF9621 /* XCRemoteSwiftPackageReference "firebase-ios-sdk" */,
-				86A67E8B2E9FECCF00EDFB8A /* XCRemoteSwiftPackageReference "swift-cmark" */,
+				88779D8E2EC8AA920080D023 /* XCRemoteSwiftPackageReference "ConversationKit" */,
+				88779D912EC8AC460080D023 /* XCRemoteSwiftPackageReference "firebase-ios-sdk" */,
+				88151ADA2EC9345700775CFB /* XCRemoteSwiftPackageReference "swift-markdown-ui" */,
 			);
 			preferredProjectObjectVersion = 77;
-			productRefGroup = 8848C8302B0D04BC007B434F /* Products */;
+			productRefGroup = 88779D362EC8A9CF0080D023 /* Products */;
 			projectDirPath = "";
 			projectRoot = "";
 			targets = (
-				8848C82E2B0D04BC007B434F /* FirebaseAIExample */,
-				86BB55E32E8B2D6D0054B8B5 /* FirebaseAIExampleZip */,
+				88779D342EC8A9CF0080D023 /* FirebaseAIExample */,
 			);
 		};
 /* End PBXProject section */
 
 /* Begin PBXResourcesBuildPhase section */
-		86BB56012E8B2D6D0054B8B5 /* Resources */ = {
+		88779D332EC8A9CF0080D023 /* Resources */ = {
 			isa = PBXResourcesBuildPhase;
 			buildActionMask = 2147483647;
 			files = (
-				86BB56042E8B2D6D0054B8B5 /* GoogleService-Info.plist in Resources */,
-			);
-			runOnlyForDeploymentPostprocessing = 0;
-		};
-		8848C82D2B0D04BC007B434F /* Resources */ = {
-			isa = PBXResourcesBuildPhase;
-			buildActionMask = 2147483647;
-			files = (
-				869200B32B879C4F00482873 /* GoogleService-Info.plist in Resources */,
 			);
 			runOnlyForDeploymentPostprocessing = 0;
 		};
 /* End PBXResourcesBuildPhase section */
 
 /* Begin PBXSourcesBuildPhase section */
-		86BB55E92E8B2D6D0054B8B5 /* Sources */ = {
-			isa = PBXSourcesBuildPhase;
-			buildActionMask = 2147483647;
-			files = (
-			);
-			runOnlyForDeploymentPostprocessing = 0;
-		};
-		8848C82B2B0D04BC007B434F /* Sources */ = {
+		88779D312EC8A9CF0080D023 /* Sources */ = {
 			isa = PBXSourcesBuildPhase;
 			buildActionMask = 2147483647;
 			files = (
@@ -230,67 +142,7 @@
 /* End PBXSourcesBuildPhase section */
 
 /* Begin XCBuildConfiguration section */
-		86BB56062E8B2D6D0054B8B5 /* Debug */ = {
-			isa = XCBuildConfiguration;
-			buildSettings = {
-				ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
-				ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
-				CODE_SIGN_STYLE = Automatic;
-				CURRENT_PROJECT_VERSION = 1;
-				DEVELOPMENT_ASSET_PATHS = "\"FirebaseAIExample/Preview Content\"";
-				DEVELOPMENT_TEAM = "";
-				ENABLE_PREVIEWS = YES;
-				ENABLE_USER_SCRIPT_SANDBOXING = NO;
-				GENERATE_INFOPLIST_FILE = YES;
-				INFOPLIST_KEY_UIApplicationSceneManifest_Generation = YES;
-				INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES;
-				INFOPLIST_KEY_UILaunchScreen_Generation = YES;
-				INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
-				INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
-				LD_RUNPATH_SEARCH_PATHS = (
-					"$(inherited)",
-					"@executable_path/Frameworks",
-				);
-				MARKETING_VERSION = 1.0;
-				PRODUCT_BUNDLE_IDENTIFIER = com.google.firebase.quickstart.FirebaseAIExample;
-				PRODUCT_NAME = "$(TARGET_NAME)";
-				SWIFT_EMIT_LOC_STRINGS = YES;
-				SWIFT_VERSION = 5.0;
-				TARGETED_DEVICE_FAMILY = "1,2";
-			};
-			name = Debug;
-		};
-		86BB56072E8B2D6D0054B8B5 /* Release */ = {
-			isa = XCBuildConfiguration;
-			buildSettings = {
-				ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
-				ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
-				CODE_SIGN_STYLE = Automatic;
-				CURRENT_PROJECT_VERSION = 1;
-				DEVELOPMENT_ASSET_PATHS = "\"FirebaseAIExample/Preview Content\"";
-				DEVELOPMENT_TEAM = "";
-				ENABLE_PREVIEWS = YES;
-				ENABLE_USER_SCRIPT_SANDBOXING = NO;
-				GENERATE_INFOPLIST_FILE = YES;
-				INFOPLIST_KEY_UIApplicationSceneManifest_Generation = YES;
-				INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES;
-				INFOPLIST_KEY_UILaunchScreen_Generation = YES;
-				INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
-				INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
-				LD_RUNPATH_SEARCH_PATHS = (
-					"$(inherited)",
-					"@executable_path/Frameworks",
-				);
-				MARKETING_VERSION = 1.0;
-				PRODUCT_BUNDLE_IDENTIFIER = com.google.firebase.quickstart.FirebaseAIExample;
-				PRODUCT_NAME = "$(TARGET_NAME)";
-				SWIFT_EMIT_LOC_STRINGS = YES;
-				SWIFT_VERSION = 5.0;
-				TARGETED_DEVICE_FAMILY = "1,2";
-			};
-			name = Release;
-		};
-		8848C83B2B0D04BD007B434F /* Debug */ = {
+		88779D3E2EC8A9CF0080D023 /* Debug */ = {
 			isa = XCBuildConfiguration;
 			buildSettings = {
 				ALWAYS_SEARCH_USER_PATHS = NO;
@@ -342,7 +194,7 @@
 				GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
 				GCC_WARN_UNUSED_FUNCTION = YES;
 				GCC_WARN_UNUSED_VARIABLE = YES;
-				IPHONEOS_DEPLOYMENT_TARGET = 16.0;
+				IPHONEOS_DEPLOYMENT_TARGET = 17.6;
 				LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
 				MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE;
 				MTL_FAST_MATH = YES;
@@ -353,7 +205,7 @@
 			};
 			name = Debug;
 		};
-		8848C83C2B0D04BD007B434F /* Release */ = {
+		88779D3F2EC8A9CF0080D023 /* Release */ = {
 			isa = XCBuildConfiguration;
 			buildSettings = {
 				ALWAYS_SEARCH_USER_PATHS = NO;
@@ -399,7 +251,7 @@
 				GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
 				GCC_WARN_UNUSED_FUNCTION = YES;
 				GCC_WARN_UNUSED_VARIABLE = YES;
-				IPHONEOS_DEPLOYMENT_TARGET = 16.0;
+				IPHONEOS_DEPLOYMENT_TARGET = 17.6;
 				LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
 				MTL_ENABLE_DEBUG_INFO = NO;
 				MTL_FAST_MATH = YES;
@@ -409,23 +261,22 @@
 			};
 			name = Release;
 		};
-		8848C83E2B0D04BD007B434F /* Debug */ = {
+		88779D412EC8A9CF0080D023 /* Debug */ = {
 			isa = XCBuildConfiguration;
 			buildSettings = {
 				ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
 				ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
 				CODE_SIGN_STYLE = Automatic;
 				CURRENT_PROJECT_VERSION = 1;
-				DEVELOPMENT_ASSET_PATHS = "\"FirebaseAIExample/Preview Content\"";
 				DEVELOPMENT_TEAM = "";
 				ENABLE_PREVIEWS = YES;
-				ENABLE_USER_SCRIPT_SANDBOXING = NO;
 				GENERATE_INFOPLIST_FILE = YES;
 				INFOPLIST_KEY_UIApplicationSceneManifest_Generation = YES;
 				INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES;
 				INFOPLIST_KEY_UILaunchScreen_Generation = YES;
 				INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
 				INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
+				IPHONEOS_DEPLOYMENT_TARGET = 17.6;
 				LD_RUNPATH_SEARCH_PATHS = (
 					"$(inherited)",
 					"@executable_path/Frameworks",
@@ -433,29 +284,32 @@
 				MARKETING_VERSION = 1.0;
 				PRODUCT_BUNDLE_IDENTIFIER = com.google.firebase.quickstart.FirebaseAIExample;
 				PRODUCT_NAME = "$(TARGET_NAME)";
+				STRING_CATALOG_GENERATE_SYMBOLS = YES;
+				SWIFT_APPROACHABLE_CONCURRENCY = YES;
+				SWIFT_DEFAULT_ACTOR_ISOLATION = MainActor;
 				SWIFT_EMIT_LOC_STRINGS = YES;
+				SWIFT_UPCOMING_FEATURE_MEMBER_IMPORT_VISIBILITY = YES;
 				SWIFT_VERSION = 5.0;
 				TARGETED_DEVICE_FAMILY = "1,2";
 			};
 			name = Debug;
 		};
-		8848C83F2B0D04BD007B434F /* Release */ = {
+		88779D422EC8A9CF0080D023 /* Release */ = {
 			isa = XCBuildConfiguration;
 			buildSettings = {
 				ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
 				ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
 				CODE_SIGN_STYLE = Automatic;
 				CURRENT_PROJECT_VERSION = 1;
-				DEVELOPMENT_ASSET_PATHS = "\"FirebaseAIExample/Preview Content\"";
 				DEVELOPMENT_TEAM = "";
 				ENABLE_PREVIEWS = YES;
-				ENABLE_USER_SCRIPT_SANDBOXING = NO;
 				GENERATE_INFOPLIST_FILE = YES;
 				INFOPLIST_KEY_UIApplicationSceneManifest_Generation = YES;
 				INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES;
 				INFOPLIST_KEY_UILaunchScreen_Generation = YES;
 				INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
 				INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
+				IPHONEOS_DEPLOYMENT_TARGET = 17.6;
 				LD_RUNPATH_SEARCH_PATHS = (
 					"$(inherited)",
 					"@executable_path/Frameworks",
@@ -463,7 +317,11 @@
 				MARKETING_VERSION = 1.0;
 				PRODUCT_BUNDLE_IDENTIFIER = com.google.firebase.quickstart.FirebaseAIExample;
 				PRODUCT_NAME = "$(TARGET_NAME)";
+				STRING_CATALOG_GENERATE_SYMBOLS = YES;
+				SWIFT_APPROACHABLE_CONCURRENCY = YES;
+				SWIFT_DEFAULT_ACTOR_ISOLATION = MainActor;
 				SWIFT_EMIT_LOC_STRINGS = YES;
+				SWIFT_UPCOMING_FEATURE_MEMBER_IMPORT_VISIBILITY = YES;
 				SWIFT_VERSION = 5.0;
 				TARGETED_DEVICE_FAMILY = "1,2";
 			};
@@ -472,29 +330,20 @@
 /* End XCBuildConfiguration section */
 
 /* Begin XCConfigurationList section */
-		86BB56052E8B2D6D0054B8B5 /* Build configuration list for PBXNativeTarget "FirebaseAIExampleZip" */ = {
+		88779D302EC8A9CF0080D023 /* Build configuration list for PBXProject "FirebaseAIExample" */ = {
 			isa = XCConfigurationList;
 			buildConfigurations = (
-				86BB56062E8B2D6D0054B8B5 /* Debug */,
-				86BB56072E8B2D6D0054B8B5 /* Release */,
+				88779D3E2EC8A9CF0080D023 /* Debug */,
+				88779D3F2EC8A9CF0080D023 /* Release */,
 			);
 			defaultConfigurationIsVisible = 0;
 			defaultConfigurationName = Release;
 		};
-		8848C82A2B0D04BC007B434F /* Build configuration list for PBXProject "FirebaseAIExample" */ = {
+		88779D402EC8A9CF0080D023 /* Build configuration list for PBXNativeTarget "FirebaseAIExample" */ = {
 			isa = XCConfigurationList;
 			buildConfigurations = (
-				8848C83B2B0D04BD007B434F /* Debug */,
-				8848C83C2B0D04BD007B434F /* Release */,
-			);
-			defaultConfigurationIsVisible = 0;
-			defaultConfigurationName = Release;
-		};
-		8848C83D2B0D04BD007B434F /* Build configuration list for PBXNativeTarget "FirebaseAIExample" */ = {
-			isa = XCConfigurationList;
-			buildConfigurations = (
-				8848C83E2B0D04BD007B434F /* Debug */,
-				8848C83F2B0D04BD007B434F /* Release */,
+				88779D412EC8A9CF0080D023 /* Debug */,
+				88779D422EC8A9CF0080D023 /* Release */,
 			);
 			defaultConfigurationIsVisible = 0;
 			defaultConfigurationName = Release;
@@ -502,41 +351,25 @@
 /* End XCConfigurationList section */
 
 /* Begin XCRemoteSwiftPackageReference section */
-		86A67E8B2E9FECCF00EDFB8A /* XCRemoteSwiftPackageReference "swift-cmark" */ = {
-			isa = XCRemoteSwiftPackageReference;
-			repositoryURL = "https://github.com/swiftlang/swift-cmark";
-			requirement = {
-				kind = revision;
-				revision = 3ccff77b2dc5b96b77db3da0d68d28068593fa53;
-			};
-		};
-		86BB55E52E8B2D6D0054B8B5 /* XCRemoteSwiftPackageReference "swift-markdown-ui" */ = {
+		88151ADA2EC9345700775CFB /* XCRemoteSwiftPackageReference "swift-markdown-ui" */ = {
 			isa = XCRemoteSwiftPackageReference;
 			repositoryURL = "https://github.com/gonzalezreal/swift-markdown-ui";
 			requirement = {
-				kind = revision;
-				revision = 55441810c0f678c78ed7e2ebd46dde89228e02fc;
-			};
-		};
-		88209C212B0FBDF700F64795 /* XCRemoteSwiftPackageReference "swift-markdown-ui" */ = {
-			isa = XCRemoteSwiftPackageReference;
-			repositoryURL = "https://github.com/gonzalezreal/swift-markdown-ui";
-			requirement = {
-				kind = revision;
-				revision = 5f613358148239d0292c0cef674a3c2314737f9e;
+				kind = upToNextMajorVersion;
+				minimumVersion = 2.4.1;
 			};
 		};
-		DEA09AC32B1FCE22001962D9 /* XCRemoteSwiftPackageReference "NetworkImage" */ = {
+		88779D8E2EC8AA920080D023 /* XCRemoteSwiftPackageReference "ConversationKit" */ = {
 			isa = XCRemoteSwiftPackageReference;
-			repositoryURL = "https://github.com/gonzalezreal/NetworkImage";
+			repositoryURL = "https://github.com/peterfriese/ConversationKit";
 			requirement = {
-				kind = revision;
-				revision = 7aff8d1b31148d32c5933d75557d42f6323ee3d1;
+				branch = main;
+				kind = branch;
 			};
 		};
-		DEFECAAB2D7BB49700EF9621 /* XCRemoteSwiftPackageReference "firebase-ios-sdk" */ = {
+		88779D912EC8AC460080D023 /* XCRemoteSwiftPackageReference "firebase-ios-sdk" */ = {
 			isa = XCRemoteSwiftPackageReference;
-			repositoryURL = "https://github.com/firebase/firebase-ios-sdk.git";
+			repositoryURL = "https://github.com/firebase/firebase-ios-sdk";
 			requirement = {
 				kind = upToNextMajorVersion;
 				minimumVersion = 12.6.0;
@@ -545,60 +378,22 @@
 /* End XCRemoteSwiftPackageReference section */
 
 /* Begin XCSwiftPackageProductDependency section */
-		86A67E8C2E9FECCF00EDFB8A /* cmark-gfm */ = {
-			isa = XCSwiftPackageProductDependency;
-			package = 86A67E8B2E9FECCF00EDFB8A /* XCRemoteSwiftPackageReference "swift-cmark" */;
-			productName = "cmark-gfm";
-		};
-		86A67E8E2E9FECCF00EDFB8A /* cmark-gfm-extensions */ = {
-			isa = XCSwiftPackageProductDependency;
-			package = 86A67E8B2E9FECCF00EDFB8A /* XCRemoteSwiftPackageReference "swift-cmark" */;
-			productName = "cmark-gfm-extensions";
-		};
-		86A67E902E9FED0600EDFB8A /* NetworkImage */ = {
-			isa = XCSwiftPackageProductDependency;
-			package = DEA09AC32B1FCE22001962D9 /* XCRemoteSwiftPackageReference "NetworkImage" */;
-			productName = NetworkImage;
-		};
-		86A67E922E9FED1700EDFB8A /* NetworkImage */ = {
-			isa = XCSwiftPackageProductDependency;
-			package = DEA09AC32B1FCE22001962D9 /* XCRemoteSwiftPackageReference "NetworkImage" */;
-			productName = NetworkImage;
-		};
-		86A67E942E9FED2200EDFB8A /* cmark-gfm */ = {
-			isa = XCSwiftPackageProductDependency;
-			package = 86A67E8B2E9FECCF00EDFB8A /* XCRemoteSwiftPackageReference "swift-cmark" */;
-			productName = "cmark-gfm";
-		};
-		86A67E962E9FED2200EDFB8A /* cmark-gfm-extensions */ = {
-			isa = XCSwiftPackageProductDependency;
-			package = 86A67E8B2E9FECCF00EDFB8A /* XCRemoteSwiftPackageReference "swift-cmark" */;
-			productName = "cmark-gfm-extensions";
-		};
-		86BB55E42E8B2D6D0054B8B5 /* MarkdownUI */ = {
-			isa = XCSwiftPackageProductDependency;
-			package = 86BB55E52E8B2D6D0054B8B5 /* XCRemoteSwiftPackageReference "swift-markdown-ui" */;
-			productName = MarkdownUI;
-		};
-		86BB55E62E8B2D6D0054B8B5 /* GenerativeAIUIComponents */ = {
-			isa = XCSwiftPackageProductDependency;
-			productName = GenerativeAIUIComponents;
-		};
-		886F95D72B17BA420036F07A /* MarkdownUI */ = {
+		88151ADB2EC9345700775CFB /* MarkdownUI */ = {
 			isa = XCSwiftPackageProductDependency;
-			package = 88209C212B0FBDF700F64795 /* XCRemoteSwiftPackageReference "swift-markdown-ui" */;
+			package = 88151ADA2EC9345700775CFB /* XCRemoteSwiftPackageReference "swift-markdown-ui" */;
 			productName = MarkdownUI;
 		};
-		886F95E22B17D6630036F07A /* GenerativeAIUIComponents */ = {
+		88779D8F2EC8AA920080D023 /* ConversationKit */ = {
 			isa = XCSwiftPackageProductDependency;
-			productName = GenerativeAIUIComponents;
+			package = 88779D8E2EC8AA920080D023 /* XCRemoteSwiftPackageReference "ConversationKit" */;
+			productName = ConversationKit;
 		};
-		DE26D95E2DBB3E9F007E6668 /* FirebaseAI */ = {
+		88779D922EC8AC460080D023 /* FirebaseAILogic */ = {
 			isa = XCSwiftPackageProductDependency;
-			package = DEFECAAB2D7BB49700EF9621 /* XCRemoteSwiftPackageReference "firebase-ios-sdk" */;
-			productName = FirebaseAI;
+			package = 88779D912EC8AC460080D023 /* XCRemoteSwiftPackageReference "firebase-ios-sdk" */;
+			productName = FirebaseAILogic;
 		};
 /* End XCSwiftPackageProductDependency section */
 	};
-	rootObject = 8848C8272B0D04BC007B434F /* Project object */;
+	rootObject = 88779D2D2EC8A9CF0080D023 /* Project object */;
 }
diff --git a/firebaseai/FirebaseAIExample.xcodeproj/xcshareddata/xcschemes/FirebaseAIExample (iOS).xcscheme b/firebaseai/FirebaseAIExample.xcodeproj/xcshareddata/xcschemes/FirebaseAIExample (iOS).xcscheme
index 616e68298..f6bf8e266 100644
--- a/firebaseai/FirebaseAIExample.xcodeproj/xcshareddata/xcschemes/FirebaseAIExample (iOS).xcscheme
+++ b/firebaseai/FirebaseAIExample.xcodeproj/xcshareddata/xcschemes/FirebaseAIExample (iOS).xcscheme
@@ -15,7 +15,7 @@
             buildForAnalyzing = "YES">
             <BuildableReference
                BuildableIdentifier = "primary"
-               BlueprintIdentifier = "8848C82E2B0D04BC007B434F"
+               BlueprintIdentifier = "88779D342EC8A9CF0080D023"
                BuildableName = "FirebaseAIExample.app"
                BlueprintName = "FirebaseAIExample"
                ReferencedContainer = "container:FirebaseAIExample.xcodeproj">
@@ -44,7 +44,7 @@
             runnableDebuggingMode = "0">
             <BuildableReference
                BuildableIdentifier = "primary"
-               BlueprintIdentifier = "8848C82E2B0D04BC007B434F"
+               BlueprintIdentifier = "88779D342EC8A9CF0080D023"
                BuildableName = "FirebaseAIExample.app"
                BlueprintName = "FirebaseAIExample"
                ReferencedContainer = "container:FirebaseAIExample.xcodeproj">
@@ -67,7 +67,7 @@
             runnableDebuggingMode = "0">
             <BuildableReference
                BuildableIdentifier = "primary"
-               BlueprintIdentifier = "8848C82E2B0D04BC007B434F"
+               BlueprintIdentifier = "88779D342EC8A9CF0080D023"
                BuildableName = "FirebaseAIExample.app"
                BlueprintName = "FirebaseAIExample"
                ReferencedContainer = "container:FirebaseAIExample.xcodeproj">
diff --git a/firebaseai/FirebaseAIExample/Assets.xcassets/AppIcon.appiconset/Contents.json b/firebaseai/FirebaseAIExample/Assets.xcassets/AppIcon.appiconset/Contents.json
index 13613e3ee..860967d00 100644
--- a/firebaseai/FirebaseAIExample/Assets.xcassets/AppIcon.appiconset/Contents.json
+++ b/firebaseai/FirebaseAIExample/Assets.xcassets/AppIcon.appiconset/Contents.json
@@ -1,6 +1,29 @@
 {
   "images" : [
     {
+      "filename" : "Firebase AI Logic.png",
+      "idiom" : "universal",
+      "platform" : "ios",
+      "size" : "1024x1024"
+    },
+    {
+      "appearances" : [
+        {
+          "appearance" : "luminosity",
+          "value" : "dark"
+        }
+      ],
+      "idiom" : "universal",
+      "platform" : "ios",
+      "size" : "1024x1024"
+    },
+    {
+      "appearances" : [
+        {
+          "appearance" : "luminosity",
+          "value" : "tinted"
+        }
+      ],
       "idiom" : "universal",
       "platform" : "ios",
       "size" : "1024x1024"
diff --git a/firebaseai/FirebaseAIExample/Assets.xcassets/AppIcon.appiconset/Firebase AI Logic.png b/firebaseai/FirebaseAIExample/Assets.xcassets/AppIcon.appiconset/Firebase AI Logic.png
new file mode 100644
index 000000000..63b7bc6cb
Binary files /dev/null and b/firebaseai/FirebaseAIExample/Assets.xcassets/AppIcon.appiconset/Firebase AI Logic.png differ
diff --git a/firebaseai/FirebaseAIExample/ChatExample/Models/ChatMessage.swift b/firebaseai/FirebaseAIExample/ChatExample/Models/ChatMessage.swift
deleted file mode 100644
index 79f18c5e4..000000000
--- a/firebaseai/FirebaseAIExample/ChatExample/Models/ChatMessage.swift
+++ /dev/null
@@ -1,76 +0,0 @@
-// Copyright 2023 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-#if canImport(FirebaseAILogic)
-  import FirebaseAILogic
-#else
-  import FirebaseAI
-#endif
-import Foundation
-
-enum Participant {
-  case system
-  case user
-}
-
-struct ChatMessage: Identifiable, Equatable {
-  let id = UUID().uuidString
-  var message: String
-  var groundingMetadata: GroundingMetadata?
-  let participant: Participant
-  var pending = false
-
-  static func pending(participant: Participant) -> ChatMessage {
-    Self(message: "", participant: participant, pending: true)
-  }
-
-  // TODO(andrewheard): Add Equatable conformance to GroundingMetadata and remove this
-  static func == (lhs: ChatMessage, rhs: ChatMessage) -> Bool {
-    lhs.id == rhs.id && lhs.message == rhs.message && lhs.participant == rhs.participant && lhs
-      .pending == rhs.pending
-  }
-}
-
-extension ChatMessage {
-  static var samples: [ChatMessage] = [
-    .init(message: "Hello. What can I do for you today?", participant: .system),
-    .init(message: "Show me a simple loop in Swift.", participant: .user),
-    .init(message: """
-    Sure, here is a simple loop in Swift:
-
-    # Example 1
-    ```
-    for i in 1...5 {
-      print("Hello, world!")
-    }
-    ```
-
-    This loop will print the string "Hello, world!" five times. The for loop iterates over a range of numbers,
-    in this case the numbers from 1 to 5. The variable i is assigned each number in the range, and the code inside the loop is executed.
-
-    **Here is another example of a simple loop in Swift:**
-    ```swift
-    var sum = 0
-    for i in 1...100 {
-      sum += i
-    }
-    print("The sum of the numbers from 1 to 100 is \\(sum).")
-    ```
-
-    This loop calculates the sum of the numbers from 1 to 100. The variable sum is initialized to 0, and then the for loop iterates over the range of numbers from 1 to 100. The variable i is assigned each number in the range, and the value of i is added to the sum variable. After the loop has finished executing, the value of sum is printed to the console.
-    """, participant: .system),
-  ]
-
-  static var sample = samples[0]
-}
diff --git a/firebaseai/FirebaseAIExample/ChatExample/Screens/ConversationScreen.swift b/firebaseai/FirebaseAIExample/ChatExample/Screens/ConversationScreen.swift
deleted file mode 100644
index d12341dea..000000000
--- a/firebaseai/FirebaseAIExample/ChatExample/Screens/ConversationScreen.swift
+++ /dev/null
@@ -1,149 +0,0 @@
-// Copyright 2023 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-#if canImport(FirebaseAILogic)
-  import FirebaseAILogic
-#else
-  import FirebaseAI
-#endif
-import GenerativeAIUIComponents
-import SwiftUI
-
-struct ConversationScreen: View {
-  let firebaseService: FirebaseAI
-  let title: String
-  @StateObject var viewModel: ConversationViewModel
-
-  @State
-  private var userPrompt = ""
-
-  init(firebaseService: FirebaseAI, title: String, searchGroundingEnabled: Bool = false) {
-    let model = firebaseService.generativeModel(
-      modelName: "gemini-2.0-flash-001",
-      tools: searchGroundingEnabled ? [.googleSearch()] : []
-    )
-    self.title = title
-    self.firebaseService = firebaseService
-    _viewModel =
-      StateObject(wrappedValue: ConversationViewModel(firebaseService: firebaseService,
-                                                      model: model))
-  }
-
-  enum FocusedField: Hashable {
-    case message
-  }
-
-  @FocusState
-  var focusedField: FocusedField?
-
-  var body: some View {
-    VStack {
-      ScrollViewReader { scrollViewProxy in
-        List {
-          ForEach(viewModel.messages) { message in
-            MessageView(message: message)
-          }
-          if let error = viewModel.error {
-            ErrorView(error: error)
-              .tag("errorView")
-          }
-        }
-        .listStyle(.plain)
-        .onChange(of: viewModel.messages, perform: { newValue in
-          if viewModel.hasError {
-            // wait for a short moment to make sure we can actually scroll to the bottom
-            DispatchQueue.main.asyncAfter(deadline: .now() + 0.05) {
-              withAnimation {
-                scrollViewProxy.scrollTo("errorView", anchor: .bottom)
-              }
-              focusedField = .message
-            }
-          } else {
-            guard let lastMessage = viewModel.messages.last else { return }
-
-            // wait for a short moment to make sure we can actually scroll to the bottom
-            DispatchQueue.main.asyncAfter(deadline: .now() + 0.05) {
-              withAnimation {
-                scrollViewProxy.scrollTo(lastMessage.id, anchor: .bottom)
-              }
-              focusedField = .message
-            }
-          }
-        })
-      }
-      InputField("Message...", text: $userPrompt) {
-        Image(systemName: viewModel.busy ? "stop.circle.fill" : "arrow.up.circle.fill")
-          .font(.title)
-      }
-      .focused($focusedField, equals: .message)
-      .onSubmit { sendOrStop() }
-    }
-    .onTapGesture {
-      focusedField = nil
-    }
-    .toolbar {
-      ToolbarItem(placement: .primaryAction) {
-        Button(action: newChat) {
-          Image(systemName: "square.and.pencil")
-        }
-      }
-    }
-    .navigationTitle(title)
-    .onAppear {
-      focusedField = .message
-    }
-  }
-
-  private func sendMessage() {
-    Task {
-      let prompt = userPrompt
-      userPrompt = ""
-      await viewModel.sendMessage(prompt, streaming: true)
-    }
-  }
-
-  private func sendOrStop() {
-    focusedField = nil
-
-    if viewModel.busy {
-      viewModel.stop()
-    } else {
-      sendMessage()
-    }
-  }
-
-  private func newChat() {
-    viewModel.startNewChat()
-  }
-}
-
-struct ConversationScreen_Previews: PreviewProvider {
-  struct ContainerView: View {
-    @StateObject var viewModel = ConversationViewModel(firebaseService: FirebaseAI
-      .firebaseAI()) // Example service init
-
-    var body: some View {
-      ConversationScreen(firebaseService: FirebaseAI.firebaseAI(), title: "Chat sample")
-        .onAppear {
-          viewModel.messages = ChatMessage.samples
-        }
-    }
-  }
-
-  static var previews: some View {
-    NavigationStack {
-      ConversationScreen(firebaseService: FirebaseAI.firebaseAI(), title: "Chat sample")
-    }
-  }
-}
diff --git a/firebaseai/FirebaseAIExample/ChatExample/Views/ErrorView.swift b/firebaseai/FirebaseAIExample/ChatExample/Views/ErrorView.swift
deleted file mode 100644
index bd321426c..000000000
--- a/firebaseai/FirebaseAIExample/ChatExample/Views/ErrorView.swift
+++ /dev/null
@@ -1,100 +0,0 @@
-// Copyright 2023 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-#if canImport(FirebaseAILogic)
-  import FirebaseAILogic
-#else
-  import FirebaseAI
-#endif
-import SwiftUI
-
-struct ErrorView: View {
-  var error: Error
-  @State private var isDetailsSheetPresented = false
-  var body: some View {
-    HStack {
-      Text("An error occurred.")
-      Button(action: { isDetailsSheetPresented.toggle() }) {
-        Image(systemName: "info.circle")
-      }
-    }
-    .frame(maxWidth: .infinity, alignment: .center)
-    .listRowSeparator(.hidden)
-    .sheet(isPresented: $isDetailsSheetPresented) {
-      ErrorDetailsView(error: error)
-    }
-  }
-}
-
-#Preview {
-  NavigationView {
-    let errorPromptBlocked = GenerateContentError.promptBlocked(
-      response: GenerateContentResponse(
-        candidates: [
-          Candidate(
-            content: ModelContent(role: "model", parts: [
-              """
-              A _hypothetical_ model response.
-              Cillum ex aliqua amet aliquip labore amet eiusmod consectetur reprehenderit sit commodo.
-              """,
-            ]),
-            safetyRatings: [
-              SafetyRating(
-                category: .dangerousContent,
-                probability: .high,
-                probabilityScore: 0.8,
-                severity: .medium,
-                severityScore: 0.9,
-                blocked: true
-              ),
-              SafetyRating(
-                category: .harassment,
-                probability: .low,
-                probabilityScore: 0.5,
-                severity: .low,
-                severityScore: 0.6,
-                blocked: false
-              ),
-              SafetyRating(
-                category: .hateSpeech,
-                probability: .low,
-                probabilityScore: 0.3,
-                severity: .medium,
-                severityScore: 0.2,
-                blocked: false
-              ),
-              SafetyRating(
-                category: .sexuallyExplicit,
-                probability: .low,
-                probabilityScore: 0.2,
-                severity: .negligible,
-                severityScore: 0.5,
-                blocked: false
-              ),
-            ],
-            finishReason: FinishReason.other,
-            citationMetadata: nil
-          ),
-        ]
-      )
-    )
-    List {
-      MessageView(message: ChatMessage.samples[0])
-      MessageView(message: ChatMessage.samples[1])
-      ErrorView(error: errorPromptBlocked)
-    }
-    .listStyle(.plain)
-    .navigationTitle("Chat example")
-  }
-}
diff --git a/firebaseai/FirebaseAIExample/ContentView.swift b/firebaseai/FirebaseAIExample/ContentView.swift
index 73bc64fcf..3d5e15dbf 100644
--- a/firebaseai/FirebaseAIExample/ContentView.swift
+++ b/firebaseai/FirebaseAIExample/ContentView.swift
@@ -13,95 +13,105 @@
 // limitations under the License.
 
 import SwiftUI
-#if canImport(FirebaseAILogic)
-  import FirebaseAILogic
-#else
-  import FirebaseAI
-#endif
+import FirebaseAILogic
 
-enum BackendOption: String, CaseIterable, Identifiable {
-  case googleAI = "Gemini Developer API"
-  case vertexAI = "Vertex AI Gemini API"
-  var id: String { rawValue }
+struct ContentView: View {
+  @State private var selectedBackend: BackendOption = .googleAI
+  @State private var selectedUseCase: UseCase = .all
 
-  var backendValue: FirebaseAI {
-    switch self {
-    case .googleAI:
-      return FirebaseAI.firebaseAI(backend: .googleAI())
-    case .vertexAI:
-      return FirebaseAI.firebaseAI(backend: .vertexAI())
+  var filteredSamples: [Sample] {
+    if selectedUseCase == .all {
+      return Sample.samples
+    } else {
+      return Sample.samples.filter { $0.useCases.contains(selectedUseCase) }
     }
   }
-}
 
-struct ContentView: View {
-  @State private var selectedBackend: BackendOption = .googleAI
-  @State private var firebaseService: FirebaseAI = FirebaseAI.firebaseAI(backend: .googleAI())
+  let columns = [
+    GridItem(.adaptive(minimum: 150)),
+  ]
 
   var body: some View {
     NavigationStack {
-      List {
-        Section("Configuration") {
-          Picker("Backend", selection: $selectedBackend) {
-            ForEach(BackendOption.allCases) { option in
-              Text(option.rawValue).tag(option)
+      ScrollView {
+        VStack(alignment: .leading, spacing: 20) {
+          // Backend Configuration
+          VStack(alignment: .leading) {
+            Text("Backend Configuration")
+              .font(.system(size: 20, weight: .bold))
+              .padding(.horizontal)
+
+            Picker("Backend", selection: $selectedBackend) {
+              ForEach(BackendOption.allCases) { option in
+                Text(option.rawValue)
+                  .tag(option)
+              }
             }
+            .pickerStyle(SegmentedPickerStyle())
+            .padding(.horizontal)
           }
-        }
-        Section("Examples") {
-          NavigationLink {
-            GenerateContentScreen(firebaseService: firebaseService)
-          } label: {
-            Label("Generate Content", systemImage: "doc.text")
-          }
-          NavigationLink {
-            GenerateContentFromTemplateScreen(firebaseService: firebaseService)
-          } label: {
-            Label("Generate Content from Template", systemImage: "doc.text.fill")
-          }
-          NavigationLink {
-            PhotoReasoningScreen(firebaseService: firebaseService)
-          } label: {
-            Label("Multi-modal", systemImage: "doc.richtext")
-          }
-          NavigationLink {
-            ConversationScreen(firebaseService: firebaseService, title: "Chat")
-          } label: {
-            Label("Chat", systemImage: "ellipsis.message.fill")
-          }
-          NavigationLink {
-            ConversationScreen(
-              firebaseService: firebaseService,
-              title: "Grounding",
-              searchGroundingEnabled: true
-            )
-          } label: {
-            Label("Grounding with Google Search", systemImage: "magnifyingglass")
-          }
-          NavigationLink {
-            FunctionCallingScreen(firebaseService: firebaseService)
-          } label: {
-            Label("Function Calling", systemImage: "function")
-          }
-          NavigationLink {
-            ImagenScreen(firebaseService: firebaseService)
-          } label: {
-            Label("Imagen", systemImage: "camera.circle")
+
+          // Use Case Filter
+          VStack(alignment: .leading) {
+            Text("Filter by use case")
+              .font(.system(size: 20, weight: .bold))
+              .padding(.horizontal)
+
+            ScrollView(.horizontal, showsIndicators: false) {
+              HStack(spacing: 10) {
+                ForEach(UseCase.allCases) { useCase in
+                  FilterChipView(useCase: useCase, isSelected: selectedUseCase == useCase) {
+                    selectedUseCase = useCase
+                  }
+                }
+              }
+              .padding(.horizontal)
+            }
           }
-          NavigationLink {
-            ImagenFromTemplateScreen(firebaseService: firebaseService)
-          } label: {
-            Label("Imagen from Template", systemImage: "camera.circle.fill")
+
+          // Samples
+          VStack(alignment: .leading) {
+            Text("Samples")
+              .font(.system(size: 20, weight: .bold))
+              .padding(.horizontal)
+
+            LazyVGrid(columns: columns, spacing: 20) {
+              ForEach(filteredSamples) { sample in
+                NavigationLink(destination: destinationView(for: sample)) {
+                  SampleCardView(sample: sample)
+                }
+                .buttonStyle(PlainButtonStyle())
+              }
+            }
+            .padding(.horizontal)
           }
         }
+        .padding(.vertical)
       }
-      .navigationTitle("Generative AI Examples")
-      .onChange(of: selectedBackend) { newBackend in
-        firebaseService = newBackend.backendValue
-        // Note: This might cause views that hold the old service instance to misbehave
-        // unless they are also correctly updated or recreated.
-      }
+      .background(Color(.systemGroupedBackground))
+      .navigationTitle("Firebase AI Logic")
+    }
+  }
+
+  @ViewBuilder
+  private func destinationView(for sample: Sample) -> some View {
+    switch sample.navRoute {
+    case "ChatScreen":
+      ChatScreen(backendType: selectedBackend, sample: sample)
+    case "ImagenScreen":
+      ImagenScreen(backendType: selectedBackend, sample: sample)
+    case "ImagenFromTemplateScreen":
+      ImagenFromTemplateScreen(backendType: selectedBackend, sample: sample)
+    case "GenerateContentFromTemplateScreen":
+      GenerateContentFromTemplateScreen(backendType: selectedBackend, sample: sample)
+    case "MultimodalScreen":
+      MultimodalScreen(backendType: selectedBackend, sample: sample)
+    case "FunctionCallingScreen":
+      FunctionCallingScreen(backendType: selectedBackend, sample: sample)
+    case "GroundingScreen":
+      GroundingScreen(backendType: selectedBackend, sample: sample)
+    default:
+      EmptyView()
     }
   }
 }
diff --git a/firebaseai/FirebaseAIExample/Features/Chat/Models/ChatMessage.swift b/firebaseai/FirebaseAIExample/Features/Chat/Models/ChatMessage.swift
new file mode 100644
index 000000000..c672d4d02
--- /dev/null
+++ b/firebaseai/FirebaseAIExample/Features/Chat/Models/ChatMessage.swift
@@ -0,0 +1,133 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#if canImport(FirebaseAILogic)
+  import FirebaseAILogic
+#else
+  import FirebaseAI
+#endif
+import Foundation
+import ConversationKit
+import UIKit
+
+public struct ChatMessage: Message {
+  public let id: UUID = .init()
+  public var content: String?
+  public let participant: Participant
+  public let error: (any Error)?
+  public var pending = false
+  public var groundingMetadata: GroundingMetadata?
+  public var attachments: [MultimodalAttachment] = []
+  public var image: UIImage?
+  // required by the Message protocol, but not used in this app
+  public var imageURL: String?
+
+  public init(content: String? = nil, imageURL: String? = nil, participant: Participant,
+              error: (any Error)? = nil, pending: Bool = false,
+              attachments: [MultimodalAttachment] = [], image: UIImage? = nil) {
+    self.content = content
+    self.imageURL = imageURL
+    self.participant = participant
+    self.error = error
+    self.pending = pending
+    self.attachments = attachments
+    self.image = image
+  }
+
+  // Protocol-required initializer
+  public init(content: String?, imageURL: String? = nil, participant: Participant) {
+    self.content = content
+    self.imageURL = imageURL
+    self.participant = participant
+    error = nil
+  }
+}
+
+extension ChatMessage {
+  public static func pending(participant: Participant) -> ChatMessage {
+    Self(content: "", participant: participant, pending: true)
+  }
+}
+
+// Implement Equatable and Hashable for ChatMessage (ignore error)
+extension ChatMessage {
+  public static func == (lhs: ChatMessage, rhs: ChatMessage) -> Bool {
+    lhs.id == rhs.id &&
+      lhs.content == rhs.content &&
+      lhs.participant == rhs.participant &&
+      lhs.image == rhs.image &&
+      lhs.attachments == rhs.attachments
+    // intentionally ignore `error`
+  }
+
+  public func hash(into hasher: inout Hasher) {
+    hasher.combine(id)
+    hasher.combine(content)
+    hasher.combine(participant)
+    hasher.combine(image)
+    hasher.combine(attachments)
+    // intentionally ignore `error`
+  }
+}
+
+public extension ChatMessage {
+  static var samples: [ChatMessage] = [
+    .init(content: "Hello. What can I do for you today?", participant: .other),
+    .init(content: "Show me a simple loop in Swift.", participant: .user),
+    .init(content: """
+    Sure, here is a simple loop in Swift:
+
+    # Example 1
+    ```
+    for i in 1...5 {
+      print("Hello, world!")
+    }
+    ```
+
+    This loop will print the string "Hello, world!" five times. The for loop iterates over a range of numbers,
+    in this case the numbers from 1 to 5. The variable i is assigned each number in the range, and the code inside the loop is executed.
+
+    **Here is another example of a simple loop in Swift:**
+    ```swift
+    var sum = 0
+    for i in 1...100 {
+      sum += i
+    }
+    print("The sum of the numbers from 1 to 100 is \\(sum).")
+    ```
+
+    This loop calculates the sum of the numbers from 1 to 100. The variable sum is initialized to 0, and then the for loop iterates over the range of numbers from 1 to 100. The variable i is assigned each number in the range, and the value of i is added to the sum variable. After the loop has finished executing, the value of sum is printed to the console.
+    """, participant: .other),
+  ]
+
+  static var sample = samples[0]
+}
+
+public extension ChatMessage {
+  static func from(_ modelContent: ModelContent) -> ChatMessage? {
+    // TODO: add non-text parts to message when multimodal support is added
+    let text = modelContent.parts.compactMap { ($0 as? TextPart)?.text }.joined()
+    guard !text.isEmpty else {
+      return nil
+    }
+
+    let participant: Participant = (modelContent.role == "user") ? .user : .other
+
+    return ChatMessage(content: text, participant: participant)
+  }
+
+  static func from(_ modelContents: [ModelContent]) -> [ChatMessage] {
+    return modelContents.compactMap { from($0) }
+  }
+}
diff --git a/firebaseai/FirebaseAIExample/Features/Chat/Screens/ChatScreen.swift b/firebaseai/FirebaseAIExample/Features/Chat/Screens/ChatScreen.swift
new file mode 100644
index 000000000..9023fdcc0
--- /dev/null
+++ b/firebaseai/FirebaseAIExample/Features/Chat/Screens/ChatScreen.swift
@@ -0,0 +1,71 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#if canImport(FirebaseAILogic)
+  import FirebaseAILogic
+#else
+  import FirebaseAI
+#endif
+import SwiftUI
+import ConversationKit
+
+struct ChatScreen: View {
+  let backendType: BackendOption
+  @StateObject var viewModel: ChatViewModel
+
+  init(backendType: BackendOption, sample: Sample? = nil) {
+    self.backendType = backendType
+    _viewModel =
+      StateObject(wrappedValue: ChatViewModel(backendType: backendType,
+                                              sample: sample))
+  }
+
+  var body: some View {
+    NavigationStack {
+      ConversationView(messages: $viewModel.messages,
+                       userPrompt: viewModel.initialPrompt) { message in
+        MessageView(message: message)
+      }
+      .disableAttachments()
+      .onSendMessage { message in
+        await viewModel.sendMessage(message.content ?? "", streaming: true)
+      }
+      .onError { error in
+        viewModel.presentErrorDetails = true
+      }
+      .sheet(isPresented: $viewModel.presentErrorDetails) {
+        if let error = viewModel.error {
+          ErrorDetailsView(error: error)
+        }
+      }
+      .toolbar {
+        ToolbarItem(placement: .primaryAction) {
+          Button(action: newChat) {
+            Image(systemName: "square.and.pencil")
+          }
+        }
+      }
+      .navigationTitle(viewModel.title)
+      .navigationBarTitleDisplayMode(.inline)
+    }
+  }
+
+  private func newChat() {
+    viewModel.startNewChat()
+  }
+}
+
+#Preview {
+  ChatScreen(backendType: .googleAI)
+}
diff --git a/firebaseai/FirebaseAIExample/Features/Chat/ViewModels/ChatViewModel.swift b/firebaseai/FirebaseAIExample/Features/Chat/ViewModels/ChatViewModel.swift
new file mode 100644
index 000000000..016bbe6ac
--- /dev/null
+++ b/firebaseai/FirebaseAIExample/Features/Chat/ViewModels/ChatViewModel.swift
@@ -0,0 +1,189 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+ +#if canImport(FirebaseAILogic) + import FirebaseAILogic +#else + import FirebaseAI +#endif +import Foundation +import UIKit +import Combine +import ConversationKit + +@MainActor +class ChatViewModel: ObservableObject { + /// This array holds both the user's and the system's chat messages + @Published var messages = [ChatMessage]() + + /// Indicates we're waiting for the model to finish + @Published var busy = false + + @Published var error: Error? + var hasError: Bool { + return error != nil + } + + @Published var presentErrorDetails: Bool = false + + @Published var initialPrompt: String = "" + @Published var title: String = "" + + private var model: GenerativeModel + private var chat: Chat + + private var chatTask: Task? + + private var sample: Sample? + private var backendType: BackendOption + + init(backendType: BackendOption, sample: Sample? = nil) { + self.sample = sample + self.backendType = backendType + + let firebaseService = backendType == .googleAI + ? FirebaseAI.firebaseAI(backend: .googleAI()) + : FirebaseAI.firebaseAI(backend: .vertexAI()) + + model = firebaseService.generativeModel( + modelName: sample?.modelName ?? "gemini-2.5-flash", + generationConfig: sample?.generationConfig, + systemInstruction: sample?.systemInstruction + ) + + if let chatHistory = sample?.chatHistory, !chatHistory.isEmpty { + messages = ChatMessage.from(chatHistory) + chat = model.startChat(history: chatHistory) + } else { + chat = model.startChat() + } + + initialPrompt = sample?.initialPrompt ?? "" + title = sample?.title ?? "" + } + + func sendMessage(_ text: String, streaming: Bool = true) async { + error = nil + if streaming { + await internalSendMessageStreaming(text) + } else { + await internalSendMessage(text) + } + } + + func startNewChat() { + stop() + error = nil + chat = model.startChat() + messages.removeAll() + initialPrompt = "" + } + + func stop() { + chatTask?.cancel() + error = nil + } + + private func internalSendMessageStreaming(_ text: String) async { + chatTask?.cancel() + + chatTask = Task { + busy = true + defer { + busy = false + } + + // first, add the user's message to the chat + let userMessage = ChatMessage(content: text, participant: .user) + messages.append(userMessage) + + // add a pending message while we're waiting for a response from the backend + let systemMessage = ChatMessage.pending(participant: .other) + messages.append(systemMessage) + + do { + let responseStream = try chat.sendMessageStream(text) + for try await chunk in responseStream { + messages[messages.count - 1].pending = false + if let text = chunk.text { + messages[messages.count - 1] + .content = (messages[messages.count - 1].content ?? "") + text + } + + if let inlineDataPart = chunk.inlineDataParts.first { + if let uiImage = UIImage(data: inlineDataPart.data) { + messages[messages.count - 1].image = uiImage + } else { + print("Failed to convert inline data to UIImage") + } + } + } + } catch { + self.error = error + print(error.localizedDescription) + let errorMessage = ChatMessage(content: "An error occurred. 
Please try again.", + participant: .other, + error: error, + pending: false) + messages[messages.count - 1] = errorMessage + } + } + } + + private func internalSendMessage(_ text: String) async { + chatTask?.cancel() + + chatTask = Task { + busy = true + defer { + busy = false + } + + // first, add the user's message to the chat + let userMessage = ChatMessage(content: text, participant: .user) + messages.append(userMessage) + + // add a pending message while we're waiting for a response from the backend + let systemMessage = ChatMessage.pending(participant: .other) + messages.append(systemMessage) + + do { + var response: GenerateContentResponse? + response = try await chat.sendMessage(text) + + if let responseText = response?.text { + // replace pending message with backend response + messages[messages.count - 1].content = responseText + messages[messages.count - 1].pending = false + } + + if let inlineDataPart = response?.inlineDataParts.first { + if let uiImage = UIImage(data: inlineDataPart.data) { + messages[messages.count - 1].image = uiImage + } else { + print("Failed to convert inline data to UIImage") + } + } + } catch { + self.error = error + print(error.localizedDescription) + let errorMessage = ChatMessage(content: "An error occurred. Please try again.", + participant: .other, + error: error, + pending: false) + messages[messages.count - 1] = errorMessage + } + } + } +} diff --git a/firebaseai/FirebaseAIExample/ChatExample/Views/BouncingDots.swift b/firebaseai/FirebaseAIExample/Features/Chat/Views/BouncingDots.swift similarity index 100% rename from firebaseai/FirebaseAIExample/ChatExample/Views/BouncingDots.swift rename to firebaseai/FirebaseAIExample/Features/Chat/Views/BouncingDots.swift diff --git a/firebaseai/FirebaseAIExample/ChatExample/Views/MessageView.swift b/firebaseai/FirebaseAIExample/Features/Chat/Views/MessageView.swift similarity index 50% rename from firebaseai/FirebaseAIExample/ChatExample/Views/MessageView.swift rename to firebaseai/FirebaseAIExample/Features/Chat/Views/MessageView.swift index f35dd7b35..c8b1d8b1f 100644 --- a/firebaseai/FirebaseAIExample/ChatExample/Views/MessageView.swift +++ b/firebaseai/FirebaseAIExample/Features/Chat/Views/MessageView.swift @@ -16,6 +16,7 @@ import MarkdownUI import SwiftUI #if canImport(FirebaseAILogic) import FirebaseAILogic + import ConversationKit #else import FirebaseAI #endif @@ -41,18 +42,44 @@ extension View { } struct MessageContentView: View { + @Environment(\.presentErrorAction) var presentErrorAction var message: ChatMessage var body: some View { if message.pending { BouncingDots() } else { - // Grounded Response - if let groundingMetadata = message.groundingMetadata { - GroundedResponseView(message: message, groundingMetadata: groundingMetadata) + // Error Message + if let error = message.error { + HStack { + Text("An error occurred.") + Button("More information", systemImage: "info.circle") { + presentErrorAction?(error) + } + .labelStyle(.iconOnly) + } } else { - // Non-grounded response - ResponseTextView(message: message) + VStack(alignment: .leading, spacing: 8) { + if message.participant == .user && !message.attachments.isEmpty { + AttachmentPreviewScrollView(attachments: message.attachments) + } + + if let image = message.image { + Image(uiImage: image) + .resizable() + .aspectRatio(contentMode: .fit) + .frame(maxWidth: 300, maxHeight: 300) + .clipShape(RoundedRectangle(cornerRadius: 8)) + } + + // Grounded Response + if let groundingMetadata = message.groundingMetadata { + 
GroundedResponseView(message: message, groundingMetadata: groundingMetadata) + } else { + // Non-grounded response + ResponseTextView(message: message) + } + } } } } @@ -62,11 +89,11 @@ struct ResponseTextView: View { var message: ChatMessage var body: some View { - Markdown(message.message) + Markdown(message.content ?? "") .markdownTextStyle { FontFamilyVariant(.normal) FontSize(.em(0.85)) - ForegroundColor(message.participant == .system ? Color(UIColor.label) : .white) + ForegroundColor(message.participant == .other ? Color(UIColor.label) : .white) } .markdownBlockStyle(\.codeBlock) { configuration in configuration.label @@ -87,24 +114,41 @@ struct ResponseTextView: View { struct MessageView: View { var message: ChatMessage + private var participantLabel: String { + message.participant == .user ? "User" : "Model" + } + var body: some View { - HStack { - if message.participant == .user { - Spacer() - } - MessageContentView(message: message) - .padding(10) - .background(message.participant == .system - ? Color(UIColor.systemFill) - : Color(UIColor.systemBlue)) - .roundedCorner(10, - corners: [ - .topLeft, - .topRight, - message.participant == .system ? .bottomRight : .bottomLeft, - ]) - if message.participant == .system { - Spacer() + VStack(alignment: message.participant == .user ? .trailing : .leading, spacing: 4) { + // Sender label + Text(participantLabel) + .font(.caption2) + .fontWeight(.medium) + .foregroundColor(.secondary) + .textCase(.uppercase) + .padding(.horizontal, 8) + .padding(.vertical, 2) + .frame(maxWidth: .infinity, alignment: message.participant == .user ? .trailing : .leading) + + // Message content + HStack { + if message.participant == .user { + Spacer() + } + MessageContentView(message: message) + .padding(10) + .background(message.participant == .other + ? Color(UIColor.systemFill) + : Color(UIColor.systemBlue)) + .roundedCorner(10, + corners: [ + .topLeft, + .topRight, + message.participant == .other ? .bottomRight : .bottomLeft, + ]) + if message.participant == .other { + Spacer() + } } } .listRowSeparator(.hidden) @@ -118,7 +162,7 @@ struct MessageView_Previews: PreviewProvider { MessageView(message: ChatMessage.samples[0]) MessageView(message: ChatMessage.samples[1]) MessageView(message: ChatMessage.samples[2]) - MessageView(message: ChatMessage(message: "Hello!", participant: .system, pending: true)) + MessageView(message: ChatMessage(content: "Hello!", participant: .other, pending: true)) } .listStyle(.plain) .navigationTitle("Chat example") diff --git a/firebaseai/FirebaseAIExample/Features/FunctionCalling/Screens/FunctionCallingScreen.swift b/firebaseai/FirebaseAIExample/Features/FunctionCalling/Screens/FunctionCallingScreen.swift new file mode 100644 index 000000000..3a58641c6 --- /dev/null +++ b/firebaseai/FirebaseAIExample/Features/FunctionCalling/Screens/FunctionCallingScreen.swift @@ -0,0 +1,71 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
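+
+// A sketch of the tool declaration this screen exercises (illustrative, not
+// the shipped Sample data): the "fetchWeather" tool is attached to the model
+// through `sample?.tools` in FunctionCallingViewModel, and is assumed to
+// look roughly like this:
+//
+//   let fetchWeatherTool = Tool.functionDeclarations([
+//     FunctionDeclaration(
+//       name: "fetchWeather",
+//       description: "Get the weather conditions for a US city on a date.",
+//       parameters: [
+//         "city": .string(description: "The city of the location"),
+//         "state": .string(description: "The US state of the location"),
+//         "date": .string(description: "The date, in YYYY-MM-DD format"),
+//       ]
+//     ),
+//   ])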
+ +#if canImport(FirebaseAILogic) + import FirebaseAILogic +#else + import FirebaseAI +#endif +import SwiftUI +import ConversationKit + +struct FunctionCallingScreen: View { + let backendType: BackendOption + @StateObject var viewModel: FunctionCallingViewModel + + init(backendType: BackendOption, sample: Sample? = nil) { + self.backendType = backendType + _viewModel = + StateObject(wrappedValue: FunctionCallingViewModel(backendType: backendType, + sample: sample)) + } + + var body: some View { + NavigationStack { + ConversationView(messages: $viewModel.messages, + userPrompt: viewModel.initialPrompt) { message in + MessageView(message: message) + } + .disableAttachments() + .onSendMessage { message in + await viewModel.sendMessage(message.content ?? "", streaming: true) + } + .onError { error in + viewModel.presentErrorDetails = true + } + .sheet(isPresented: $viewModel.presentErrorDetails) { + if let error = viewModel.error { + ErrorDetailsView(error: error) + } + } + .toolbar { + ToolbarItem(placement: .primaryAction) { + Button(action: newChat) { + Image(systemName: "square.and.pencil") + } + } + } + .navigationTitle(viewModel.title) + .navigationBarTitleDisplayMode(.inline) + } + } + + private func newChat() { + viewModel.startNewChat() + } +} + +#Preview { + FunctionCallingScreen(backendType: .googleAI) +} diff --git a/firebaseai/GenerativeAIUIComponents/Package.swift b/firebaseai/FirebaseAIExample/Features/FunctionCalling/Services/WeatherService.swift similarity index 54% rename from firebaseai/GenerativeAIUIComponents/Package.swift rename to firebaseai/FirebaseAIExample/Features/FunctionCalling/Services/WeatherService.swift index 808f5f42a..8b257af1c 100644 --- a/firebaseai/GenerativeAIUIComponents/Package.swift +++ b/firebaseai/FirebaseAIExample/Features/FunctionCalling/Services/WeatherService.swift @@ -1,7 +1,4 @@ -// swift-tools-version: 5.9 -// The swift-tools-version declares the minimum version of Swift required to build this package. - -// Copyright 2023 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -14,22 +11,21 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
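+
+// WeatherService below is a stub: it ignores its arguments and returns a
+// canned JSONObject, so the function-calling round trip can be exercised
+// without a real weather API. The keys ("temperature", "chancePrecipitation",
+// "cloudCover") are what the model receives in the FunctionResponsePart.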
-import PackageDescription
-let package = Package(
-  name: "GenerativeAIUIComponents",
-  platforms: [
-    .iOS(.v16),
-  ],
-  products: [
-    .library(
-      name: "GenerativeAIUIComponents",
-      targets: ["GenerativeAIUIComponents"]
-    ),
-  ],
-  targets: [
-    .target(
-      name: "GenerativeAIUIComponents"
-    ),
-  ]
-)
+#if canImport(FirebaseAILogic)
+  import FirebaseAILogic
+#else
+  import FirebaseAI
+#endif
+import Foundation
+import UIKit
+
+class WeatherService {
+  public static func fetchWeather(city: String, state: String, date: String) -> JSONObject {
+    return [
+      "temperature": .number(38),
+      "chancePrecipitation": .string("56%"),
+      "cloudCover": .string("partlyCloudy"),
+    ]
+  }
+}
diff --git a/firebaseai/FirebaseAIExample/Features/FunctionCalling/ViewModels/FunctionCallingViewModel.swift b/firebaseai/FirebaseAIExample/Features/FunctionCalling/ViewModels/FunctionCallingViewModel.swift
new file mode 100644
index 000000000..672ddd378
--- /dev/null
+++ b/firebaseai/FirebaseAIExample/Features/FunctionCalling/ViewModels/FunctionCallingViewModel.swift
@@ -0,0 +1,288 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#if canImport(FirebaseAILogic)
+  import FirebaseAILogic
+#else
+  import FirebaseAI
+#endif
+import Foundation
+import UIKit
+import Combine
+import ConversationKit
+
+@MainActor
+class FunctionCallingViewModel: ObservableObject {
+  /// This array holds both the user's and the system's chat messages
+  @Published var messages = [ChatMessage]()
+
+  /// Indicates we're waiting for the model to finish
+  @Published var busy = false
+
+  @Published var error: Error?
+  var hasError: Bool {
+    return error != nil
+  }
+
+  @Published var presentErrorDetails: Bool = false
+
+  @Published var initialPrompt: String = ""
+  @Published var title: String = ""
+
+  private var model: GenerativeModel
+  private var chat: Chat
+
+  private var chatTask: Task<Void, Never>?
+
+  private var sample: Sample?
+  private var backendType: BackendOption
+
+  init(backendType: BackendOption, sample: Sample? = nil) {
+    self.sample = sample
+    self.backendType = backendType
+
+    let firebaseService = backendType == .googleAI
+      ? FirebaseAI.firebaseAI(backend: .googleAI())
+      : FirebaseAI.firebaseAI(backend: .vertexAI())
+
+    // create a generative model with sample data
+    model = firebaseService.generativeModel(
+      modelName: sample?.modelName ?? "gemini-2.5-flash-lite",
+      tools: sample?.tools,
+      systemInstruction: sample?.systemInstruction
+    )
+
+    chat = model.startChat()
+
+    initialPrompt = sample?.initialPrompt ?? ""
+    title = sample?.title ?? 
"" + } + + func sendMessage(_ text: String, streaming: Bool = true) async { + error = nil + if streaming { + await internalSendMessageStreaming(text) + } else { + await internalSendMessage(text) + } + } + + func startNewChat() { + stop() + error = nil + chat = model.startChat() + messages.removeAll() + initialPrompt = "" + } + + func stop() { + chatTask?.cancel() + error = nil + } + + private func internalSendMessageStreaming(_ text: String) async { + chatTask?.cancel() + + chatTask = Task { + busy = true + defer { + busy = false + } + + // first, add the user's message to the chat + let userMessage = ChatMessage(content: text, participant: .user) + messages.append(userMessage) + + // add a pending message while we're waiting for a response from the backend + let systemMessage = ChatMessage.pending(participant: .other) + messages.append(systemMessage) + + do { + let responseStream = try chat.sendMessageStream(text) + + var functionCalls = [FunctionCallPart]() + + for try await chunk in responseStream { + if !chunk.functionCalls.isEmpty { + functionCalls.append(contentsOf: chunk.functionCalls) + } + if let text = chunk.text { + messages[messages.count - 1] + .content = (messages[messages.count - 1].content ?? "") + text + messages[messages.count - 1].pending = false + } + } + + // On functionCalls, never keep reading the old stream or call the second API inside the first for-loop. + // Start a NEW stream only after the function response turn is sent. + if !functionCalls.isEmpty { + try await handleFunctionCallsStreaming(functionCalls) + } + } catch { + self.error = error + print(error.localizedDescription) + let errorMessage = ChatMessage(content: "An error occurred. Please try again.", + participant: .other, + error: error, + pending: false) + messages[messages.count - 1] = errorMessage + } + } + } + + private func internalSendMessage(_ text: String) async { + chatTask?.cancel() + + chatTask = Task { + busy = true + defer { + busy = false + } + + // first, add the user's message to the chat + let userMessage = ChatMessage(content: text, participant: .user) + messages.append(userMessage) + + // add a pending message while we're waiting for a response from the backend + let systemMessage = ChatMessage.pending(participant: .other) + messages.append(systemMessage) + + do { + let response = try await chat.sendMessage(text) + + if !response.functionCalls.isEmpty { + try await handleFunctionCalls(response) + } else { + if let responseText = response.text { + // replace pending message with backend response + messages[messages.count - 1].content = responseText + messages[messages.count - 1].pending = false + } + } + } catch { + self.error = error + print(error.localizedDescription) + let errorMessage = ChatMessage(content: "An error occurred. 
Please try again.", + participant: .other, + error: error, + pending: false) + messages[messages.count - 1] = errorMessage + } + } + } + + private func handleFunctionCallsStreaming(_ functionCalls: [FunctionCallPart]) async throws { + var functionResponses = [FunctionResponsePart]() + + for functionCall in functionCalls { + switch functionCall.name { + case "fetchWeather": + guard case let .string(city) = functionCall.args["city"], + case let .string(state) = functionCall.args["state"], + case let .string(date) = functionCall.args["date"] else { + throw NSError( + domain: "FunctionCallingError", + code: 0, + userInfo: [ + NSLocalizedDescriptionKey: "Malformed arguments for fetchWeather: \(functionCall.args)", + ] + ) + } + + functionResponses.append( + FunctionResponsePart( + name: functionCall.name, + response: WeatherService.fetchWeather(city: city, state: state, date: date) + ) + ) + default: + print("Unknown function named \"\(functionCall.name)\".") + } + } + + if !functionResponses.isEmpty { + let finalResponse = try chat + .sendMessageStream([ModelContent(role: "function", parts: functionResponses)]) + + for try await chunk in finalResponse { + guard let candidate = chunk.candidates.first else { + throw NSError( + domain: "FunctionCallingError", + code: 1, + userInfo: [NSLocalizedDescriptionKey: "No candidate in response chunk"] + ) + } + + for part in candidate.content.parts { + if let textPart = part as? TextPart { + messages[messages.count - 1] + .content = (messages[messages.count - 1].content ?? "") + textPart.text + messages[messages.count - 1].pending = false + } + } + } + } + } + + private func handleFunctionCalls(_ response: GenerateContentResponse) async throws { + var functionResponses = [FunctionResponsePart]() + + for functionCall in response.functionCalls { + switch functionCall.name { + case "fetchWeather": + guard case let .string(city) = functionCall.args["city"], + case let .string(state) = functionCall.args["state"], + case let .string(date) = functionCall.args["date"] else { + throw NSError( + domain: "FunctionCallingError", + code: 0, + userInfo: [ + NSLocalizedDescriptionKey: "Malformed arguments for fetchWeather: \(functionCall.args)", + ] + ) + } + + functionResponses.append( + FunctionResponsePart( + name: functionCall.name, + response: WeatherService.fetchWeather(city: city, state: state, date: date) + ) + ) + default: + print("Unknown function named \"\(functionCall.name)\".") + } + } + + if !functionResponses.isEmpty { + let finalResponse = try await chat + .sendMessage([ModelContent(role: "function", parts: functionResponses)]) + + guard let candidate = finalResponse.candidates.first else { + throw NSError( + domain: "FunctionCallingError", + code: 1, + userInfo: [NSLocalizedDescriptionKey: "No candidate in response"] + ) + } + + for part in candidate.content.parts { + if let textPart = part as? TextPart { + messages[messages.count - 1] + .content = (messages[messages.count - 1].content ?? 
"") + textPart.text + messages[messages.count - 1].pending = false + } + } + } + } +} diff --git a/firebaseai/FirebaseAIExample/Features/GenerativeAIText/Screens/GenerateContentFromTemplateScreen.swift b/firebaseai/FirebaseAIExample/Features/GenerativeAIText/Screens/GenerateContentFromTemplateScreen.swift new file mode 100644 index 000000000..539cf95f7 --- /dev/null +++ b/firebaseai/FirebaseAIExample/Features/GenerativeAIText/Screens/GenerateContentFromTemplateScreen.swift @@ -0,0 +1,110 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import ConversationKit +import MarkdownUI +import SwiftUI +#if canImport(FirebaseAILogic) + import FirebaseAILogic +#else + import FirebaseAI +#endif + +struct GenerateContentFromTemplateScreen: View { + let backendType: BackendOption + @StateObject var viewModel: GenerateContentFromTemplateViewModel + + init(backendType: BackendOption, sample: Sample? = nil) { + self.backendType = backendType + _viewModel = + StateObject(wrappedValue: GenerateContentFromTemplateViewModel(backendType: backendType, + sample: sample)) + } + + enum FocusedField: Hashable { + case message + } + + @FocusState + var focusedField: FocusedField? + + var body: some View { + ZStack { + ScrollView { + VStack { + MessageComposerView(message: $viewModel.userInput) + .padding(.bottom, 10) + .focused($focusedField, equals: .message) + .disableAttachments() + .onSubmitAction { sendOrStop() } + + if viewModel.error != nil { + HStack { + Text("An error occurred.") + Button("More information", systemImage: "info.circle") { + viewModel.presentErrorDetails = true + } + .labelStyle(.iconOnly) + } + } + + HStack(alignment: .top) { + Image(systemName: "text.bubble.fill") + .font(.title2) + + Markdown(viewModel.content) + } + .padding() + } + } + if viewModel.inProgress { + ProgressOverlay() + } + } + .onTapGesture { + focusedField = nil + } + .sheet(isPresented: $viewModel.presentErrorDetails) { + if let error = viewModel.error { + ErrorDetailsView(error: error) + } + } + .navigationTitle("Story teller") + .navigationBarTitleDisplayMode(.inline) + .onAppear { + focusedField = .message + } + } + + private func sendMessage() { + Task { + await viewModel.generateContent(prompt: viewModel.userInput) + focusedField = .message + } + } + + private func sendOrStop() { + if viewModel.inProgress { + viewModel.stop() + } else { + sendMessage() + } + } +} + +#Preview { + NavigationStack { + GenerateContentFromTemplateScreen(backendType: .googleAI) + } +} diff --git a/firebaseai/FirebaseAIExample/Features/GenerativeAIText/ViewModels/GenerateContentFromTemplateViewModel.swift b/firebaseai/FirebaseAIExample/Features/GenerativeAIText/ViewModels/GenerateContentFromTemplateViewModel.swift new file mode 100644 index 000000000..642428417 --- /dev/null +++ b/firebaseai/FirebaseAIExample/Features/GenerativeAIText/ViewModels/GenerateContentFromTemplateViewModel.swift @@ -0,0 +1,110 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the 
"License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#if canImport(FirebaseAILogic) + import FirebaseAILogic +#else + import FirebaseAI +#endif +import Foundation +import OSLog +import SwiftUI +import Combine + +@MainActor +class GenerateContentFromTemplateViewModel: ObservableObject { + private var logger = Logger(subsystem: Bundle.main.bundleIdentifier!, category: "generative-ai") + + @Published + var userInput: String = "" + + @Published + var content: String = "" + + @Published + var error: Error? + var hasError: Bool { + return error != nil + } + + @Published + var presentErrorDetails: Bool = false + + @Published + var inProgress = false + + private let model: TemplateGenerativeModel + private var backendType: BackendOption + + private var generateContentTask: Task? + + private var sample: Sample? + + init(backendType: BackendOption, sample: Sample? = nil) { + self.sample = sample + self.backendType = backendType + + let firebaseService = backendType == .googleAI + ? FirebaseAI.firebaseAI(backend: .googleAI()) + : FirebaseAI.firebaseAI(backend: .vertexAI()) + + model = firebaseService.templateGenerativeModel() + + if let sample { + userInput = sample.initialPrompt ?? "" + } + } + + func generateContent(prompt: String) async { + stop() + + generateContentTask = Task { + inProgress = true + defer { + inProgress = false + } + + // Clear previous content before generating new content + content = "" + + do { + let responseStream = try model.generateContentStream( + templateID: "apple-qs-greeting", + inputs: [ + "name": prompt, + "language": "Spanish", + ] + ) + + for try await chunk in responseStream { + if let text = chunk.text { + if !Task.isCancelled { + content += text + } + } + } + } catch { + if !Task.isCancelled { + self.error = error + logger.error("Error generating content from template: \(error)") + } + } + } + } + + func stop() { + generateContentTask?.cancel() + generateContentTask = nil + } +} diff --git a/firebaseai/FirebaseAIExample/Features/Grounding/Screens/GroundingScreen.swift b/firebaseai/FirebaseAIExample/Features/Grounding/Screens/GroundingScreen.swift new file mode 100644 index 000000000..77bc414da --- /dev/null +++ b/firebaseai/FirebaseAIExample/Features/Grounding/Screens/GroundingScreen.swift @@ -0,0 +1,71 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#if canImport(FirebaseAILogic) + import FirebaseAILogic +#else + import FirebaseAI +#endif +import SwiftUI +import ConversationKit + +struct GroundingScreen: View { + let backendType: BackendOption + @StateObject var viewModel: GroundingViewModel + + init(backendType: BackendOption, sample: Sample? = nil) { + self.backendType = backendType + _viewModel = + StateObject(wrappedValue: GroundingViewModel(backendType: backendType, + sample: sample)) + } + + var body: some View { + NavigationStack { + ConversationView(messages: $viewModel.messages, + userPrompt: viewModel.initialPrompt) { message in + MessageView(message: message) + } + .disableAttachments() + .onSendMessage { message in + await viewModel.sendMessage(message.content ?? "", streaming: true) + } + .onError { error in + viewModel.presentErrorDetails = true + } + .sheet(isPresented: $viewModel.presentErrorDetails) { + if let error = viewModel.error { + ErrorDetailsView(error: error) + } + } + .toolbar { + ToolbarItem(placement: .primaryAction) { + Button(action: newChat) { + Image(systemName: "square.and.pencil") + } + } + } + .navigationTitle(viewModel.title) + .navigationBarTitleDisplayMode(.inline) + } + } + + private func newChat() { + viewModel.startNewChat() + } +} + +#Preview { + GroundingScreen(backendType: .googleAI) +} diff --git a/firebaseai/FirebaseAIExample/ChatExample/ViewModels/ConversationViewModel.swift b/firebaseai/FirebaseAIExample/Features/Grounding/ViewModels/GroundingViewModel.swift similarity index 63% rename from firebaseai/FirebaseAIExample/ChatExample/ViewModels/ConversationViewModel.swift rename to firebaseai/FirebaseAIExample/Features/Grounding/ViewModels/GroundingViewModel.swift index 1326ac95a..7085cd8b5 100644 --- a/firebaseai/FirebaseAIExample/ChatExample/ViewModels/ConversationViewModel.swift +++ b/firebaseai/FirebaseAIExample/Features/Grounding/ViewModels/GroundingViewModel.swift @@ -1,4 +1,4 @@ -// Copyright 2023 Google LLC +// Copyright 2025 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -18,10 +18,12 @@ import FirebaseAI #endif import Foundation +import Combine import UIKit +import ConversationKit @MainActor -class ConversationViewModel: ObservableObject { +class GroundingViewModel: ObservableObject { /// This array holds both the user's and the system's chat messages @Published var messages = [ChatMessage]() @@ -33,21 +35,38 @@ class ConversationViewModel: ObservableObject { return error != nil } + @Published var presentErrorDetails: Bool = false + + @Published var initialPrompt: String = "" + @Published var title: String = "" + private var model: GenerativeModel private var chat: Chat - private var stopGenerating = false private var chatTask: Task? - init(firebaseService: FirebaseAI, model: GenerativeModel? = nil) { - if let model { - self.model = model - } else { - self.model = firebaseService.generativeModel( - modelName: "gemini-2.0-flash-001" - ) - } - chat = self.model.startChat() + private var sample: Sample? + + private var backendType: BackendOption + + init(backendType: BackendOption, sample: Sample? = nil) { + self.sample = sample + self.backendType = backendType + + let firebaseService = backendType == .googleAI + ? FirebaseAI.firebaseAI(backend: .googleAI()) + : FirebaseAI.firebaseAI(backend: .vertexAI()) + + model = firebaseService.generativeModel( + modelName: sample?.modelName ?? 
"gemini-2.5-flash", + tools: sample?.tools, + systemInstruction: sample?.systemInstruction + ) + + chat = model.startChat() + + initialPrompt = sample?.initialPrompt ?? "" + title = sample?.title ?? "" } func sendMessage(_ text: String, streaming: Bool = true) async { @@ -64,6 +83,7 @@ class ConversationViewModel: ObservableObject { error = nil chat = model.startChat() messages.removeAll() + initialPrompt = "" } func stop() { @@ -81,11 +101,11 @@ class ConversationViewModel: ObservableObject { } // first, add the user's message to the chat - let userMessage = ChatMessage(message: text, participant: .user) + let userMessage = ChatMessage(content: text, participant: .user) messages.append(userMessage) // add a pending message while we're waiting for a response from the backend - let systemMessage = ChatMessage.pending(participant: .system) + let systemMessage = ChatMessage.pending(participant: .other) messages.append(systemMessage) do { @@ -93,7 +113,8 @@ class ConversationViewModel: ObservableObject { for try await chunk in responseStream { messages[messages.count - 1].pending = false if let text = chunk.text { - messages[messages.count - 1].message += text + messages[messages.count - 1] + .content = (messages[messages.count - 1].content ?? "") + text } if let candidate = chunk.candidates.first { @@ -106,7 +127,11 @@ class ConversationViewModel: ObservableObject { } catch { self.error = error print(error.localizedDescription) - messages.removeLast() + let errorMessage = ChatMessage(content: "An error occurred. Please try again.", + participant: .other, + error: error, + pending: false) + messages[messages.count - 1] = errorMessage } } } @@ -121,11 +146,11 @@ class ConversationViewModel: ObservableObject { } // first, add the user's message to the chat - let userMessage = ChatMessage(message: text, participant: .user) + let userMessage = ChatMessage(content: text, participant: .user) messages.append(userMessage) // add a pending message while we're waiting for a response from the backend - let systemMessage = ChatMessage.pending(participant: .system) + let systemMessage = ChatMessage.pending(participant: .other) messages.append(systemMessage) do { @@ -134,7 +159,7 @@ class ConversationViewModel: ObservableObject { if let responseText = response?.text { // replace pending message with backend response - messages[messages.count - 1].message = responseText + messages[messages.count - 1].content = responseText messages[messages.count - 1].pending = false if let candidate = response?.candidates.first { @@ -143,10 +168,15 @@ class ConversationViewModel: ObservableObject { } } } + } catch { self.error = error print(error.localizedDescription) - messages.removeLast() + let errorMessage = ChatMessage(content: "An error occurred. 
Please try again.", + participant: .other, + error: error, + pending: false) + messages[messages.count - 1] = errorMessage } } } diff --git a/firebaseai/FirebaseAIExample/ChatExample/Views/Grounding/GoogleSearchSuggestionView.swift b/firebaseai/FirebaseAIExample/Features/Grounding/Views/GoogleSearchSuggestionView.swift similarity index 100% rename from firebaseai/FirebaseAIExample/ChatExample/Views/Grounding/GoogleSearchSuggestionView.swift rename to firebaseai/FirebaseAIExample/Features/Grounding/Views/GoogleSearchSuggestionView.swift diff --git a/firebaseai/FirebaseAIExample/ChatExample/Views/Grounding/GroundedResponseView.swift b/firebaseai/FirebaseAIExample/Features/Grounding/Views/GroundedResponseView.swift similarity index 100% rename from firebaseai/FirebaseAIExample/ChatExample/Views/Grounding/GroundedResponseView.swift rename to firebaseai/FirebaseAIExample/Features/Grounding/Views/GroundedResponseView.swift diff --git a/firebaseai/FirebaseAIExample/ImagenExample/ImagenFromTemplateScreen.swift b/firebaseai/FirebaseAIExample/Features/Imagen/ImagenFromTemplateScreen.swift similarity index 65% rename from firebaseai/FirebaseAIExample/ImagenExample/ImagenFromTemplateScreen.swift rename to firebaseai/FirebaseAIExample/Features/Imagen/ImagenFromTemplateScreen.swift index 8f2df7d27..7e762dbc0 100644 --- a/firebaseai/FirebaseAIExample/ImagenExample/ImagenFromTemplateScreen.swift +++ b/firebaseai/FirebaseAIExample/Features/Imagen/ImagenFromTemplateScreen.swift @@ -13,21 +13,22 @@ // limitations under the License. import SwiftUI -import GenerativeAIUIComponents #if canImport(FirebaseAILogic) import FirebaseAILogic #else import FirebaseAI #endif +import ConversationKit struct ImagenFromTemplateScreen: View { - let firebaseService: FirebaseAI + let backendType: BackendOption @StateObject var viewModel: ImagenFromTemplateViewModel - init(firebaseService: FirebaseAI) { - self.firebaseService = firebaseService + init(backendType: BackendOption, sample: Sample? = nil) { + self.backendType = backendType _viewModel = - StateObject(wrappedValue: ImagenFromTemplateViewModel(firebaseService: firebaseService)) + StateObject(wrappedValue: ImagenFromTemplateViewModel(backendType: backendType, + sample: sample)) } enum FocusedField: Hashable { @@ -41,17 +42,21 @@ struct ImagenFromTemplateScreen: View { ZStack { ScrollView { VStack { - InputField( - "Enter a prompt to generate an image from template", - text: $viewModel.userInput - ) { - Image( - systemName: viewModel.inProgress ? 
"stop.circle.fill" : "paperplane.circle.fill" - ) - .font(.title) + MessageComposerView(message: $viewModel.userInput) + .padding(.bottom, 10) + .focused($focusedField, equals: .message) + .disableAttachments() + .onSubmitAction { sendOrStop() } + + if viewModel.error != nil { + HStack { + Text("An error occurred.") + Button("More information", systemImage: "info.circle") { + viewModel.presentErrorDetails = true + } + .labelStyle(.iconOnly) + } } - .focused($focusedField, equals: .message) - .onSubmit { sendOrStop() } let spacing: CGFloat = 10 LazyVGrid(columns: [ @@ -76,7 +81,13 @@ struct ImagenFromTemplateScreen: View { .onTapGesture { focusedField = nil } + .sheet(isPresented: $viewModel.presentErrorDetails) { + if let error = viewModel.error { + ErrorDetailsView(error: error) + } + } .navigationTitle("Imagen Template") + .navigationBarTitleDisplayMode(.inline) .onAppear { focusedField = .message } @@ -99,5 +110,5 @@ struct ImagenFromTemplateScreen: View { } #Preview { - ImagenFromTemplateScreen(firebaseService: FirebaseAI.firebaseAI()) + ImagenFromTemplateScreen(backendType: .googleAI) } diff --git a/firebaseai/FirebaseAIExample/ImagenExample/ImagenFromTemplateViewModel.swift b/firebaseai/FirebaseAIExample/Features/Imagen/ImagenFromTemplateViewModel.swift similarity index 69% rename from firebaseai/FirebaseAIExample/ImagenExample/ImagenFromTemplateViewModel.swift rename to firebaseai/FirebaseAIExample/Features/Imagen/ImagenFromTemplateViewModel.swift index 02bb15bfc..50c821d0c 100644 --- a/firebaseai/FirebaseAIExample/ImagenExample/ImagenFromTemplateViewModel.swift +++ b/firebaseai/FirebaseAIExample/Features/Imagen/ImagenFromTemplateViewModel.swift @@ -20,6 +20,7 @@ import Foundation import OSLog import SwiftUI +import Combine // Template Details // @@ -45,16 +46,32 @@ class ImagenFromTemplateViewModel: ObservableObject { var images = [UIImage]() @Published - var errorMessage: String? + var error: Error? + var hasError: Bool { + return error != nil + } + + @Published + var presentErrorDetails: Bool = false @Published var inProgress = false private let model: TemplateImagenModel + private var backendType: BackendOption private var generateImagesTask: Task? - init(firebaseService: FirebaseAI) { + private var sample: Sample? + + init(backendType: BackendOption, sample: Sample? = nil) { + self.sample = sample + self.backendType = backendType + + let firebaseService = backendType == .googleAI + ? FirebaseAI.firebaseAI(backend: .googleAI()) + : FirebaseAI.firebaseAI(backend: .vertexAI()) + model = firebaseService.templateImagenModel() } @@ -68,18 +85,26 @@ class ImagenFromTemplateViewModel: ObservableObject { } do { + // 1. Call generateImages with the text prompt let response = try await model.generateImages( - templateID: "image-generation-basic", + templateID: "imagen-generation-basic", inputs: [ "prompt": prompt, ] ) + // 2. Print the reason images were filtered out, if any. + if let filteredReason = response.filteredReason { + print("Image(s) Blocked: \(filteredReason)") + } + if !Task.isCancelled { + // 3. 
Convert the image data to UIImage for display in the UI images = response.images.compactMap { UIImage(data: $0.data) } } } catch { if !Task.isCancelled { + self.error = error logger.error("Error generating images from template: \(error)") } } diff --git a/firebaseai/FirebaseAIExample/ImagenExample/ImagenScreen.swift b/firebaseai/FirebaseAIExample/Features/Imagen/ImagenScreen.swift similarity index 61% rename from firebaseai/FirebaseAIExample/ImagenExample/ImagenScreen.swift rename to firebaseai/FirebaseAIExample/Features/Imagen/ImagenScreen.swift index 830bd3bc7..76a2bbb2e 100644 --- a/firebaseai/FirebaseAIExample/ImagenExample/ImagenScreen.swift +++ b/firebaseai/FirebaseAIExample/Features/Imagen/ImagenScreen.swift @@ -13,20 +13,25 @@ // limitations under the License. import SwiftUI -import GenerativeAIUIComponents #if canImport(FirebaseAILogic) import FirebaseAILogic #else import FirebaseAI #endif +import ConversationKit struct ImagenScreen: View { - let firebaseService: FirebaseAI + let backendType: BackendOption @StateObject var viewModel: ImagenViewModel - init(firebaseService: FirebaseAI) { - self.firebaseService = firebaseService - _viewModel = StateObject(wrappedValue: ImagenViewModel(firebaseService: firebaseService)) + @State + private var userPrompt = "" + + init(backendType: BackendOption, sample: Sample? = nil) { + self.backendType = backendType + _viewModel = + StateObject(wrappedValue: ImagenViewModel(backendType: backendType, + sample: sample)) } enum FocusedField: Hashable { @@ -40,14 +45,21 @@ struct ImagenScreen: View { ZStack { ScrollView { VStack { - InputField("Enter a prompt to generate an image", text: $viewModel.userInput) { - Image( - systemName: viewModel.inProgress ? "stop.circle.fill" : "paperplane.circle.fill" - ) - .font(.title) + MessageComposerView(message: $userPrompt) + .padding(.bottom, 10) + .focused($focusedField, equals: .message) + .disableAttachments() + .onSubmitAction { sendOrStop() } + + if viewModel.error != nil { + HStack { + Text("An error occurred.") + Button("More information", systemImage: "info.circle") { + viewModel.presentErrorDetails = true + } + .labelStyle(.iconOnly) + } } - .focused($focusedField, equals: .message) - .onSubmit { sendOrStop() } let spacing: CGFloat = 10 LazyVGrid(columns: [ @@ -72,15 +84,24 @@ struct ImagenScreen: View { .onTapGesture { focusedField = nil } + .sheet(isPresented: $viewModel.presentErrorDetails) { + if let error = viewModel.error { + ErrorDetailsView(error: error) + } + } .navigationTitle("Imagen example") + .navigationBarTitleDisplayMode(.inline) .onAppear { focusedField = .message + if userPrompt.isEmpty && !viewModel.initialPrompt.isEmpty { + userPrompt = viewModel.initialPrompt + } } } private func sendMessage() { Task { - await viewModel.generateImage(prompt: viewModel.userInput) + await viewModel.generateImage(prompt: userPrompt) focusedField = .message } } @@ -94,31 +115,6 @@ struct ImagenScreen: View { } } -struct ProgressOverlay: View { - var body: some View { - ZStack { - Color.black.opacity(0.3) - .ignoresSafeArea() - - ZStack { - RoundedRectangle(cornerRadius: 16) - .fill(Material.ultraThinMaterial) - .frame(width: 120, height: 100) - .shadow(radius: 8) - - VStack(spacing: 12) { - ProgressView() - .scaleEffect(1.5) - - Text("Loading...") - .font(.subheadline) - .foregroundColor(.secondary) - } - } - } - } -} - #Preview { - ImagenScreen(firebaseService: FirebaseAI.firebaseAI()) + ImagenScreen(backendType: .googleAI) } diff --git 
a/firebaseai/FirebaseAIExample/ImagenExample/ImagenViewModel.swift b/firebaseai/FirebaseAIExample/Features/Imagen/ImagenViewModel.swift similarity index 70% rename from firebaseai/FirebaseAIExample/ImagenExample/ImagenViewModel.swift rename to firebaseai/FirebaseAIExample/Features/Imagen/ImagenViewModel.swift index 826f88660..2328f83fe 100644 --- a/firebaseai/FirebaseAIExample/ImagenExample/ImagenViewModel.swift +++ b/firebaseai/FirebaseAIExample/Features/Imagen/ImagenViewModel.swift @@ -18,6 +18,7 @@ import FirebaseAI #endif import Foundation +import Combine import OSLog import SwiftUI @@ -26,35 +27,53 @@ class ImagenViewModel: ObservableObject { private var logger = Logger(subsystem: Bundle.main.bundleIdentifier!, category: "generative-ai") @Published - var userInput: String = "" + var initialPrompt: String = "" @Published var images = [UIImage]() @Published - var errorMessage: String? + var error: Error? + var hasError: Bool { + return error != nil + } + + @Published + var presentErrorDetails: Bool = false @Published var inProgress = false private let model: ImagenModel + private var backendType: BackendOption private var generateImagesTask: Task? - init(firebaseService: FirebaseAI) { - let modelName = "imagen-3.0-generate-002" + private var sample: Sample? + + init(backendType: BackendOption, sample: Sample? = nil) { + self.sample = sample + self.backendType = backendType + + let firebaseService = backendType == .googleAI + ? FirebaseAI.firebaseAI(backend: .googleAI()) + : FirebaseAI.firebaseAI(backend: .vertexAI()) + + let modelName = "imagen-4.0-generate-001" let safetySettings = ImagenSafetySettings( safetyFilterLevel: .blockLowAndAbove ) var generationConfig = ImagenGenerationConfig() generationConfig.numberOfImages = 4 - generationConfig.aspectRatio = .landscape4x3 + generationConfig.aspectRatio = .square1x1 model = firebaseService.imagenModel( modelName: modelName, generationConfig: generationConfig, safetySettings: safetySettings ) + + initialPrompt = sample?.initialPrompt ?? "" } func generateImage(prompt: String) async { @@ -67,20 +86,21 @@ class ImagenViewModel: ObservableObject { } do { - // 4. Call generateImages with the text prompt + // 1. Call generateImages with the text prompt let response = try await model.generateImages(prompt: prompt) - // 5. Print the reason images were filtered out, if any. + // 2. Print the reason images were filtered out, if any. if let filteredReason = response.filteredReason { print("Image(s) Blocked: \(filteredReason)") } if !Task.isCancelled { - // 6. Convert the image data to UIImage for display in the UI + // 3. Convert the image data to UIImage for display in the UI images = response.images.compactMap { UIImage(data: $0.data) } } } catch { if !Task.isCancelled { + self.error = error logger.error("Error generating images: \(error)") } } diff --git a/firebaseai/FirebaseAIExample/Features/Multimodal/Models/MultimodalAttachment.swift b/firebaseai/FirebaseAIExample/Features/Multimodal/Models/MultimodalAttachment.swift new file mode 100644 index 000000000..d98d78f47 --- /dev/null +++ b/firebaseai/FirebaseAIExample/Features/Multimodal/Models/MultimodalAttachment.swift @@ -0,0 +1,272 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import Foundation +import SwiftUI +import PhotosUI +#if canImport(FirebaseAILogic) + import FirebaseAILogic +#else + import FirebaseAI +#endif +import ConversationKit + +public enum MultimodalAttachmentError: LocalizedError { + case unsupportedFileType(extension: String) + case noDataAvailable + case loadingFailed(Error) + case mimeTypeMismatch(expected: String, provided: String, extension: String) + + public var errorDescription: String? { + switch self { + case let .unsupportedFileType(ext): + return "Unsupported file format: .\(ext). Please select a supported format file." + case .noDataAvailable: + return "File data is not available" + case let .loadingFailed(error): + return "File loading failed: \(error.localizedDescription)" + case let .mimeTypeMismatch(expected, provided, ext): + return "MIME type mismatch for .\(ext) file: expected '\(expected)', got '\(provided)'" + } + } +} + +// MultimodalAttachment is a struct used for transporting data between ViewModels and AttachmentPreviewCard +public struct MultimodalAttachment: Attachment, Equatable { + public let id = UUID() + public let mimeType: String + public let data: Data? + public let url: URL? + public var isCloudStorage: Bool = false + + public static func == (lhs: MultimodalAttachment, rhs: MultimodalAttachment) -> Bool { + return lhs.id == rhs.id + } + + public func hash(into hasher: inout Hasher) { + hasher.combine(id) + } + + public init(mimeType: String, data: Data? = nil, url: URL? 
= nil) {
+    self.mimeType = mimeType
+    self.data = data
+    self.url = url
+  }
+
+  public init(fileDataPart: FileDataPart) {
+    mimeType = fileDataPart.mimeType
+    data = nil
+    url = URL(string: fileDataPart.uri)
+    isCloudStorage = true
+  }
+}
+
+// validate file type & mime type
+extension MultimodalAttachment {
+  public static let supportedFileExtensions: Set<String> = [
+    // Documents / text
+    "pdf", "txt", "text",
+    // Images
+    "jpg", "jpeg", "png", "webp",
+    // Video
+    "flv", "mov", "qt", "mpeg", "mpg", "ps", "mp4", "webm", "wmv", "3gp", "3gpp",
+    // Audio
+    "aac", "flac", "mp3", "m4a", "mpga", "mp4a", "opus", "pcm", "raw", "wav", "weba",
+  ]
+
+  public static func validateFileType(url: URL) throws {
+    let fileExtension = url.pathExtension.lowercased()
+    guard !fileExtension.isEmpty else {
+      throw MultimodalAttachmentError.unsupportedFileType(extension: "No extension")
+    }
+
+    guard supportedFileExtensions.contains(fileExtension) else {
+      throw MultimodalAttachmentError.unsupportedFileType(extension: fileExtension)
+    }
+  }
+
+  public static func validateMimeTypeMatch(url: URL, mimeType: String) throws {
+    let expectedMimeType = getMimeType(for: url)
+
+    guard mimeType == expectedMimeType else {
+      throw MultimodalAttachmentError.mimeTypeMismatch(
+        expected: expectedMimeType,
+        provided: mimeType,
+        extension: url.pathExtension
+      )
+    }
+  }
+
+  public static func validatePhotoType(_ item: PhotosPickerItem) throws -> String {
+    guard let fileExtension = item.supportedContentTypes.first?.preferredFilenameExtension else {
+      throw MultimodalAttachmentError.unsupportedFileType(extension: "No extension")
+    }
+
+    guard supportedFileExtensions.contains(fileExtension) else {
+      throw MultimodalAttachmentError.unsupportedFileType(extension: fileExtension)
+    }
+
+    guard let fileMimeType = item.supportedContentTypes.first?.preferredMIMEType else {
+      throw MultimodalAttachmentError.unsupportedFileType(extension: "No MIME type")
+    }
+
+    return fileMimeType
+  }
+}
+
+// load data from picker item or url
+extension MultimodalAttachment {
+  public static func fromPhotosPickerItem(_ item: PhotosPickerItem) async throws
+    -> MultimodalAttachment {
+    let fileMimeType = try validatePhotoType(item)
+
+    do {
+      guard let data = try await item.loadTransferable(type: Data.self) else {
+        throw MultimodalAttachmentError.noDataAvailable
+      }
+
+      return MultimodalAttachment(
+        mimeType: fileMimeType,
+        data: data
+      )
+    } catch let error as MultimodalAttachmentError {
+      throw error
+    } catch {
+      throw MultimodalAttachmentError.loadingFailed(error)
+    }
+  }
+
+  public static func fromFilePickerItem(from url: URL) async throws -> MultimodalAttachment {
+    try validateFileType(url: url)
+
+    do {
+      let data = try await Task.detached(priority: .utility) {
+        try Data(contentsOf: url)
+      }.value
+
+      let mimeType = Self.getMimeType(for: url)
+
+      return MultimodalAttachment(
+        mimeType: mimeType,
+        data: data,
+        url: url
+      )
+    } catch {
+      throw MultimodalAttachmentError.loadingFailed(error)
+    }
+  }
+
+  public static func fromURL(_ url: URL, mimeType: String) async throws -> MultimodalAttachment {
+    try validateFileType(url: url)
+    try validateMimeTypeMatch(url: url, mimeType: mimeType)
+
+    do {
+      let data = try await Task.detached(priority: .utility) {
+        try Data(contentsOf: url)
+      }.value
+
+      return MultimodalAttachment(
+        mimeType: mimeType,
+        data: data,
+        url: url
+      )
+    } catch {
+      throw MultimodalAttachmentError.loadingFailed(error)
+    }
+  }
+
+  public func toInlineDataPart() async -> InlineDataPart? 
{ + if let data = data, !data.isEmpty { + return InlineDataPart(data: data, mimeType: mimeType) + } + + // If the data is not available, try to read it from the url. + guard let url = url else { return nil } + do { + let data = try await Task.detached(priority: .utility) { + try Data(contentsOf: url) + }.value + + guard !data.isEmpty else { return nil } + return InlineDataPart(data: data, mimeType: mimeType) + } catch { + return nil + } + } + + private static func getMimeType(for url: URL) -> String { + let fileExtension = url.pathExtension.lowercased() + + switch fileExtension { + // Documents / text + case "pdf": + return "application/pdf" + case "txt", "text": + return "text/plain" + + // Images + case "jpg", "jpeg": + return "image/jpeg" + case "png": + return "image/png" + case "webp": + return "image/webp" + + // Video + case "flv": + return "video/x-flv" + case "mov", "qt": + return "video/quicktime" + case "mpeg": + return "video/mpeg" + case "mpg": + return "video/mpg" + case "ps": + return "video/mpegps" + case "mp4": + return "video/mp4" + case "webm": + return "video/webm" + case "wmv": + return "video/wmv" + case "3gp", "3gpp": + return "video/3gpp" + + // Audio + case "aac": + return "audio/aac" + case "flac": + return "audio/flac" + case "mp3": + return "audio/mpeg" + case "m4a": + return "audio/m4a" + case "mpga": + return "audio/mpga" + case "mp4a": + return "audio/mp4" + case "opus": + return "audio/opus" + case "pcm", "raw": + return "audio/pcm" + case "wav": + return "audio/wav" + case "weba": + return "audio/webm" + + default: + return "application/octet-stream" + } + } +} diff --git a/firebaseai/FirebaseAIExample/Features/Multimodal/Screens/MultimodalScreen.swift b/firebaseai/FirebaseAIExample/Features/Multimodal/Screens/MultimodalScreen.swift new file mode 100644 index 000000000..c3623cc77 --- /dev/null +++ b/firebaseai/FirebaseAIExample/Features/Multimodal/Screens/MultimodalScreen.swift @@ -0,0 +1,199 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#if canImport(FirebaseAILogic) + import FirebaseAILogic +#else + import FirebaseAI +#endif +import SwiftUI +import PhotosUI +import ConversationKit + +struct MultimodalScreen: View { + let backendType: BackendOption + @StateObject var viewModel: MultimodalViewModel + + @State private var showingPhotoPicker = false + @State private var showingFilePicker = false + @State private var showingLinkDialog = false + @State private var linkText = "" + @State private var linkMimeType = "" + @State private var selectedPhotoItems = [PhotosPickerItem]() + + init(backendType: BackendOption, sample: Sample? 
= nil) { + self.backendType = backendType + _viewModel = + StateObject(wrappedValue: MultimodalViewModel(backendType: backendType, + sample: sample)) + } + + var body: some View { + NavigationStack { + ConversationView(messages: $viewModel.messages, + attachments: $viewModel.attachments, + userPrompt: viewModel.initialPrompt) { message in + MessageView(message: message) + } + .attachmentActions { + Button(action: showLinkDialog) { + Label("Link", systemImage: "link") + } + Button(action: showFilePicker) { + Label("File", systemImage: "doc.text") + } + Button(action: showPhotoPicker) { + Label("Photo", systemImage: "photo.on.rectangle.angled") + } + } + .onSendMessage { message in + await viewModel.sendMessage(message.content ?? "", streaming: true) + } + .onError { error in + viewModel.presentErrorDetails = true + } + .sheet(isPresented: $viewModel.presentErrorDetails) { + if let error = viewModel.error { + ErrorDetailsView(error: error) + } + } + .photosPicker( + isPresented: $showingPhotoPicker, + selection: $selectedPhotoItems, + maxSelectionCount: 5, + matching: .any(of: [.images, .videos]) + ) + .fileImporter( + isPresented: $showingFilePicker, + allowedContentTypes: [.pdf, .audio], + allowsMultipleSelection: true + ) { result in + handleFileImport(result) + } + .alert("Add Web URL", isPresented: $showingLinkDialog) { + TextField("Enter URL", text: $linkText) + TextField("Enter mimeType", text: $linkMimeType) + Button("Add") { + handleLinkAttachment() + } + Button("Cancel", role: .cancel) { + linkText = "" + linkMimeType = "" + } + } + } + .onChange(of: selectedPhotoItems) { _, newItems in + handlePhotoSelection(newItems) + } + .toolbar { + ToolbarItem(placement: .primaryAction) { + Button(action: newChat) { + Image(systemName: "square.and.pencil") + } + } + } + .navigationTitle(viewModel.title) + .navigationBarTitleDisplayMode(.inline) + } + + private func newChat() { + viewModel.startNewChat() + } + + private func showPhotoPicker() { + showingPhotoPicker = true + } + + private func showFilePicker() { + showingFilePicker = true + } + + private func showLinkDialog() { + showingLinkDialog = true + } + + private func handlePhotoSelection(_ items: [PhotosPickerItem]) { + Task { + for item in items { + do { + let attachment = try await MultimodalAttachment.fromPhotosPickerItem(item) + await MainActor.run { + viewModel.addAttachment(attachment) + } + } catch { + await MainActor.run { + viewModel.error = error + viewModel.presentErrorDetails = true + } + } + } + await MainActor.run { + selectedPhotoItems = [] + } + } + } + + private func handleFileImport(_ result: Result<[URL], Error>) { + switch result { + case let .success(urls): + Task { + for url in urls { + do { + let attachment = try await MultimodalAttachment.fromFilePickerItem(from: url) + await MainActor.run { + viewModel.addAttachment(attachment) + } + } catch { + await MainActor.run { + viewModel.error = error + viewModel.presentErrorDetails = true + } + } + } + } + case let .failure(error): + viewModel.error = error + viewModel.presentErrorDetails = true + } + } + + private func handleLinkAttachment() { + guard !linkText.isEmpty, let url = URL(string: linkText) else { + return + } + + let trimmedMime = linkMimeType.lowercased().trimmingCharacters(in: .whitespacesAndNewlines) + Task { + do { + let attachment = try await MultimodalAttachment.fromURL(url, mimeType: trimmedMime) + await MainActor.run { + viewModel.addAttachment(attachment) + } + } catch { + await MainActor.run { + viewModel.error = error + 
viewModel.presentErrorDetails = true + } + } + await MainActor.run { + linkText = "" + linkMimeType = "" + } + } + } +} + +#Preview { + MultimodalScreen(backendType: .googleAI) +} diff --git a/firebaseai/FirebaseAIExample/Features/Multimodal/ViewModels/MultimodalViewModel.swift b/firebaseai/FirebaseAIExample/Features/Multimodal/ViewModels/MultimodalViewModel.swift new file mode 100644 index 000000000..cb13cb694 --- /dev/null +++ b/firebaseai/FirebaseAIExample/Features/Multimodal/ViewModels/MultimodalViewModel.swift @@ -0,0 +1,223 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#if canImport(FirebaseAILogic) + import FirebaseAILogic +#else + import FirebaseAI +#endif +import Foundation +import OSLog +import PhotosUI +import SwiftUI +import AVFoundation +import Combine +import ConversationKit + +@MainActor +class MultimodalViewModel: ObservableObject { + @Published var messages = [ChatMessage]() + @Published var initialPrompt: String = "" + @Published var title: String = "" + @Published var error: Error? + @Published var inProgress = false + + @Published var presentErrorDetails: Bool = false + + @Published var attachments = [MultimodalAttachment]() + + private var model: GenerativeModel + private var chat: Chat + private var chatTask: Task? + private let logger = Logger(subsystem: "com.example.firebaseai", category: "MultimodalViewModel") + + private var sample: Sample? + private var backendType: BackendOption + private var fileDataParts: [FileDataPart]? + + init(backendType: BackendOption, sample: Sample? = nil) { + self.sample = sample + self.backendType = backendType + + let firebaseService = backendType == .googleAI + ? FirebaseAI.firebaseAI(backend: .googleAI()) + : FirebaseAI.firebaseAI(backend: .vertexAI()) + + model = firebaseService.generativeModel( + modelName: sample?.modelName ?? "gemini-2.5-flash", + systemInstruction: sample?.systemInstruction + ) + + if let chatHistory = sample?.chatHistory, !chatHistory.isEmpty { + messages = ChatMessage.from(chatHistory) + chat = model.startChat(history: chatHistory) + } else { + chat = model.startChat() + } + + initialPrompt = sample?.initialPrompt ?? "" + title = sample?.title ?? 
"" + + fileDataParts = sample?.fileDataParts + if let fileDataParts = fileDataParts, !fileDataParts.isEmpty { + for fileDataPart in fileDataParts { + attachments.append(MultimodalAttachment(fileDataPart: fileDataPart)) + } + } + } + + func sendMessage(_ text: String, streaming: Bool = true) async { + error = nil + if streaming { + await internalSendMessageStreaming(text) + } else { + await internalSendMessage(text) + } + } + + func startNewChat() { + stop() + error = nil + chat = model.startChat() + messages.removeAll() + attachments.removeAll() + initialPrompt = "" + } + + func stop() { + chatTask?.cancel() + error = nil + } + + private func internalSendMessageStreaming(_ text: String) async { + chatTask?.cancel() + + chatTask = Task { + inProgress = true + defer { + inProgress = false + } + + let userMessage = ChatMessage(content: text, participant: .user, attachments: attachments) + messages.append(userMessage) + let systemMessage = ChatMessage.pending(participant: .other) + messages.append(systemMessage) + + do { + var parts: [any PartsRepresentable] = [text] + + if backendType == .vertexAI, let fileDataParts = fileDataParts { + // This is a patch for Cloud Storage support. Only available when using Vertex AI Gemini API. + // For non-text inputs (e.g., media files), you can attach files from Cloud Storage to the request. + // if you do not want to use Cloud Storage, you can remove this `if` statement. + // Reference: https://firebase.google.com/docs/ai-logic/solutions/cloud-storage + for fileDataPart in fileDataParts { + parts.append(fileDataPart) + } + } else { + for attachment in attachments { + if let inlineDataPart = await attachment.toInlineDataPart() { + parts.append(inlineDataPart) + } + } + } + + attachments.removeAll() + + let responseStream = try chat.sendMessageStream(parts) + for try await chunk in responseStream { + messages[messages.count - 1].pending = false + if let text = chunk.text { + messages[messages.count - 1] + .content = (messages[messages.count - 1].content ?? "") + text + } + } + } catch { + self.error = error + logger.error("\(error.localizedDescription)") + let errorMessage = ChatMessage(content: "An error occurred. Please try again.", + participant: .other, + error: error, + pending: false) + messages[messages.count - 1] = errorMessage + } + } + } + + private func internalSendMessage(_ text: String) async { + chatTask?.cancel() + + chatTask = Task { + inProgress = true + defer { + inProgress = false + } + let userMessage = ChatMessage(content: text, participant: .user, attachments: attachments) + messages.append(userMessage) + + let systemMessage = ChatMessage.pending(participant: .other) + messages.append(systemMessage) + + do { + var parts: [any PartsRepresentable] = [text] + + if backendType == .vertexAI, let fileDataParts = fileDataParts { + // This is a patch for Cloud Storage support. Only available when using Vertex AI Gemini API. + // For non-text inputs (e.g., media files), you can attach files from Cloud Storage to the request. + // if you do not want to use Cloud Storage, you can remove this `if` statement. 
+ // Reference: https://firebase.google.com/docs/ai-logic/solutions/cloud-storage + for fileDataPart in fileDataParts { + parts.append(fileDataPart) + } + } else { + for attachment in attachments { + if let inlineDataPart = await attachment.toInlineDataPart() { + parts.append(inlineDataPart) + } + } + } + + attachments.removeAll() + + let response = try await chat.sendMessage(parts) + + if let responseText = response.text { + messages[messages.count - 1].content = responseText + messages[messages.count - 1].pending = false + } + } catch { + self.error = error + logger.error("\(error.localizedDescription)") + let errorMessage = ChatMessage(content: "An error occurred. Please try again.", + participant: .other, + error: error, + pending: false) + messages[messages.count - 1] = errorMessage + } + } + } + + func addAttachment(_ attachment: MultimodalAttachment) { + attachments.append(attachment) + } + + func removeAttachment(_ attachment: MultimodalAttachment) { + if attachment.isCloudStorage { + // Remove corresponding fileDataPart when attachment is deleted. + fileDataParts?.removeAll { $0.uri == attachment.url?.absoluteString } + } + + attachments.removeAll { $0.id == attachment.id } + } +} diff --git a/firebaseai/FirebaseAIExample/Features/Multimodal/Views/AttachmentPreviewCard.swift b/firebaseai/FirebaseAIExample/Features/Multimodal/Views/AttachmentPreviewCard.swift new file mode 100644 index 000000000..2bb37e25a --- /dev/null +++ b/firebaseai/FirebaseAIExample/Features/Multimodal/Views/AttachmentPreviewCard.swift @@ -0,0 +1,188 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import SwiftUI + +private enum AttachmentType: String { + case image, video, audio, pdf, other + + init(mimeType: String) { + let mt = mimeType.lowercased() + if mt.hasPrefix("image/") { self = .image } + else if mt.hasPrefix("video/") { self = .video } + else if mt.hasPrefix("audio/") { self = .audio } + else if mt == "application/pdf" { self = .pdf } + else { self = .other } + } + + var systemImageName: String { + switch self { + case .image: return "photo" + case .video: return "video" + case .audio: return "waveform" + case .pdf: return "doc.text" + case .other: return "questionmark" + } + } + + var typeTagColor: Color { + switch self { + case .image: return .green + case .video: return .purple + case .audio: return .orange + case .pdf: return .red + case .other: return .blue + } + } + + var displayFileType: String { + switch self { + case .image: return "IMAGE" + case .video: return "VIDEO" + case .audio: return "AUDIO" + case .pdf: return "PDF" + case .other: return "UNKNOWN" + } + } +} + +struct AttachmentPreviewCard: View { + let attachment: MultimodalAttachment + let onRemove: (() -> Void)? 
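+
+  // Derives the icon, tag color, and display label from the attachment's
+  // MIME type via the `AttachmentType` mapping above.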
+ + private var attachmentType: AttachmentType { + AttachmentType(mimeType: attachment.mimeType) + } + + var body: some View { + HStack(spacing: 12) { + Image(systemName: attachmentType.systemImageName) + .font(.system(size: 20)) + .foregroundColor(.blue) + .frame(width: 40, height: 40) + .background(Color.blue.opacity(0.1)) + .clipShape(RoundedRectangle(cornerRadius: 6)) + + VStack(alignment: .leading, spacing: 4) { + Text(displayName) + .font(.system(size: 14, weight: .medium)) + .lineLimit(1) + .truncationMode(.middle) + .foregroundColor(.primary) + + HStack(spacing: 8) { + Text(attachmentType.displayFileType) + .font(.system(size: 10, weight: .semibold)) + .padding(.horizontal, 6) + .padding(.vertical, 2) + .background(attachmentType.typeTagColor) + .foregroundColor(.white) + .clipShape(Capsule()) + + Spacer() + } + } + + if let onRemove = onRemove { + Button(action: onRemove) { + Image(systemName: "xmark.circle.fill") + .font(.system(size: 16)) + .foregroundColor(.gray) + } + .buttonStyle(PlainButtonStyle()) + } + } + .padding(12) + .background(Color(.systemGray6)) + .clipShape(RoundedRectangle(cornerRadius: 12)) + .overlay( + RoundedRectangle(cornerRadius: 12) + .stroke(Color(.separator), lineWidth: 0.5) + ) + } + + private var displayName: String { + let fileName = attachment.url?.lastPathComponent ?? "Default" + let maxLength = 30 + if fileName.count <= maxLength { + return fileName + } + + let prefixName = fileName.prefix(15) + let suffixName = fileName.suffix(10) + return "\(prefixName)...\(suffixName)" + } +} + +struct AttachmentPreviewScrollView: View { + let attachments: [MultimodalAttachment] + var onAttachmentRemove: ((MultimodalAttachment) -> Void)? = nil + + var body: some View { + if !attachments.isEmpty { + ScrollView(.horizontal, showsIndicators: false) { + LazyHStack(spacing: 8) { + ForEach(attachments) { attachment in + AttachmentPreviewCard( + attachment: attachment, + onRemove: onAttachmentRemove == nil ? nil : { onAttachmentRemove?(attachment) } + ) + .frame(width: 180) + } + } + .padding(.horizontal, 16) + } + .frame(height: 80) + } else { + EmptyView() + } + } +} + +#Preview { + VStack(spacing: 20) { + AttachmentPreviewCard( + attachment: MultimodalAttachment( + mimeType: "image/jpeg", + data: Data() + ), + onRemove: { print("Image removed") } + ) + + AttachmentPreviewCard( + attachment: MultimodalAttachment( + mimeType: "application/pdf", + data: Data() + ), + onRemove: { print("PDF removed") } + ) + + AttachmentPreviewCard( + attachment: MultimodalAttachment( + mimeType: "video/mp4", + data: Data() + ), + onRemove: { print("Video removed") } + ) + + AttachmentPreviewCard( + attachment: MultimodalAttachment( + mimeType: "audio/mpeg", + data: Data() + ), + onRemove: { print("Audio removed") } + ) + } + .padding() +} diff --git a/firebaseai/FirebaseAIExample/FunctionCallingExample/Screens/FunctionCallingScreen.swift b/firebaseai/FirebaseAIExample/FunctionCallingExample/Screens/FunctionCallingScreen.swift deleted file mode 100644 index 8af6b5568..000000000 --- a/firebaseai/FirebaseAIExample/FunctionCallingExample/Screens/FunctionCallingScreen.swift +++ /dev/null @@ -1,140 +0,0 @@ -// Copyright 2023 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-#if canImport(FirebaseAILogic)
-  import FirebaseAILogic
-#else
-  import FirebaseAI
-#endif
-import GenerativeAIUIComponents
-import SwiftUI
-
-struct FunctionCallingScreen: View {
-  let firebaseService: FirebaseAI
-  @StateObject var viewModel: FunctionCallingViewModel
-
-  @State
-  private var userPrompt = "What is 100 Euros in U.S. Dollars?"
-
-  init(firebaseService: FirebaseAI) {
-    self.firebaseService = firebaseService
-    _viewModel =
-      StateObject(wrappedValue: FunctionCallingViewModel(firebaseService: firebaseService))
-  }
-
-  enum FocusedField: Hashable {
-    case message
-  }
-
-  @FocusState
-  var focusedField: FocusedField?
-
-  var body: some View {
-    VStack {
-      ScrollViewReader { scrollViewProxy in
-        List {
-          Text("Interact with a currency conversion API using function calling in Gemini.")
-          ForEach(viewModel.messages) { message in
-            MessageView(message: message)
-          }
-          if let error = viewModel.error {
-            ErrorView(error: error)
-              .tag("errorView")
-          }
-        }
-        .listStyle(.plain)
-        .onChange(of: viewModel.messages, perform: { newValue in
-          if viewModel.hasError {
-            // Wait for a short moment to make sure we can actually scroll to the bottom.
-            DispatchQueue.main.asyncAfter(deadline: .now() + 0.05) {
-              withAnimation {
-                scrollViewProxy.scrollTo("errorView", anchor: .bottom)
-              }
-              focusedField = .message
-            }
-          } else {
-            guard let lastMessage = viewModel.messages.last else { return }
-
-            // Wait for a short moment to make sure we can actually scroll to the bottom.
-            DispatchQueue.main.asyncAfter(deadline: .now() + 0.05) {
-              withAnimation {
-                scrollViewProxy.scrollTo(lastMessage.id, anchor: .bottom)
-              }
-              focusedField = .message
-            }
-          }
-        })
-        .onTapGesture {
-          focusedField = nil
-        }
-      }
-      InputField("Message...", text: $userPrompt) {
-        Image(systemName: viewModel.busy ? "stop.circle.fill" : "arrow.up.circle.fill")
-          .font(.title)
-      }
-      .focused($focusedField, equals: .message)
-      .onSubmit { sendOrStop() }
-    }
-    .toolbar {
-      ToolbarItem(placement: .primaryAction) {
-        Button(action: newChat) {
-          Image(systemName: "square.and.pencil")
-        }
-      }
-    }
-    .navigationTitle("Function Calling")
-    .onAppear {
-      focusedField = .message
-    }
-  }
-
-  private func sendMessage() {
-    Task {
-      let prompt = userPrompt
-      userPrompt = ""
-      await viewModel.sendMessage(prompt, streaming: true)
-    }
-  }
-
-  private func sendOrStop() {
-    if viewModel.busy {
-      viewModel.stop()
-    } else {
-      sendMessage()
-    }
-  }
-
-  private func newChat() {
-    viewModel.startNewChat()
-  }
-}
-
-struct FunctionCallingScreen_Previews: PreviewProvider {
-  struct ContainerView: View {
-    @StateObject var viewModel = FunctionCallingViewModel(firebaseService: FirebaseAI.firebaseAI())
-
-    var body: some View {
-      FunctionCallingScreen(firebaseService: FirebaseAI.firebaseAI())
-        .onAppear {
-          viewModel.messages = ChatMessage.samples
-        }
-    }
-  }
-
-  static var previews: some View {
-    NavigationStack {
-      FunctionCallingScreen(firebaseService: FirebaseAI.firebaseAI())
-    }
-  }
-}
diff --git a/firebaseai/FirebaseAIExample/FunctionCallingExample/ViewModels/FunctionCallingViewModel.swift b/firebaseai/FirebaseAIExample/FunctionCallingExample/ViewModels/FunctionCallingViewModel.swift
deleted file mode 100644
index a5a4412c9..000000000
--- a/firebaseai/FirebaseAIExample/FunctionCallingExample/ViewModels/FunctionCallingViewModel.swift
+++ /dev/null
@@ -1,259 +0,0 @@
-// Copyright 2023 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-#if canImport(FirebaseAILogic)
-  import FirebaseAILogic
-#else
-  import FirebaseAI
-#endif
-import Foundation
-import UIKit
-
-@MainActor
-class FunctionCallingViewModel: ObservableObject {
-  /// This array holds both the user's and the system's chat messages
-  @Published var messages = [ChatMessage]()
-
-  /// Indicates we're waiting for the model to finish
-  @Published var busy = false
-
-  @Published var error: Error?
-  var hasError: Bool {
-    return error != nil
-  }
-
-  /// Function calls pending processing
-  private var functionCalls = [FunctionCallPart]()
-
-  private var model: GenerativeModel
-  private var chat: Chat
-
-  private var chatTask: Task<Void, Never>?
-
-  init(firebaseService: FirebaseAI) { // Accept FirebaseAI instance
-    model = firebaseService.generativeModel(
-      modelName: "gemini-2.0-flash-001",
-      tools: [.functionDeclarations([
-        FunctionDeclaration(
-          name: "get_exchange_rate",
-          description: "Get the exchange rate for currencies between countries",
-          parameters: [
-            "currency_from": .enumeration(
-              values: ["USD", "EUR", "JPY", "GBP", "AUD", "CAD"],
-              description: "The currency to convert from in ISO 4217 format"
-            ),
-            "currency_to": .enumeration(
-              values: ["USD", "EUR", "JPY", "GBP", "AUD", "CAD"],
-              description: "The currency to convert to in ISO 4217 format"
-            ),
-          ]
-        ),
-      ])]
-    )
-    chat = model.startChat() // Initialize chat with the model from the service
-  }
-
-  func sendMessage(_ text: String, streaming: Bool = true) async {
-    error = nil
-    chatTask?.cancel()
-
-    chatTask = Task {
-      busy = true
-      defer {
-        busy = false
-      }
-
-      // first, add the user's message to the chat
-      let userMessage = ChatMessage(message: text, participant: .user)
-      messages.append(userMessage)
-
-      // add a pending message while we're waiting for a response from the backend
-      let systemMessage = ChatMessage.pending(participant: .system)
-      messages.append(systemMessage)
-
-      print(messages)
-      do {
-        repeat {
-          if streaming {
-            try await internalSendMessageStreaming(text)
-          } else {
-            try await internalSendMessage(text)
-          }
-        } while !functionCalls.isEmpty
-      } catch {
-        self.error = error
-        print(error.localizedDescription)
-        messages.removeLast()
-      }
-    }
-  }
-
-  func startNewChat() {
-    stop()
-    error = nil
-    chat = model.startChat()
-    messages.removeAll()
-  }
-
-  func stop() {
-    chatTask?.cancel()
-    error = nil
-  }
-
-  private func internalSendMessageStreaming(_ text: String) async throws {
-    let functionResponses = try await processFunctionCalls()
-    let responseStream: AsyncThrowingStream<GenerateContentResponse, Error>
-    if functionResponses.isEmpty {
-      responseStream = try chat.sendMessageStream(text)
-    } else {
-      for functionResponse in functionResponses {
-        messages.insert(functionResponse.chatMessage(), at: messages.count - 1)
-      }
-      responseStream = try chat.sendMessageStream([functionResponses.modelContent()])
-    }
-    for try await chunk in responseStream {
-      processResponseContent(content: chunk)
-    }
-  }
-
-  private func internalSendMessage(_ text: String) async throws {
-    let functionResponses = try await processFunctionCalls()
-    let response: GenerateContentResponse
-    if functionResponses.isEmpty {
-      response = try await chat.sendMessage(text)
-    } else {
-      for functionResponse in functionResponses {
-        messages.insert(functionResponse.chatMessage(), at: messages.count - 1)
-      }
-      response = try await chat.sendMessage([functionResponses.modelContent()])
-    }
-    processResponseContent(content: response)
-  }
-
-  func processResponseContent(content: GenerateContentResponse) {
-    guard let candidate = content.candidates.first else {
-      fatalError("No candidate.")
-    }
-
-    for part in candidate.content.parts {
-      switch part {
-      case let textPart as TextPart:
-        // replace pending message with backend response
-        messages[messages.count - 1].message += textPart.text
-        messages[messages.count - 1].pending = false
-      case let functionCallPart as FunctionCallPart:
-        messages.insert(functionCallPart.chatMessage(), at: messages.count - 1)
-        functionCalls.append(functionCallPart)
-      default:
-        fatalError("Unsupported response part: \(part)")
-      }
-    }
-  }
-
-  func processFunctionCalls() async throws -> [FunctionResponsePart] {
-    var functionResponses = [FunctionResponsePart]()
-    for functionCall in
functionCalls { - switch functionCall.name { - case "get_exchange_rate": - let exchangeRates = getExchangeRate(args: functionCall.args) - functionResponses.append(FunctionResponsePart( - name: "get_exchange_rate", - response: exchangeRates - )) - default: - fatalError("Unknown function named \"\(functionCall.name)\".") - } - } - functionCalls = [] - - return functionResponses - } - - // MARK: - Callable Functions - - func getExchangeRate(args: JSONObject) -> JSONObject { - // 1. Validate and extract the parameters provided by the model (from a `FunctionCall`) - guard case let .string(from) = args["currency_from"] else { - fatalError("Missing `currency_from` parameter.") - } - guard case let .string(to) = args["currency_to"] else { - fatalError("Missing `currency_to` parameter.") - } - - // 2. Get the exchange rate - let allRates: [String: [String: Double]] = [ - "AUD": ["CAD": 0.89265, "EUR": 0.6072, "GBP": 0.51714, "JPY": 97.75, "USD": 0.66379], - "CAD": ["AUD": 1.1203, "EUR": 0.68023, "GBP": 0.57933, "JPY": 109.51, "USD": 0.74362], - "EUR": ["AUD": 1.6469, "CAD": 1.4701, "GBP": 0.85168, "JPY": 160.99, "USD": 1.0932], - "GBP": ["AUD": 1.9337, "CAD": 1.7261, "EUR": 1.1741, "JPY": 189.03, "USD": 1.2836], - "JPY": ["AUD": 0.01023, "CAD": 0.00913, "EUR": 0.00621, "GBP": 0.00529, "USD": 0.00679], - "USD": ["AUD": 1.5065, "CAD": 1.3448, "EUR": 0.91475, "GBP": 0.77907, "JPY": 147.26], - ] - guard let fromRates = allRates[from] else { - return ["error": .string("No data for currency \(from).")] - } - guard let toRate = fromRates[to] else { - return ["error": .string("No data for currency \(to).")] - } - - // 3. Return the exchange rates as a JSON object (returned to the model in a `FunctionResponse`) - return ["rates": .number(toRate)] - } -} - -private extension FunctionCallPart { - func chatMessage() -> ChatMessage { - let encoder = JSONEncoder() - encoder.outputFormatting = .prettyPrinted - - let jsonData: Data - do { - jsonData = try encoder.encode(self) - } catch { - fatalError("JSON Encoding Failed: \(error.localizedDescription)") - } - guard let json = String(data: jsonData, encoding: .utf8) else { - fatalError("Failed to convert JSON data to a String.") - } - let messageText = "Function call requested by model:\n```\n\(json)\n```" - - return ChatMessage(message: messageText, participant: .system) - } -} - -private extension FunctionResponsePart { - func chatMessage() -> ChatMessage { - let encoder = JSONEncoder() - encoder.outputFormatting = .prettyPrinted - - let jsonData: Data - do { - jsonData = try encoder.encode(self) - } catch { - fatalError("JSON Encoding Failed: \(error.localizedDescription)") - } - guard let json = String(data: jsonData, encoding: .utf8) else { - fatalError("Failed to convert JSON data to a String.") - } - let messageText = "Function response returned by app:\n```\n\(json)\n```" - - return ChatMessage(message: messageText, participant: .user) - } -} - -private extension [FunctionResponsePart] { - func modelContent() -> ModelContent { - return ModelContent(role: "function", parts: self) - } -} diff --git a/firebaseai/FirebaseAIExample/GenerativeAIMultimodalExample/Screens/PhotoReasoningScreen.swift b/firebaseai/FirebaseAIExample/GenerativeAIMultimodalExample/Screens/PhotoReasoningScreen.swift deleted file mode 100644 index 469cb7586..000000000 --- a/firebaseai/FirebaseAIExample/GenerativeAIMultimodalExample/Screens/PhotoReasoningScreen.swift +++ /dev/null @@ -1,93 +0,0 @@ -// Copyright 2023 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the 
"License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -import GenerativeAIUIComponents -import MarkdownUI -import PhotosUI -import SwiftUI -#if canImport(FirebaseAILogic) - import FirebaseAILogic -#else - import FirebaseAI -#endif - -struct PhotoReasoningScreen: View { - let firebaseService: FirebaseAI - @StateObject var viewModel: PhotoReasoningViewModel - - init(firebaseService: FirebaseAI) { - self.firebaseService = firebaseService - _viewModel = - StateObject(wrappedValue: PhotoReasoningViewModel(firebaseService: firebaseService)) - } - - enum FocusedField: Hashable { - case message - } - - @FocusState - var focusedField: FocusedField? - - var body: some View { - VStack { - MultimodalInputField(text: $viewModel.userInput, selection: $viewModel.selectedItems) - .focused($focusedField, equals: .message) - .onSubmit { - onSendTapped() - } - - ScrollViewReader { scrollViewProxy in - List { - if let outputText = viewModel.outputText { - HStack(alignment: .top) { - if viewModel.inProgress { - ProgressView() - } else { - Image(systemName: "cloud.circle.fill") - .font(.title2) - } - - Markdown("\(outputText)") - } - .listRowSeparator(.hidden) - } - } - .listStyle(.plain) - } - } - .onTapGesture { - focusedField = nil - } - .navigationTitle("Multimodal example") - .onAppear { - focusedField = .message - } - } - - // MARK: - Actions - - private func onSendTapped() { - focusedField = nil - - Task { - await viewModel.reason() - } - } -} - -#Preview { - NavigationStack { - PhotoReasoningScreen(firebaseService: FirebaseAI.firebaseAI()) - } -} diff --git a/firebaseai/FirebaseAIExample/GenerativeAIMultimodalExample/ViewModels/PhotoReasoningViewModel.swift b/firebaseai/FirebaseAIExample/GenerativeAIMultimodalExample/ViewModels/PhotoReasoningViewModel.swift deleted file mode 100644 index 11113fb81..000000000 --- a/firebaseai/FirebaseAIExample/GenerativeAIMultimodalExample/ViewModels/PhotoReasoningViewModel.swift +++ /dev/null @@ -1,123 +0,0 @@ -// Copyright 2023 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#if canImport(FirebaseAILogic) - import FirebaseAILogic -#else - import FirebaseAI -#endif -import Foundation -import OSLog -import PhotosUI -import SwiftUI - -@MainActor -class PhotoReasoningViewModel: ObservableObject { - // Maximum value for the larger of the two image dimensions (height and width) in pixels. This is - // being used to reduce the image size in bytes. 
- private static let largestImageDimension = 768.0 - - private var logger = Logger(subsystem: Bundle.main.bundleIdentifier!, category: "generative-ai") - - @Published - var userInput: String = "" - - @Published - var selectedItems = [PhotosPickerItem]() - - @Published - var outputText: String? = nil - - @Published - var errorMessage: String? - - @Published - var inProgress = false - - private var model: GenerativeModel? - - init(firebaseService: FirebaseAI) { - model = firebaseService.generativeModel(modelName: "gemini-2.0-flash-001") - } - - func reason() async { - defer { - inProgress = false - } - guard let model else { - return - } - - do { - inProgress = true - errorMessage = nil - outputText = "" - - let prompt = "Look at the image(s), and then answer the following question: \(userInput)" - - var images = [any PartsRepresentable]() - for item in selectedItems { - if let data = try? await item.loadTransferable(type: Data.self) { - guard let image = UIImage(data: data) else { - logger.error("Failed to parse data as an image, skipping.") - continue - } - if image.size.fits(largestDimension: PhotoReasoningViewModel.largestImageDimension) { - images.append(image) - } else { - guard let resizedImage = image - .preparingThumbnail(of: image.size - .aspectFit(largestDimension: PhotoReasoningViewModel.largestImageDimension)) else { - logger.error("Failed to resize image: \(image)") - continue - } - - images.append(resizedImage) - } - } - } - - let outputContentStream = try model.generateContentStream(prompt, images) - - // stream response - for try await outputContent in outputContentStream { - guard let line = outputContent.text else { - return - } - - outputText = (outputText ?? "") + line - } - } catch { - logger.error("\(error.localizedDescription)") - errorMessage = error.localizedDescription - } - } -} - -private extension CGSize { - func fits(largestDimension length: CGFloat) -> Bool { - return width <= length && height <= length - } - - func aspectFit(largestDimension length: CGFloat) -> CGSize { - let aspectRatio = width / height - if width > height { - let width = min(self.width, length) - return CGSize(width: width, height: round(width / aspectRatio)) - } else { - let height = min(self.height, length) - return CGSize(width: round(height * aspectRatio), height: height) - } - } -} diff --git a/firebaseai/FirebaseAIExample/GenerativeAITextExample/Screens/GenerateContentFromTemplateScreen.swift b/firebaseai/FirebaseAIExample/GenerativeAITextExample/Screens/GenerateContentFromTemplateScreen.swift deleted file mode 100644 index 1b4b28bbe..000000000 --- a/firebaseai/FirebaseAIExample/GenerativeAITextExample/Screens/GenerateContentFromTemplateScreen.swift +++ /dev/null @@ -1,91 +0,0 @@ -// Copyright 2025 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -import MarkdownUI -import SwiftUI -#if canImport(FirebaseAILogic) - import FirebaseAILogic -#else - import FirebaseAI -#endif -import GenerativeAIUIComponents - -struct GenerateContentFromTemplateScreen: View { - let firebaseService: FirebaseAI - @StateObject var viewModel: GenerateContentFromTemplateViewModel - @State var userInput = "" - - init(firebaseService: FirebaseAI) { - self.firebaseService = firebaseService - _viewModel = - StateObject( - wrappedValue: GenerateContentFromTemplateViewModel(firebaseService: firebaseService) - ) - } - - enum FocusedField: Hashable { - case message - } - - @FocusState - var focusedField: FocusedField? - - var body: some View { - VStack { - VStack(alignment: .leading) { - Text("Enter your name, then tap on _Go_ to run generateContent from template on it.") - .padding(.horizontal, 6) - InputField("Enter your name", text: $userInput) { - Text("Go") - } - .focused($focusedField, equals: .message) - .onSubmit { onGenerateContentTapped() } - } - .padding(.horizontal, 16) - - List { - HStack(alignment: .top) { - if viewModel.inProgress { - ProgressView() - } else { - Image(systemName: "cloud.circle.fill") - .font(.title2) - } - - Markdown("\(viewModel.outputText)") - } - .listRowSeparator(.hidden) - } - .listStyle(.plain) - } - .onTapGesture { - focusedField = nil - } - .navigationTitle("Template Generate Content") - } - - private func onGenerateContentTapped() { - focusedField = nil - - Task { - await viewModel.generateContentFromTemplate(name: userInput) - } - } -} - -#Preview { - NavigationStack { - GenerateContentFromTemplateScreen(firebaseService: FirebaseAI.firebaseAI()) - } -} diff --git a/firebaseai/FirebaseAIExample/GenerativeAITextExample/Screens/GenerateContentScreen.swift b/firebaseai/FirebaseAIExample/GenerativeAITextExample/Screens/GenerateContentScreen.swift deleted file mode 100644 index 5c5e34a0c..000000000 --- a/firebaseai/FirebaseAIExample/GenerativeAITextExample/Screens/GenerateContentScreen.swift +++ /dev/null @@ -1,89 +0,0 @@ -// Copyright 2023 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -import MarkdownUI -import SwiftUI -#if canImport(FirebaseAILogic) - import FirebaseAILogic -#else - import FirebaseAI -#endif -import GenerativeAIUIComponents - -struct GenerateContentScreen: View { - let firebaseService: FirebaseAI - @StateObject var viewModel: GenerateContentViewModel - @State var userInput = "" - - init(firebaseService: FirebaseAI) { - self.firebaseService = firebaseService - _viewModel = - StateObject(wrappedValue: GenerateContentViewModel(firebaseService: firebaseService)) - } - - enum FocusedField: Hashable { - case message - } - - @FocusState - var focusedField: FocusedField? 
- - var body: some View { - VStack { - VStack(alignment: .leading) { - Text("Enter some text, then tap on _Go_ to run generateContent on it.") - .padding(.horizontal, 6) - InputField("Enter generate content input", text: $userInput) { - Text("Go") - } - .focused($focusedField, equals: .message) - .onSubmit { onGenerateContentTapped() } - } - .padding(.horizontal, 16) - - List { - HStack(alignment: .top) { - if viewModel.inProgress { - ProgressView() - } else { - Image(systemName: "cloud.circle.fill") - .font(.title2) - } - - Markdown("\(viewModel.outputText)") - } - .listRowSeparator(.hidden) - } - .listStyle(.plain) - } - .onTapGesture { - focusedField = nil - } - .navigationTitle("Text example") - } - - private func onGenerateContentTapped() { - focusedField = nil - - Task { - await viewModel.generateContent(inputText: userInput) - } - } -} - -#Preview { - NavigationStack { - GenerateContentScreen(firebaseService: FirebaseAI.firebaseAI()) - } -} diff --git a/firebaseai/FirebaseAIExample/GenerativeAITextExample/ViewModels/GenerateContentFromTemplateViewModel.swift b/firebaseai/FirebaseAIExample/GenerativeAITextExample/ViewModels/GenerateContentFromTemplateViewModel.swift deleted file mode 100644 index d07544f2e..000000000 --- a/firebaseai/FirebaseAIExample/GenerativeAITextExample/ViewModels/GenerateContentFromTemplateViewModel.swift +++ /dev/null @@ -1,92 +0,0 @@ -// Copyright 2025 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#if canImport(FirebaseAILogic) - import FirebaseAILogic -#else - import FirebaseAI -#endif -import Foundation -import OSLog - -// Template Details -// -// Configuration -// -// input: -// default: -// language: english -// schema: -// name: string -// language?: string -// -// Prompt and system instructions -// -// {{role "system"}} -// The user's name is {{name}}. They prefer to communicate in {{language}}. -// {{role "user"}} -// Say hello. -// - -@MainActor -class GenerateContentFromTemplateViewModel: ObservableObject { - private var logger = Logger( - subsystem: Bundle.main.bundleIdentifier ?? "com.google.firebase.quickstart.FirebaseAIExample", - category: "generative-ai" - ) - - @Published - var outputText = "" - - @Published - var errorMessage: String? - - @Published - var inProgress = false - - private var model: TemplateGenerativeModel? 
- - init(firebaseService: FirebaseAI) { - model = firebaseService.templateGenerativeModel() - } - - func generateContentFromTemplate(name: String) async { - defer { - inProgress = false - } - guard let model else { - return - } - - do { - inProgress = true - errorMessage = nil - outputText = "" - - let response = try await model.generateContent( - templateID: "apple-qs-greeting", - inputs: [ - "name": name, - "language": "Spanish", - ] - ) - if let text = response.text { - outputText = text - } - } catch { - logger.error("\(error.localizedDescription)") - errorMessage = error.localizedDescription - } - } -} diff --git a/firebaseai/FirebaseAIExample/GenerativeAITextExample/ViewModels/GenerateContentViewModel.swift b/firebaseai/FirebaseAIExample/GenerativeAITextExample/ViewModels/GenerateContentViewModel.swift deleted file mode 100644 index fe2344ba0..000000000 --- a/firebaseai/FirebaseAIExample/GenerativeAITextExample/ViewModels/GenerateContentViewModel.swift +++ /dev/null @@ -1,70 +0,0 @@ -// Copyright 2023 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#if canImport(FirebaseAILogic) - import FirebaseAILogic -#else - import FirebaseAI -#endif -import Foundation -import OSLog - -@MainActor -class GenerateContentViewModel: ObservableObject { - private var logger = Logger(subsystem: Bundle.main.bundleIdentifier!, category: "generative-ai") - - @Published - var outputText = "" - - @Published - var errorMessage: String? - - @Published - var inProgress = false - - private var model: GenerativeModel? 
- - init(firebaseService: FirebaseAI) { - model = firebaseService.generativeModel(modelName: "gemini-2.0-flash-001") - } - - func generateContent(inputText: String) async { - defer { - inProgress = false - } - guard let model else { - return - } - - do { - inProgress = true - errorMessage = nil - outputText = "" - - let outputContentStream = try model.generateContentStream(inputText) - - // stream response - for try await outputContent in outputContentStream { - guard let line = outputContent.text else { - return - } - - outputText = outputText + line - } - } catch { - logger.error("\(error.localizedDescription)") - errorMessage = error.localizedDescription - } - } -} diff --git a/firebaseai/FirebaseAIExample/Preview Content/Preview Assets.xcassets/Contents.json b/firebaseai/FirebaseAIExample/Preview Content/Preview Assets.xcassets/Contents.json deleted file mode 100644 index 73c00596a..000000000 --- a/firebaseai/FirebaseAIExample/Preview Content/Preview Assets.xcassets/Contents.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "info" : { - "author" : "xcode", - "version" : 1 - } -} diff --git a/firebaseai/FirebaseAIExample/Shared/Models/BackendOption.swift b/firebaseai/FirebaseAIExample/Shared/Models/BackendOption.swift new file mode 100644 index 000000000..0731fba0c --- /dev/null +++ b/firebaseai/FirebaseAIExample/Shared/Models/BackendOption.swift @@ -0,0 +1,22 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import Foundation + +enum BackendOption: String, CaseIterable, Identifiable { + case googleAI = "Google AI" + case vertexAI = "Firebase Vertex AI" + + var id: String { rawValue } +} diff --git a/firebaseai/FirebaseAIExample/Shared/Models/Sample.swift b/firebaseai/FirebaseAIExample/Shared/Models/Sample.swift new file mode 100644 index 000000000..0e59334c9 --- /dev/null +++ b/firebaseai/FirebaseAIExample/Shared/Models/Sample.swift @@ -0,0 +1,267 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import Foundation +#if canImport(FirebaseAILogic) + import FirebaseAILogic +#else + import FirebaseAI +#endif + +public struct Sample: Identifiable { + public let id = UUID() + public let title: String + public let description: String + public let useCases: [UseCase] + public let navRoute: String + public let modelName: String + public let chatHistory: [ModelContent]? + public let initialPrompt: String? + public let systemInstruction: ModelContent? + public let tools: [Tool]? 
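+
+  // `tools` (above) lets a sample opt in to capabilities such as function
+  // declarations or Google Search grounding; see the Weather Chat and
+  // Grounding samples below.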
+ public let generationConfig: GenerationConfig? + public let fileDataParts: [FileDataPart]? + + public init(title: String, + description: String, + useCases: [UseCase], + navRoute: String, + modelName: String = "gemini-2.5-flash", + chatHistory: [ModelContent]? = nil, + initialPrompt: String? = nil, + systemInstruction: ModelContent? = nil, + tools: [Tool]? = nil, + generationConfig: GenerationConfig? = nil, + fileDataParts: [FileDataPart]? = nil) { + self.title = title + self.description = description + self.useCases = useCases + self.navRoute = navRoute + self.modelName = modelName + self.chatHistory = chatHistory + self.initialPrompt = initialPrompt + self.systemInstruction = systemInstruction + self.tools = tools + self.generationConfig = generationConfig + self.fileDataParts = fileDataParts + } +} + +extension Sample { + public static let samples: [Sample] = [ + // Text + Sample( + title: "Travel tips", + description: "The user wants the model to help a new traveler" + + " with travel tips", + useCases: [.text], + navRoute: "ChatScreen", + chatHistory: [ + ModelContent( + role: "user", + parts: "I have never traveled before. When should I book a flight?" + ), + ModelContent( + role: "model", + parts: "You should book flights a couple of months ahead of time. It will be cheaper and more flexible for you." + ), + ModelContent(role: "user", parts: "Do I need a passport?"), + ModelContent( + role: "model", + parts: "If you are traveling outside your own country, make sure your passport is up-to-date and valid for more than 6 months during your travel." + ), + ], + initialPrompt: "What else is important when traveling?", + systemInstruction: ModelContent(parts: "You are a Travel assistant. You will answer" + + " questions the user asks based on the information listed" + + " in Relevant Information. Do not hallucinate. Do not use" + + " the internet."), + ), + Sample( + title: "Hello world (with template)", + description: "Uses a template to say hello. The template uses 'name' and 'language' (defaults to Spanish) as inputs.", + useCases: [.text], + navRoute: "GenerateContentFromTemplateScreen", + initialPrompt: "Peter", + systemInstruction: ModelContent( + parts: "The user's name is {{name}}. They prefer to communicate in {{language}}." + ) + ), + Sample( + title: "Chatbot recommendations for courses", + description: "A chatbot suggests courses for a performing arts program.", + useCases: [.text], + navRoute: "ChatScreen", + initialPrompt: "I am interested in Performing Arts. I have taken Theater 1A.", + systemInstruction: ModelContent(parts: "You are a chatbot for the county's performing and fine arts" + + " program. You help students decide what course they will" + + " take during the summer."), + ), + // Image + Sample( + title: "Blog post creator", + description: "Create a blog post from an image file stored in Cloud Storage.", + useCases: [.image], + navRoute: "MultimodalScreen", + initialPrompt: "Write a short, engaging blog post based on this picture." 
+        + " It should include a description of the meal in the"
+        + " photo and talk about my journey meal prepping.",
+      fileDataParts: [
+        FileDataPart(
+          uri: "https://storage.googleapis.com/cloud-samples-data/generative-ai/image/meal-prep.jpeg",
+          mimeType: "image/jpeg"
+        ),
+      ]
+    ),
+    Sample(
+      title: "Imagen - image generation",
+      description: "Generate images using Imagen 3",
+      useCases: [.image],
+      navRoute: "ImagenScreen",
+      initialPrompt: "A photo of a modern building with water in the background"
+    ),
+    Sample(
+      title: "[T] Imagen - image generation",
+      description: "[T] Generate images using Imagen 3",
+      useCases: [.image],
+      navRoute: "ImagenFromTemplateScreen",
+      initialPrompt: "A photo of a modern building with water in the background"
+    ),
+    Sample(
+      title: "Gemini Flash - image generation",
+      description: "Generate and/or edit images using Gemini 2.0 Flash",
+      useCases: [.image],
+      navRoute: "ChatScreen",
+      modelName: "gemini-2.0-flash-preview-image-generation",
+      initialPrompt: "Hi, can you create a 3d rendered image of a pig "
+        + "with wings and a top hat flying over a happy "
+        + "futuristic scifi city with lots of greenery?",
+      generationConfig: GenerationConfig(responseModalities: [.text, .image]),
+    ),
+    // Video
+    Sample(
+      title: "Hashtags for a video",
+      description: "Generate hashtags for a video ad stored in Cloud Storage.",
+      useCases: [.video],
+      navRoute: "MultimodalScreen",
+      initialPrompt: "Generate 5-10 hashtags that relate to the video content."
+        + " Try to use more popular and engaging terms,"
+        + " e.g. #Viral. Do not add content not related to"
+        + " the video.\n Start the output with 'Tags:'",
+      fileDataParts: [
+        FileDataPart(
+          uri: "https://storage.googleapis.com/cloud-samples-data/generative-ai/video/google_home_celebrity_ad.mp4",
+          mimeType: "video/mp4"
+        ),
+      ]
+    ),
+    Sample(
+      title: "Summarize video",
+      description: "Summarize a video and extract important dialogue.",
+      useCases: [.video],
+      navRoute: "MultimodalScreen",
+      chatHistory: [
+        ModelContent(role: "user", parts: "Can you help me with the description of a video file?"),
+        ModelContent(
+          role: "model",
+          parts: "Sure! Click on the attach button below and choose a video file for me to describe."
+        ),
+      ],
+      initialPrompt: "I have attached the video file. Provide a description of"
+        + " the video. The description should also contain"
+        + " anything important which people say in the video."
+    ),
+    // Audio
+    Sample(
+      title: "Audio Summarization",
+      description: "Summarize an audio file",
+      useCases: [.audio],
+      navRoute: "MultimodalScreen",
+      chatHistory: [
+        ModelContent(role: "user", parts: "Can you help me summarize an audio file?"),
+        ModelContent(
+          role: "model",
+          parts: "Of course! Click on the attach button below and choose an audio file for me to summarize."
+        ),
+      ],
+      initialPrompt: "I have attached the audio file. Please analyze it and summarize the contents"
+        + " of the audio as bullet points."
+    ),
+    Sample(
+      title: "Translation from audio",
+      description: "Translate an audio file stored in Cloud Storage",
+      useCases: [.audio],
+      navRoute: "MultimodalScreen",
+      initialPrompt: "Please translate the audio into Mandarin.",
+      fileDataParts: [
+        FileDataPart(
+          uri: "https://storage.googleapis.com/cloud-samples-data/generative-ai/audio/How_to_create_a_My_Map_in_Google_Maps.mp3",
+          mimeType: "audio/mp3"
+        ),
+      ]
+    ),
+    // Document
+    Sample(
+      title: "Document comparison",
+      description: "Compare the contents of 2 documents."
+ + " Supported by the Vertex AI Gemini API because the documents are stored in Cloud Storage", + useCases: [.document], + navRoute: "MultimodalScreen", + initialPrompt: "The first document is from 2013, and the second document is" + + " from 2023. How did the standard deduction evolve?", + fileDataParts: [ + FileDataPart( + uri: "https://storage.googleapis.com/cloud-samples-data/generative-ai/pdf/form_1040_2013.pdf", + mimeType: "application/pdf" + ), + FileDataPart( + uri: "https://storage.googleapis.com/cloud-samples-data/generative-ai/pdf/form_1040_2023.pdf", + mimeType: "application/pdf" + ), + ] + ), + // Function Calling + Sample( + title: "Weather Chat", + description: "Use function calling to get the weather conditions" + + " for a specific US city on a specific date.", + useCases: [.functionCalling, .text], + navRoute: "FunctionCallingScreen", + initialPrompt: "What was the weather in Boston, MA on October 17, 2024?", + tools: [.functionDeclarations([ + FunctionDeclaration( + name: "fetchWeather", + description: "Get the weather conditions for a specific US city on a specific date", + parameters: [ + "city": .string(description: "The US city of the location"), + "state": .string(description: "The US state of the location"), + "date": .string(description: "The date for which to get the weather." + + " Date must be in the format: YYYY-MM-DD"), + ] + ), + ])] + ), + // Grounding + Sample( + title: "Grounding with Google Search", + description: "Use Grounding with Google Search to get responses based on up-to-date information from the web.", + useCases: [.text], + navRoute: "GroundingScreen", + initialPrompt: "What's the weather in Chicago this weekend?", + tools: [.googleSearch()] + ), + ] + + public static var sample = samples[0] +} diff --git a/firebaseai/FirebaseAIExample/Shared/Models/UseCase.swift b/firebaseai/FirebaseAIExample/Shared/Models/UseCase.swift new file mode 100644 index 000000000..ee4e80f8a --- /dev/null +++ b/firebaseai/FirebaseAIExample/Shared/Models/UseCase.swift @@ -0,0 +1,27 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
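+
+// A typical consumer of this enum filters the sample catalog by the selected
+// use case. A minimal sketch, assuming the `Sample` model defined above:
+//
+//   func samples(for useCase: UseCase) -> [Sample] {
+//     guard useCase != .all else { return Sample.samples }
+//     return Sample.samples.filter { $0.useCases.contains(useCase) }
+//   }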
+ +import Foundation + +public enum UseCase: String, CaseIterable, Identifiable { + case all = "All" + case text = "Text" + case image = "Image" + case video = "Video" + case audio = "Audio" + case document = "Document" + case functionCalling = "Function Calling" + + public var id: String { rawValue } +} diff --git a/firebaseai/FirebaseAIExample/ChatExample/Views/ErrorDetailsView.swift b/firebaseai/FirebaseAIExample/Shared/Views/ErrorDetailsView.swift similarity index 100% rename from firebaseai/FirebaseAIExample/ChatExample/Views/ErrorDetailsView.swift rename to firebaseai/FirebaseAIExample/Shared/Views/ErrorDetailsView.swift diff --git a/firebaseai/FirebaseAIExample/Shared/Views/FilterChipView.swift b/firebaseai/FirebaseAIExample/Shared/Views/FilterChipView.swift new file mode 100644 index 000000000..b3701db1e --- /dev/null +++ b/firebaseai/FirebaseAIExample/Shared/Views/FilterChipView.swift @@ -0,0 +1,54 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import SwiftUI + +struct FilterChipView: View { + let useCase: UseCase + let isSelected: Bool + let action: () -> Void + + var body: some View { + Button(action: action) { + Text(useCase.rawValue) + .padding(.horizontal) + } + .filterChipStyle(isSelected: isSelected) + } +} + +private struct FilterChipStyle: ViewModifier { + let isSelected: Bool + + func body(content: Content) -> some View { + if isSelected { + content.buttonStyle(.borderedProminent) + } else { + content.buttonStyle(.bordered) + } + } +} + +extension View { + func filterChipStyle(isSelected: Bool) -> some View { + modifier(FilterChipStyle(isSelected: isSelected)) + } +} + +#Preview { + VStack(spacing: 16) { + FilterChipView(useCase: .text, isSelected: true) {} + FilterChipView(useCase: .text, isSelected: false) {} + } +} diff --git a/firebaseai/FirebaseAIExample/Shared/Views/ProgressOverlay.swift b/firebaseai/FirebaseAIExample/Shared/Views/ProgressOverlay.swift new file mode 100644 index 000000000..b2391f77b --- /dev/null +++ b/firebaseai/FirebaseAIExample/Shared/Views/ProgressOverlay.swift @@ -0,0 +1,40 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
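+
+// A minimal usage sketch for this overlay; the host view name and the
+// `inProgress` flag are assumptions modeled on the view models in this change:
+//
+//   SamplesScreen()
+//     .overlay {
+//       if viewModel.inProgress { ProgressOverlay() }
+//     }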
+ +import SwiftUI + +struct ProgressOverlay: View { + var body: some View { + ZStack { + Color.black.opacity(0.3) + .ignoresSafeArea() + + ZStack { + RoundedRectangle(cornerRadius: 16) + .fill(Material.ultraThinMaterial) + .frame(width: 120, height: 100) + .shadow(radius: 8) + + VStack(spacing: 12) { + ProgressView() + .scaleEffect(1.5) + + Text("Loading...") + .font(.subheadline) + .foregroundColor(.secondary) + } + } + } + } +} diff --git a/firebaseai/FirebaseAIExample/Shared/Views/SampleCardView.swift b/firebaseai/FirebaseAIExample/Shared/Views/SampleCardView.swift new file mode 100644 index 000000000..58034475a --- /dev/null +++ b/firebaseai/FirebaseAIExample/Shared/Views/SampleCardView.swift @@ -0,0 +1,125 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import SwiftUI + +struct SampleCardView: View { + let sample: Sample + + var body: some View { + GroupBox { + Text(sample.description) + .font(.system(size: 14)) + .foregroundColor(.secondary) + .frame(maxWidth: .infinity, maxHeight: .infinity, alignment: .topLeading) + } label: { + if let useCase = sample.useCases.first { + Label(sample.title, systemImage: systemName(for: useCase)) + .font(.system(size: 17, weight: .medium)) + .foregroundColor(color(for: useCase)) + } else { + Text(sample.title) + .font(.system(size: 17, weight: .medium)) + } + } + .groupBoxStyle(CardGroupBoxStyle()) + .frame(maxWidth: .infinity, minHeight: 150, maxHeight: .infinity, alignment: .top) + } + + private func systemName(for useCase: UseCase) -> String { + switch useCase { + case .all: "square.grid.2x2.fill" + case .text: "text.bubble.fill" + case .image: "photo.fill" + case .video: "video.fill" + case .audio: "waveform" + case .document: "doc.fill" + case .functionCalling: "gearshape.2.fill" + } + } + + private func color(for useCase: UseCase) -> Color { + switch useCase { + case .all:.primary + case .text:.blue + case .image:.purple + case .video:.red + case .audio:.orange + case .document:.gray + case .functionCalling:.green + } + } +} + +public struct CardGroupBoxStyle: GroupBoxStyle { + private var cornerRadius: CGFloat { + if #available(iOS 26.0, *) { + return 28 + } else { + return 12 + } + } + + public func makeBody(configuration: Configuration) -> some View { + VStack(alignment: .leading, spacing: 12) { + configuration.label + configuration.content + } + .padding() + .background(Color(.secondarySystemGroupedBackground)) + .clipShape(RoundedRectangle(cornerRadius: cornerRadius, style: .continuous)) + } +} + +#Preview { + let samples = [ + Sample( + title: "Sample 1", + description: "This is the first sample card.", + useCases: [.text], + navRoute: "ConversationScreen" + ), + Sample( + title: "Sample 2", + description: "This is the second sample card.", + useCases: [.image], + navRoute: "PhotoReasoningScreen" + ), + Sample( + title: "Sample 3", + description: "This is the third sample card.", + useCases: [.video], + navRoute: "ConversationScreen" + ), + Sample( + title: "Sample 4", + description: "This is 
the fourth sample card, which is a bit longer to see how the text wraps and if everything still aligns correctly.", + useCases: [.audio], + navRoute: "ConversationScreen" + ), + ] + + ScrollView { + LazyVGrid(columns: [ + GridItem(.flexible()), + GridItem(.flexible()), + ], spacing: 16) { + ForEach(samples) { sample in + SampleCardView(sample: sample) + } + } + .padding() + } + .background(Color(.systemGroupedBackground)) +} diff --git a/firebaseai/GenerativeAIUIComponents/Sources/GenerativeAIUIComponents/InputField.swift b/firebaseai/GenerativeAIUIComponents/Sources/GenerativeAIUIComponents/InputField.swift deleted file mode 100644 index 67941c370..000000000 --- a/firebaseai/GenerativeAIUIComponents/Sources/GenerativeAIUIComponents/InputField.swift +++ /dev/null @@ -1,83 +0,0 @@ -// Copyright 2023 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -import SwiftUI - -public struct InputField