diff --git a/.github/workflows/vertexai.yml b/.github/workflows/vertexai.yml
deleted file mode 100644
index c5db31d75df..00000000000
--- a/.github/workflows/vertexai.yml
+++ /dev/null
@@ -1,34 +0,0 @@
-name: vertexai
-
-permissions:
-  contents: read
-
-on:
-  pull_request:
-    paths:
-      - 'FirebaseAI**'
-      - 'FirebaseVertexAI**'
-      - '.github/workflows/vertexai.yml'
-      - '.github/workflows/common.yml'
-      - '.github/workflows/common_cocoapods.yml'
-      - 'Gemfile*'
-  schedule:
-    # Run every day at 11pm (PST) - cron uses UTC times
-    - cron: '0 7 * * *'
-  workflow_dispatch:
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.head_ref || github.ref }}
-  cancel-in-progress: true
-
-jobs:
-  spm:
-    uses: ./.github/workflows/common.yml
-    with:
-      target: FirebaseVertexAIUnit
-
-  pod_lib_lint:
-    uses: ./.github/workflows/common_cocoapods.yml
-    with:
-      product: FirebaseVertexAI
-      supports_swift6: true
diff --git a/FirebaseCore/CHANGELOG.md b/FirebaseCore/CHANGELOG.md
index a30f93d02aa..14adaffc7e1 100644
--- a/FirebaseCore/CHANGELOG.md
+++ b/FirebaseCore/CHANGELOG.md
@@ -7,6 +7,28 @@
 - [removed] **Breaking change**: Removed the following unused API.
   - `Options.androidClientID`
   - `Options.trackingID`
+- [removed] The deprecated Vertex AI in Firebase SDK (`FirebaseVertexAI`) has
+  been removed. It has been replaced by the Firebase AI Logic SDK
+  (`FirebaseAI`) to accommodate the evolving set of supported features and
+  services. To start using the new SDK, import the `FirebaseAI` module and
+  use the top-level `FirebaseAI` class. See details in the
+  [migration guide](https://firebase.google.com/docs/ai-logic/migrate-to-latest-sdk).
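+
+  For example, a minimal migration looks like the following sketch (it
+  assumes a configured default `FirebaseApp`; the model name is
+  illustrative):
+
+  ```swift
+  import FirebaseAI
+
+  // Before (removed):
+  // let model = VertexAI.vertexAI().generativeModel(modelName: "gemini-2.0-flash")
+
+  // After: create the service with the Vertex AI backend, then the model.
+  // Note: the model name here is only an example.
+  let ai = FirebaseAI.firebaseAI(backend: .vertexAI(location: "us-central1"))
+  let model = ai.generativeModel(modelName: "gemini-2.0-flash")
+  ```
 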
 # Firebase 11.15.0
 - [fixed] Remove c99 as the required C language standard. (#14950)
diff --git a/FirebaseVertexAI.podspec b/FirebaseVertexAI.podspec
deleted file mode 100644
index 655bf991e21..00000000000
--- a/FirebaseVertexAI.podspec
+++ /dev/null
@@ -1,61 +0,0 @@
-Pod::Spec.new do |s|
-  s.name = 'FirebaseVertexAI'
-  s.version = '11.15.0'
-  s.summary = 'Vertex AI in Firebase SDK'
-
-  s.description = <<-DESC
-Build AI-powered apps and features with the Gemini API using the Vertex AI in
-Firebase SDK.
-  DESC
-
-  s.homepage = 'https://firebase.google.com'
-  s.license = { :type => 'Apache-2.0', :file => 'LICENSE' }
-  s.authors = 'Google, Inc.'
-
-  s.source = {
-    :git => 'https://github.com/firebase/firebase-ios-sdk.git',
-    :tag => 'CocoaPods-' + s.version.to_s
-  }
-
-  s.social_media_url = 'https://twitter.com/Firebase'
-
-  ios_deployment_target = '15.0'
-  osx_deployment_target = '12.0'
-  tvos_deployment_target = '15.0'
-  watchos_deployment_target = '8.0'
-
-  s.ios.deployment_target = ios_deployment_target
-  s.osx.deployment_target = osx_deployment_target
-  s.tvos.deployment_target = tvos_deployment_target
-  s.watchos.deployment_target = watchos_deployment_target
-
-  s.cocoapods_version = '>= 1.12.0'
-  s.prefix_header_file = false
-
-  s.source_files = [
-    'FirebaseVertexAI/Sources/**/*.swift',
-  ]
-
-  s.swift_version = '5.9'
-
-  s.framework = 'Foundation'
-  s.ios.framework = 'UIKit'
-  s.osx.framework = 'AppKit'
-  s.tvos.framework = 'UIKit'
-  s.watchos.framework = 'WatchKit'
-
-  s.dependency 'FirebaseAI', '~> 11.15.0'
-
-  s.test_spec 'unit' do |unit_tests|
-    unit_tests_dir = 'FirebaseVertexAI/Tests/Unit/'
-    unit_tests.scheme = { :code_coverage => true }
-    unit_tests.platforms = {
-      :ios => ios_deployment_target,
-      :osx => osx_deployment_target,
-      :tvos => tvos_deployment_target
-    }
-    unit_tests.source_files = [
-      unit_tests_dir + '**/*.swift',
-    ]
-  end
-end
diff --git a/FirebaseVertexAI/CHANGELOG.md b/FirebaseVertexAI/CHANGELOG.md
deleted file mode 100644
index 8cc4493a468..00000000000
--- a/FirebaseVertexAI/CHANGELOG.md
+++ /dev/null
@@ -1,205 +0,0 @@
-# 11.13.0
-- [changed] **Renamed:** Vertex AI in Firebase (`FirebaseVertexAI`) has been
- renamed and replaced by the new Firebase AI SDK (`FirebaseAI`). Please migrate
- to the new `FirebaseAI` module. See the Firebase AI release notes for
- migration details and new changes.
-
- Note: Existing Vertex AI in Firebase users may continue to use
- `import FirebaseVertexAI` and the `VertexAI` top-level class, though these
- will be removed in a future release.
-- [fixed] Fixed `ModalityTokenCount` decoding when the `tokenCount` field is
- omitted; this occurs when the count is 0. (#14745)
-
-# 11.12.0
-- [added] **Public Preview**: Added support for specifying response modalities
- in `GenerationConfig`. This includes **public experimental** support for image
- generation using Gemini 2.0 Flash (`gemini-2.0-flash-exp`). (#14658)
-
- Note: This feature is in Public Preview and relies on experimental models,
- which means that it is not subject to any SLA or deprecation policy and could
- change in backwards-incompatible ways.
-- [added] Added support for more `Schema` fields: `minItems`/`maxItems` (array
- size limits), `title` (schema name), `minimum`/`maximum` (numeric ranges),
- `anyOf` (select from sub-schemas), and `propertyOrdering` (JSON key order). (#14647)
-- [fixed] Fixed an issue where network requests would fail in the iOS 18.4
- simulator due to a `URLSession` bug introduced in Xcode 16.3. (#14677)
-
-# 11.11.0
-- [added] Emits a warning when attempting to use an incompatible model with
- `GenerativeModel` or `ImagenModel`. (#14610)
-
-# 11.10.0
-- [feature] The Vertex AI SDK no longer requires `@preconcurrency` when imported in Swift 6.
-- [feature] The Vertex AI Sample App now includes an image generation example.
-- [changed] The Vertex AI Sample App is now part of the
- [quickstart-ios repo](https://github.com/firebase/quickstart-ios/tree/main/vertexai).
-- [changed] The `role` in system instructions is now ignored; no code changes
- are required. (#14558)
-
-# 11.9.0
-- [feature] **Public Preview**: Added support for
- [generating images](https://firebase.google.com/docs/vertex-ai/generate-images-imagen?platform=ios)
- using the Imagen 3 models.
-
- Note: This feature is in Public Preview, which means that it is not subject to
- any SLA or deprecation policy and could change in backwards-incompatible ways.
-- [feature] Added support for modality-based token count. (#14406)
-
-# 11.6.0
-- [changed] The token counts from `GenerativeModel.countTokens(...)` now include
- tokens from the schema for JSON output and function calling; reported token
- counts will now be higher if using these features.
-
-# 11.5.0
-- [fixed] Fixed an issue where `VertexAI.vertexAI(app: app1)` and
- `VertexAI.vertexAI(app: app2)` would return the same instance if their
- `location` was the same, including the default `us-central1`. (#14007)
-- [changed] Removed `format: "double"` in `Schema.double()` since
- double-precision accuracy isn't enforced by the model; continue using the
- Swift `Double` type when decoding data produced with this schema. (#13990)
-
-# 11.4.0
-- [feature] Vertex AI in Firebase is now Generally Available (GA) and can be
- used in production apps. (#13725)
-
- Use the Vertex AI in Firebase library to call the Vertex AI Gemini API
- directly from your app. This client library is built specifically for use with
- Swift apps, offering security options against unauthorized clients as well as
- integrations with other Firebase services.
-
- Note: Vertex AI in Firebase is currently only available in Swift Package
- Manager and CocoaPods. Stay tuned for the next release for the Zip and
- Carthage distributions.
-
- - If you're new to this library, visit the
- [getting started guide](http://firebase.google.com/docs/vertex-ai/get-started?platform=ios).
- - If you used the preview version of the library, visit the
- [migration guide](https://firebase.google.com/docs/vertex-ai/migrate-to-ga?platform=ios)
- to learn about some important updates.
-- [changed] **Breaking Change**: The `HarmCategory` enum is no longer nested
- inside the `SafetySetting` struct and the `unspecified` case has been
- removed. (#13686)
-- [changed] **Breaking Change**: The `BlockThreshold` enum in `SafetySetting`
- has been renamed to `HarmBlockThreshold`. (#13696)
-- [changed] **Breaking Change**: The `unspecified` case has been removed from
- the `FinishReason`, `BlockReason` and `HarmProbability` enums; this scenario
- is now handled by the existing `unknown` case. (#13699)
-- [changed] **Breaking Change**: The property `citationSources` of
- `CitationMetadata` has been renamed to `citations`. (#13702)
-- [changed] **Breaking Change**: The initializer for `Schema` is now internal;
- use the new type methods `Schema.string(...)`, `Schema.object(...)`, etc.,
- instead. (#13852)
-- [changed] **Breaking Change**: The initializer for `FunctionDeclaration` now
- accepts an array of *optional* parameters instead of a list of *required*
- parameters; if a parameter is not listed as optional it is assumed to be
- required. (#13616)
-- [changed] **Breaking Change**: `CountTokensResponse.totalBillableCharacters`
- is now optional (`Int?`); it may be `null` in cases such as when a
- `GenerateContentRequest` contains only images or other non-text content.
- (#13721)
-- [changed] **Breaking Change**: The `ImageConversionError` enum is no longer
- public; image conversion errors are still reported as
- `GenerateContentError.promptImageContentError`. (#13735)
-- [changed] **Breaking Change**: The `CountTokensError` enum has been removed;
- errors occurring in `GenerativeModel.countTokens(...)` are now thrown directly
- instead of being wrapped in a `CountTokensError.internalError`. (#13736)
-- [changed] **Breaking Change**: The enum `ModelContent.Part` has been replaced
- with a protocol named `Part` to avoid future breaking changes with new part
- types. The new types `TextPart` and `FunctionCallPart` may be received when
- generating content; additionally the types `InlineDataPart`, `FileDataPart`
- and `FunctionResponsePart` may be provided as input. (#13767)
-- [changed] **Breaking Change**: All initializers for `ModelContent` now require
- the label `parts: `. (#13832)
-- [changed] **Breaking Change**: `HarmCategory`, `HarmProbability`, and
- `FinishReason` are now structs instead of enums and the `unknown` cases
- have been removed; in a `switch` statement, use the `default:` case to cover
- unknown or unhandled values. (#13728, #13854, #13860)
-- [changed] **Breaking Change**: The `Tool` initializer is now internal; use the
- new type method `functionDeclarations(_:)` to create a `Tool` for function
- calling. (#13873)
-- [changed] **Breaking Change**: The `FunctionCallingConfig` initializer and
- `Mode` enum are now internal; use one of the new type methods `auto()`,
- `any(allowedFunctionNames:)`, or `none()` to create a config. (#13873)
-- [changed] **Breaking Change**: The `CandidateResponse` type is now named
- `Candidate`. (#13897)
-- [changed] **Breaking Change**: The minimum deployment target for the SDK is
- now macOS 12.0; all other platform minimums remain the same at iOS 15.0,
- macCatalyst 15.0, tvOS 15.0, and watchOS 8.0. (#13903)
-- [changed] **Breaking Change**: All of the public properties of
- `GenerationConfig` are now `internal`; they all remain configurable in the
- initializer. (#13904)
-- [changed] The default request timeout is now 180 seconds instead of the
- platform-default value of 60 seconds for a `URLRequest`; this timeout may
- still be customized in `RequestOptions`. (#13722)
-- [changed] The response from `GenerativeModel.countTokens(...)` now includes
- `systemInstruction`, `tools` and `generationConfig` in the `totalTokens` and
- `totalBillableCharacters` counts, where applicable. (#13813)
-- [added] Added a new `HarmCategory` `.civicIntegrity` for filtering content
- that may be used to harm civic integrity. (#13728)
-- [added] Added `probabilityScore`, `severity` and `severityScore` in
- `SafetyRating` to provide more fine-grained detail on blocked responses.
- (#13875)
-- [added] Added a new `HarmBlockThreshold` `.off`, which turns off the safety
- filter. (#13863)
-- [added] Added an optional `HarmBlockMethod` parameter `method` in
- `SafetySetting` that configures whether responses are blocked based on the
- `probability` and/or `severity` of content being in a `HarmCategory`. (#13876)
-- [added] Added new `FinishReason` values `.blocklist`, `.prohibitedContent`,
- `.spii` and `.malformedFunctionCall` that may be reported. (#13860)
-- [added] Added new `BlockReason` values `.blocklist` and `.prohibitedContent`
- that may be reported when a prompt is blocked. (#13861)
-- [added] Added the `PromptFeedback` property `blockReasonMessage` that *may* be
- provided alongside the `blockReason`. (#13891)
-- [added] Added an optional `publicationDate` property that *may* be provided in
- `Citation`. (#13893)
-- [added] Added `presencePenalty` and `frequencyPenalty` parameters to
- `GenerationConfig`. (#13899)
-
-# 11.3.0
-- [added] Added `Decodable` conformance for `FunctionResponse`. (#13606)
-- [changed] **Breaking Change**: Reverted refactor of `GenerativeModel` and
- `Chat` as Swift actors (#13545) introduced in 11.2; The methods
- `generateContentStream`, `startChat` and `sendMessageStream` no longer need to
- be called with `await`. (#13703)
-
-# 11.2.0
-- [fixed] Resolved a decoding error for citations without a `uri` and added
- support for decoding `title` fields, which were previously ignored. (#13518)
-- [changed] **Breaking Change**: The methods for starting streaming requests
- (`generateContentStream` and `sendMessageStream`) are now throwing and
- asynchronous and must be called with `try await`. (#13545, #13573)
-- [changed] **Breaking Change**: Creating a chat instance (`startChat`) is now
- asynchronous and must be called with `await`. (#13545)
-- [changed] **Breaking Change**: The source image in the
- `ImageConversionError.couldNotConvertToJPEG` error case is now an enum value
- instead of the `Any` type. (#13575)
-- [added] Added support for specifying a JSON `responseSchema` in
- `GenerationConfig`; see
- [control generated output](https://firebase.google.com/docs/vertex-ai/structured-output?platform=ios)
- for more details. (#13576)
-
-# 10.29.0
-- [feature] Added community support for watchOS. (#13215)
-
-# 10.28.0
-- [changed] Removed uses of the `gemini-1.5-flash-preview-0514` model in docs
- and samples. Developers should now use the auto-updated versions,
- `gemini-1.5-pro` or `gemini-1.5-flash`, or a specific stable version; see
- [available model names](https://firebase.google.com/docs/vertex-ai/gemini-models#available-model-names)
- for more details. (#13099)
-- [feature] Added community support for tvOS and visionOS. (#13090, #13092)
-
-# 10.27.0
-- [changed] Removed uses of the `gemini-1.5-pro-preview-0409` model in docs and
- samples. Developers should now use `gemini-1.5-pro-preview-0514` or
- `gemini-1.5-flash-preview-0514`; see
- [available model names](https://firebase.google.com/docs/vertex-ai/gemini-models#available-model-names)
- for more details. (#12979)
-- [changed] Logged additional details when required APIs for Vertex AI are
- not enabled, and logged response payloads when requests fail. (#13007, #13009)
-
-# 10.26.0
-- [feature] Initial release of the Vertex AI for Firebase SDK (public preview).
- Learn how to
- [get started](https://firebase.google.com/docs/vertex-ai/get-started?platform=ios)
- with the SDK in your app.
diff --git a/FirebaseVertexAI/Sources/VertexAI.swift b/FirebaseVertexAI/Sources/VertexAI.swift
deleted file mode 100644
index fc4e5409ab0..00000000000
--- a/FirebaseVertexAI/Sources/VertexAI.swift
+++ /dev/null
@@ -1,110 +0,0 @@
-// Copyright 2024 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-@_exported public import FirebaseAI
-
-import FirebaseCore
-
-/// The Vertex AI for Firebase SDK provides access to Gemini models directly from your app.
-@available(iOS 15.0, macOS 12.0, macCatalyst 15.0, tvOS 15.0, watchOS 8.0, *)
-public class VertexAI {
- // MARK: - Public APIs
-
- /// Creates an instance of `VertexAI`.
- ///
- /// - Parameters:
- /// - app: A custom `FirebaseApp` used for initialization; if not specified, uses the default
- /// ``FirebaseApp``.
- /// - location: The region identifier, defaulting to `us-central1`; see
- /// [Vertex AI locations]
- /// (https://firebase.google.com/docs/vertex-ai/locations?platform=ios#available-locations)
- /// for a list of supported locations.
- /// - Returns: A `VertexAI` instance, configured with the custom `FirebaseApp`.
- public static func vertexAI(app: FirebaseApp? = nil,
- location: String = "us-central1") -> VertexAI {
- let firebaseAI = FirebaseAI.firebaseAI(app: app, backend: .vertexAI(location: location))
- return VertexAI(firebaseAI: firebaseAI)
- }
-
- /// Initializes a generative model with the given parameters.
- ///
- /// - Note: Refer to [Gemini models](https://firebase.google.com/docs/vertex-ai/gemini-models) for
- /// guidance on choosing an appropriate model for your use case.
- ///
- /// - Parameters:
- /// - modelName: The name of the model to use, for example `"gemini-1.5-flash"`; see
- /// [available model names
- /// ](https://firebase.google.com/docs/vertex-ai/gemini-models#available-model-names) for a
- /// list of supported model names.
- /// - generationConfig: The content generation parameters your model should use.
- /// - safetySettings: A value describing what types of harmful content your model should allow.
- /// - tools: A list of ``Tool`` objects that the model may use to generate the next response.
- /// - toolConfig: Tool configuration for any `Tool` specified in the request.
- /// - systemInstruction: Instructions that direct the model to behave a certain way; currently
- /// only text content is supported.
- /// - requestOptions: Configuration parameters for sending requests to the backend.
- public func generativeModel(modelName: String,
- generationConfig: GenerationConfig? = nil,
- safetySettings: [SafetySetting]? = nil,
- tools: [Tool]? = nil,
- toolConfig: ToolConfig? = nil,
- systemInstruction: ModelContent? = nil,
- requestOptions: RequestOptions = RequestOptions())
- -> GenerativeModel {
- return firebaseAI.generativeModel(
- modelName: modelName,
- generationConfig: generationConfig,
- safetySettings: safetySettings,
- tools: tools,
- toolConfig: toolConfig,
- systemInstruction: systemInstruction,
- requestOptions: requestOptions
- )
- }
-
- /// **[Public Preview]** Initializes an ``ImagenModel`` with the given parameters.
- ///
- /// > Warning: For Vertex AI in Firebase, image generation using Imagen 3 models is in Public
- /// Preview, which means that the feature is not subject to any SLA or deprecation policy and
- /// could change in backwards-incompatible ways.
- ///
- /// > Important: Only Imagen 3 models (named `imagen-3.0-*`) are supported.
- ///
- /// - Parameters:
- /// - modelName: The name of the Imagen 3 model to use, for example `"imagen-3.0-generate-002"`;
- /// see [model versions](https://firebase.google.com/docs/vertex-ai/models) for a list of
- /// supported Imagen 3 models.
- /// - generationConfig: Configuration options for generating images with Imagen.
- /// - safetySettings: Settings describing what types of potentially harmful content your model
- /// should allow.
- /// - requestOptions: Configuration parameters for sending requests to the backend.
- public func imagenModel(modelName: String, generationConfig: ImagenGenerationConfig? = nil,
- safetySettings: ImagenSafetySettings? = nil,
- requestOptions: RequestOptions = RequestOptions()) -> ImagenModel {
- return firebaseAI.imagenModel(
- modelName: modelName,
- generationConfig: generationConfig,
- safetySettings: safetySettings,
- requestOptions: requestOptions
- )
- }
-
- // MARK: - Internal APIs
-
- let firebaseAI: FirebaseAI
-
- init(firebaseAI: FirebaseAI) {
- self.firebaseAI = firebaseAI
- }
-}
diff --git a/FirebaseVertexAI/Tests/Unit/Resources/animals.mp4 b/FirebaseVertexAI/Tests/Unit/Resources/animals.mp4
deleted file mode 100644
index 8abcffb1eba..00000000000
Binary files a/FirebaseVertexAI/Tests/Unit/Resources/animals.mp4 and /dev/null differ
diff --git a/FirebaseVertexAI/Tests/Unit/Resources/blue.png b/FirebaseVertexAI/Tests/Unit/Resources/blue.png
deleted file mode 100644
index a0cf28c6edb..00000000000
Binary files a/FirebaseVertexAI/Tests/Unit/Resources/blue.png and /dev/null differ
diff --git a/FirebaseVertexAI/Tests/Unit/Resources/gemini-report.pdf b/FirebaseVertexAI/Tests/Unit/Resources/gemini-report.pdf
deleted file mode 100644
index 2a2f00cb77b..00000000000
Binary files a/FirebaseVertexAI/Tests/Unit/Resources/gemini-report.pdf and /dev/null differ
diff --git a/FirebaseVertexAI/Tests/Unit/Resources/hello-world.mp3 b/FirebaseVertexAI/Tests/Unit/Resources/hello-world.mp3
deleted file mode 100644
index be617e65a5a..00000000000
Binary files a/FirebaseVertexAI/Tests/Unit/Resources/hello-world.mp3 and /dev/null differ
diff --git a/FirebaseVertexAI/Tests/Unit/Snippets/ChatSnippets.swift b/FirebaseVertexAI/Tests/Unit/Snippets/ChatSnippets.swift
deleted file mode 100644
index 2d96b90e2fd..00000000000
--- a/FirebaseVertexAI/Tests/Unit/Snippets/ChatSnippets.swift
+++ /dev/null
@@ -1,67 +0,0 @@
-// Copyright 2024 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-import FirebaseCore
-import FirebaseVertexAI
-import XCTest
-
-// These snippet tests are intentionally skipped in CI jobs; see the README file in this directory
-// for instructions on running them manually.
-
-@available(iOS 15.0, macOS 12.0, macCatalyst 15.0, tvOS 15.0, watchOS 8.0, *)
-final class ChatSnippets: XCTestCase {
- lazy var model = VertexAI.vertexAI().generativeModel(modelName: "gemini-1.5-flash")
-
- override func setUpWithError() throws {
- try FirebaseApp.configureDefaultAppForSnippets()
- }
-
- override func tearDown() async throws {
- await FirebaseApp.deleteDefaultAppForSnippets()
- }
-
- func testChatNonStreaming() async throws {
- // Optionally specify existing chat history
- let history = [
- ModelContent(role: "user", parts: "Hello, I have 2 dogs in my house."),
- ModelContent(role: "model", parts: "Great to meet you. What would you like to know?"),
- ]
-
- // Initialize the chat with optional chat history
- let chat = model.startChat(history: history)
-
- // To generate text output, call sendMessage and pass in the message
- let response = try await chat.sendMessage("How many paws are in my house?")
- print(response.text ?? "No text in response.")
- }
-
- func testChatStreaming() async throws {
- // Optionally specify existing chat history
- let history = [
- ModelContent(role: "user", parts: "Hello, I have 2 dogs in my house."),
- ModelContent(role: "model", parts: "Great to meet you. What would you like to know?"),
- ]
-
- // Initialize the chat with optional chat history
- let chat = model.startChat(history: history)
-
- // To stream generated text output, call sendMessageStream and pass in the message
- let contentStream = try chat.sendMessageStream("How many paws are in my house?")
- for try await chunk in contentStream {
- if let text = chunk.text {
- print(text)
- }
- }
- }
-}
diff --git a/FirebaseVertexAI/Tests/Unit/Snippets/FirebaseAppSnippetsUtil.swift b/FirebaseVertexAI/Tests/Unit/Snippets/FirebaseAppSnippetsUtil.swift
deleted file mode 100644
index f463fbda188..00000000000
--- a/FirebaseVertexAI/Tests/Unit/Snippets/FirebaseAppSnippetsUtil.swift
+++ /dev/null
@@ -1,57 +0,0 @@
-// Copyright 2024 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-import FirebaseCore
-import Foundation
-import XCTest
-
-extension FirebaseApp {
- /// Configures the default `FirebaseApp` for use in snippets tests.
- ///
- /// Uses a `GoogleService-Info.plist` file from the
- /// [`Resources`](https://github.com/firebase/firebase-ios-sdk/tree/main/FirebaseVertexAI/Tests/Unit/Resources)
- /// directory.
- ///
- /// > Note: This is typically called in a snippet test's set up; overriding
- /// > `setUpWithError() throws` works well since it supports throwing errors.
- static func configureDefaultAppForSnippets() throws {
- guard let plistPath = BundleTestUtil.bundle().path(
- forResource: "GoogleService-Info",
- ofType: "plist"
- ) else {
- throw XCTSkip("No GoogleService-Info.plist found in FirebaseVertexAI/Tests/Unit/Resources.")
- }
-
- let options = try XCTUnwrap(FirebaseOptions(contentsOfFile: plistPath))
- FirebaseApp.configure(options: options)
-
- guard FirebaseApp.isDefaultAppConfigured() else {
- XCTFail("Default Firebase app not configured.")
- return
- }
- }
-
- /// Deletes the default `FirebaseApp` if configured.
- ///
- /// > Note: This is typically called in a snippet test's tear down; overriding
- /// > `tearDown() async throws` works well since deletion is asynchronous.
- static func deleteDefaultAppForSnippets() async {
- // Checking if `isDefaultAppConfigured()` before calling `FirebaseApp.app()` suppresses a log
- // message that "The default Firebase app has not yet been configured." during `tearDown` when
- // the tests are skipped. This reduces extraneous noise in the test logs.
- if FirebaseApp.isDefaultAppConfigured(), let app = FirebaseApp.app() {
- await app.delete()
- }
- }
-}
diff --git a/FirebaseVertexAI/Tests/Unit/Snippets/FunctionCallingSnippets.swift b/FirebaseVertexAI/Tests/Unit/Snippets/FunctionCallingSnippets.swift
deleted file mode 100644
index 492574dc11d..00000000000
--- a/FirebaseVertexAI/Tests/Unit/Snippets/FunctionCallingSnippets.swift
+++ /dev/null
@@ -1,110 +0,0 @@
-// Copyright 2024 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-import FirebaseCore
-import FirebaseVertexAI
-import XCTest
-
-// These snippet tests are intentionally skipped in CI jobs; see the README file in this directory
-// for instructions on running them manually.
-
-@available(iOS 15.0, macOS 12.0, macCatalyst 15.0, tvOS 15.0, watchOS 8.0, *)
-final class FunctionCallingSnippets: XCTestCase {
- override func setUpWithError() throws {
- try FirebaseApp.configureDefaultAppForSnippets()
- }
-
- override func tearDown() async throws {
- await FirebaseApp.deleteDefaultAppForSnippets()
- }
-
- func testFunctionCalling() async throws {
- // This function calls a hypothetical external API that returns
- // a collection of weather information for a given location on a given date.
- func fetchWeather(city: String, state: String, date: String) -> JSONObject {
- // TODO(developer): Write a standard function that would call an external weather API.
-
- // For demo purposes, this hypothetical response is hardcoded here in the expected format.
- return [
- "temperature": .number(38),
- "chancePrecipitation": .string("56%"),
- "cloudConditions": .string("partlyCloudy"),
- ]
- }
-
- let fetchWeatherTool = FunctionDeclaration(
- name: "fetchWeather",
- description: "Get the weather conditions for a specific city on a specific date.",
- parameters: [
- "location": .object(
- properties: [
- "city": .string(description: "The city of the location."),
- "state": .string(description: "The US state of the location."),
- ],
- description: """
- The name of the city and its state for which to get the weather. Only cities in the
- USA are supported.
- """
- ),
- "date": .string(
- description: """
- The date for which to get the weather. Date must be in the format: YYYY-MM-DD.
- """
- ),
- ]
- )
-
- // Initialize the Vertex AI service and the generative model.
- // Use a model that supports function calling, like a Gemini 1.5 model.
- let model = VertexAI.vertexAI().generativeModel(
- modelName: "gemini-1.5-flash",
- // Provide the function declaration to the model.
- tools: [.functionDeclarations([fetchWeatherTool])]
- )
-
- let chat = model.startChat()
- let prompt = "What was the weather in Boston on October 17, 2024?"
-
- // Send the user's question (the prompt) to the model using multi-turn chat.
- let response = try await chat.sendMessage(prompt)
-
- var functionResponses = [FunctionResponsePart]()
-
- // When the model responds with one or more function calls, invoke the function(s).
- for functionCall in response.functionCalls {
- if functionCall.name == "fetchWeather" {
- // TODO(developer): Handle invalid arguments.
- guard case let .object(location) = functionCall.args["location"] else { fatalError() }
- guard case let .string(city) = location["city"] else { fatalError() }
- guard case let .string(state) = location["state"] else { fatalError() }
- guard case let .string(date) = functionCall.args["date"] else { fatalError() }
-
- functionResponses.append(FunctionResponsePart(
- name: functionCall.name,
- response: fetchWeather(city: city, state: state, date: date)
- ))
- }
- // TODO(developer): Handle other potential function calls, if any.
- }
-
- // Send the response(s) from the function back to the model so that the model can use it
- // to generate its final response.
- let finalResponse = try await chat.sendMessage(
- [ModelContent(role: "function", parts: functionResponses)]
- )
-
- // Log the text response.
- print(finalResponse.text ?? "No text in response.")
- }
-}
diff --git a/FirebaseVertexAI/Tests/Unit/Snippets/MultimodalSnippets.swift b/FirebaseVertexAI/Tests/Unit/Snippets/MultimodalSnippets.swift
deleted file mode 100644
index 9e4683d8c64..00000000000
--- a/FirebaseVertexAI/Tests/Unit/Snippets/MultimodalSnippets.swift
+++ /dev/null
@@ -1,215 +0,0 @@
-// Copyright 2024 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-import FirebaseCore
-import FirebaseVertexAI
-import XCTest
-
-#if canImport(UIKit)
- import UIKit
-#endif // canImport(UIKit)
-
-// These snippet tests are intentionally skipped in CI jobs; see the README file in this directory
-// for instructions on running them manually.
-
-@available(iOS 15.0, macOS 12.0, macCatalyst 15.0, tvOS 15.0, watchOS 8.0, *)
-final class MultimodalSnippets: XCTestCase {
- let bundle = BundleTestUtil.bundle()
- lazy var model = VertexAI.vertexAI().generativeModel(modelName: "gemini-2.0-flash")
- lazy var videoURL = {
- guard let url = bundle.url(forResource: "animals", withExtension: "mp4") else {
- fatalError("Video file animals.mp4 not found in Resources.")
- }
- return url
- }()
-
- lazy var audioURL = {
- guard let url = bundle.url(forResource: "hello-world", withExtension: "mp3") else {
- fatalError("Audio file hello-world.mp3 not found in Resources.")
- }
- return url
- }()
-
- lazy var pdfURL = {
- guard let url = bundle.url(forResource: "gemini-report", withExtension: "pdf") else {
- fatalError("PDF file gemini-report.pdf not found in Resources.")
- }
- return url
- }()
-
- override func setUpWithError() throws {
- try FirebaseApp.configureDefaultAppForSnippets()
- }
-
- override func tearDown() async throws {
- await FirebaseApp.deleteDefaultAppForSnippets()
- }
-
- // MARK: - Image Input
-
- #if canImport(UIKit)
- func testMultimodalOneImageNonStreaming() async throws {
- guard let image = UIImage(systemName: "bicycle") else { fatalError() }
-
- // Provide a text prompt to include with the image
- let prompt = "What's in this picture?"
-
- // To generate text output, call generateContent and pass in the prompt
- let response = try await model.generateContent(image, prompt)
- print(response.text ?? "No text in response.")
- }
-
- func testMultimodalOneImageStreaming() async throws {
- guard let image = UIImage(systemName: "bicycle") else { fatalError() }
-
- // Provide a text prompt to include with the image
- let prompt = "What's in this picture?"
-
- // To stream generated text output, call generateContentStream and pass in the prompt
- let contentStream = try model.generateContentStream(image, prompt)
- for try await chunk in contentStream {
- if let text = chunk.text {
- print(text)
- }
- }
- }
-
- func testMultimodalMultiImagesNonStreaming() async throws {
- guard let image1 = UIImage(systemName: "car") else { fatalError() }
- guard let image2 = UIImage(systemName: "car.2") else { fatalError() }
-
- // Provide a text prompt to include with the images
- let prompt = "What's different between these pictures?"
-
- // To generate text output, call generateContent and pass in the prompt
- let response = try await model.generateContent(image1, image2, prompt)
- print(response.text ?? "No text in response.")
- }
-
- func testMultimodalMultiImagesStreaming() async throws {
- guard let image1 = UIImage(systemName: "car") else { fatalError() }
- guard let image2 = UIImage(systemName: "car.2") else { fatalError() }
-
- // Provide a text prompt to include with the images
- let prompt = "What's different between these pictures?"
-
- // To stream generated text output, call generateContentStream and pass in the prompt
- let contentStream = try model.generateContentStream(image1, image2, prompt)
- for try await chunk in contentStream {
- if let text = chunk.text {
- print(text)
- }
- }
- }
- #endif // canImport(UIKit)
-
- // MARK: - Video Input
-
- func testMultimodalVideoNonStreaming() async throws {
- // Provide the video as `Data` with the appropriate MIME type
- let video = try InlineDataPart(data: Data(contentsOf: videoURL), mimeType: "video/mp4")
-
- // Provide a text prompt to include with the video
- let prompt = "What is in the video?"
-
- // To generate text output, call generateContent with the text and video
- let response = try await model.generateContent(video, prompt)
- print(response.text ?? "No text in response.")
- }
-
- func testMultimodalVideoStreaming() async throws {
- // Provide the video as `Data` with the appropriate MIME type
- let video = try InlineDataPart(data: Data(contentsOf: videoURL), mimeType: "video/mp4")
-
- // Provide a text prompt to include with the video
- let prompt = "What is in the video?"
-
- // To stream generated text output, call generateContentStream with the text and video
- let contentStream = try model.generateContentStream(video, prompt)
- for try await chunk in contentStream {
- if let text = chunk.text {
- print(text)
- }
- }
- }
-
- // MARK: - Audio Input
-
- func testMultiModalAudioNonStreaming() async throws {
- // Provide the audio as `Data` with the appropriate MIME type
- let audio = try InlineDataPart(data: Data(contentsOf: audioURL), mimeType: "audio/mpeg")
-
- // Provide a text prompt to include with the audio
- let prompt = "Transcribe what's said in this audio recording."
-
- // To generate text output, call `generateContent` with the audio and text prompt
- let response = try await model.generateContent(audio, prompt)
-
- // Print the generated text, handling the case where it might be nil
- print(response.text ?? "No text in response.")
- }
-
- func testMultiModalAudioStreaming() async throws {
- // Provide the audio as `Data` with the appropriate MIME type
- let audio = try InlineDataPart(data: Data(contentsOf: audioURL), mimeType: "audio/mpeg")
-
- // Provide a text prompt to include with the audio
- let prompt = "Transcribe what's said in this audio recording."
-
- // To stream generated text output, call `generateContentStream` with the audio and text prompt
- let contentStream = try model.generateContentStream(audio, prompt)
-
- // Print the generated text, handling the case where it might be nil
- for try await chunk in contentStream {
- if let text = chunk.text {
- print(text)
- }
- }
- }
-
- // MARK: - Document Input
-
- func testMultiModalPDFStreaming() async throws {
- // Provide the PDF as `Data` with the appropriate MIME type
- let pdf = try InlineDataPart(data: Data(contentsOf: pdfURL), mimeType: "application/pdf")
-
- // Provide a text prompt to include with the PDF file
- let prompt = "Summarize the important results in this report."
-
- // To stream generated text output, call `generateContentStream` with the PDF file and text
- // prompt
- let contentStream = try model.generateContentStream(pdf, prompt)
-
- // Print the generated text, handling the case where it might be nil
- for try await chunk in contentStream {
- if let text = chunk.text {
- print(text)
- }
- }
- }
-
- func testMultiModalPDFNonStreaming() async throws {
- // Provide the PDF as `Data` with the appropriate MIME type
- let pdf = try InlineDataPart(data: Data(contentsOf: pdfURL), mimeType: "application/pdf")
-
- // Provide a text prompt to include with the PDF file
- let prompt = "Summarize the important results in this report."
-
- // To generate text output, call `generateContent` with the PDF file and text prompt
- let response = try await model.generateContent(pdf, prompt)
-
- // Print the generated text, handling the case where it might be nil
- print(response.text ?? "No text in response.")
- }
-}
diff --git a/FirebaseVertexAI/Tests/Unit/Snippets/README.md b/FirebaseVertexAI/Tests/Unit/Snippets/README.md
deleted file mode 100644
index 8d03458c456..00000000000
--- a/FirebaseVertexAI/Tests/Unit/Snippets/README.md
+++ /dev/null
@@ -1,10 +0,0 @@
-# Vertex AI in Firebase Code Snippet Tests
-
-These "tests" are for verifying that the code snippets provided in our
-documentation continue to compile. They are intentionally skipped in CI but can
-be manually run to verify expected behavior / outputs.
-
-To run the tests, place a valid `GoogleService-Info.plist` file in the
-[`FirebaseVertexAI/Tests/Unit/Resources`](https://github.com/firebase/firebase-ios-sdk/tree/main/FirebaseVertexAI/Tests/Unit/Resources)
-folder. They may then be invoked individually or alongside the rest of the unit
-tests in Xcode.
diff --git a/FirebaseVertexAI/Tests/Unit/Snippets/StructuredOutputSnippets.swift b/FirebaseVertexAI/Tests/Unit/Snippets/StructuredOutputSnippets.swift
deleted file mode 100644
index 1ad137188c5..00000000000
--- a/FirebaseVertexAI/Tests/Unit/Snippets/StructuredOutputSnippets.swift
+++ /dev/null
@@ -1,96 +0,0 @@
-// Copyright 2024 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-import FirebaseCore
-import FirebaseVertexAI
-import XCTest
-
-// These snippet tests are intentionally skipped in CI jobs; see the README file in this directory
-// for instructions on running them manually.
-
-@available(iOS 15.0, macOS 12.0, macCatalyst 15.0, tvOS 15.0, watchOS 8.0, *)
-final class StructuredOutputSnippets: XCTestCase {
- override func setUpWithError() throws {
- try FirebaseApp.configureDefaultAppForSnippets()
- }
-
- override func tearDown() async throws {
- await FirebaseApp.deleteDefaultAppForSnippets()
- }
-
- func testStructuredOutputJSONBasic() async throws {
- // Provide a JSON schema object using a standard format.
- // Later, pass this schema object into `responseSchema` in the generation config.
- let jsonSchema = Schema.object(
- properties: [
- "characters": Schema.array(
- items: .object(
- properties: [
- "name": .string(),
- "age": .integer(),
- "species": .string(),
- "accessory": .enumeration(values: ["hat", "belt", "shoes"]),
- ],
- optionalProperties: ["accessory"]
- )
- ),
- ]
- )
-
- // Initialize the Vertex AI service and the generative model.
- // Use a model that supports `responseSchema`, like one of the Gemini 1.5 models.
- let model = VertexAI.vertexAI().generativeModel(
- modelName: "gemini-1.5-flash",
- // In the generation config, set the `responseMimeType` to `application/json`
- // and pass the JSON schema object into `responseSchema`.
- generationConfig: GenerationConfig(
- responseMIMEType: "application/json",
- responseSchema: jsonSchema
- )
- )
-
- let prompt = "For use in a children's card game, generate 10 animal-based characters."
-
- let response = try await model.generateContent(prompt)
- print(response.text ?? "No text in response.")
- }
-
- func testStructuredOutputEnumBasic() async throws {
- // Provide an enum schema object using a standard format.
- // Later, pass this schema object into `responseSchema` in the generation config.
- let enumSchema = Schema.enumeration(values: ["drama", "comedy", "documentary"])
-
- // Initialize the Vertex AI service and the generative model.
- // Use a model that supports `responseSchema`, like one of the Gemini 1.5 models.
- let model = VertexAI.vertexAI().generativeModel(
- modelName: "gemini-1.5-flash",
- // In the generation config, set the `responseMimeType` to `text/x.enum`
- // and pass the enum schema object into `responseSchema`.
- generationConfig: GenerationConfig(
- responseMIMEType: "text/x.enum",
- responseSchema: enumSchema
- )
- )
-
- let prompt = """
- The film aims to educate and inform viewers about real-life subjects, events, or people.
- It offers a factual record of a particular topic by combining interviews, historical footage,
- and narration. The primary purpose of a film is to present information and provide insights
- into various aspects of reality.
- """
-
- let response = try await model.generateContent(prompt)
- print(response.text ?? "No text in response.")
- }
-}
diff --git a/FirebaseVertexAI/Tests/Unit/Snippets/TextSnippets.swift b/FirebaseVertexAI/Tests/Unit/Snippets/TextSnippets.swift
deleted file mode 100644
index bd7c70fa06b..00000000000
--- a/FirebaseVertexAI/Tests/Unit/Snippets/TextSnippets.swift
+++ /dev/null
@@ -1,55 +0,0 @@
-// Copyright 2024 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-import FirebaseCore
-import FirebaseVertexAI
-import XCTest
-
-// These snippet tests are intentionally skipped in CI jobs; see the README file in this directory
-// for instructions on running them manually.
-
-@available(iOS 15.0, macOS 12.0, macCatalyst 15.0, tvOS 15.0, watchOS 8.0, *)
-final class TextSnippets: XCTestCase {
- lazy var model = VertexAI.vertexAI().generativeModel(modelName: "gemini-1.5-flash")
-
- override func setUpWithError() throws {
- try FirebaseApp.configureDefaultAppForSnippets()
- }
-
- override func tearDown() async throws {
- await FirebaseApp.deleteDefaultAppForSnippets()
- }
-
- func testTextOnlyNonStreaming() async throws {
- // Provide a prompt that contains text
- let prompt = "Write a story about a magic backpack."
-
- // To generate text output, call generateContent with the text input
- let response = try await model.generateContent(prompt)
- print(response.text ?? "No text in response.")
- }
-
- func testTextOnlyStreaming() async throws {
- // Provide a prompt that contains text
- let prompt = "Write a story about a magic backpack."
-
- // To stream generated text output, call generateContentStream with the text input
- let contentStream = try model.generateContentStream(prompt)
- for try await chunk in contentStream {
- if let text = chunk.text {
- print(text)
- }
- }
- }
-}
diff --git a/FirebaseVertexAI/Tests/Unit/TestUtilities/BundleTestUtil.swift b/FirebaseVertexAI/Tests/Unit/TestUtilities/BundleTestUtil.swift
deleted file mode 100644
index 272be41c1e4..00000000000
--- a/FirebaseVertexAI/Tests/Unit/TestUtilities/BundleTestUtil.swift
+++ /dev/null
@@ -1,31 +0,0 @@
-// Copyright 2024 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-import Foundation
-
-/// `Bundle` test utilities.
-final class BundleTestUtil {
- /// Returns the `Bundle` for the test module or target containing the file.
- ///
- /// This abstracts away the `Bundle` differences between SPM and CocoaPods tests.
- static func bundle() -> Bundle {
- #if SWIFT_PACKAGE
- return Bundle.module
- #else // SWIFT_PACKAGE
- return Bundle(for: Self.self)
- #endif // SWIFT_PACKAGE
- }
-
- private init() {}
-}
diff --git a/FirebaseVertexAI/Tests/Unit/VertexAIAPITests.swift b/FirebaseVertexAI/Tests/Unit/VertexAIAPITests.swift
deleted file mode 100644
index 8514d76b543..00000000000
--- a/FirebaseVertexAI/Tests/Unit/VertexAIAPITests.swift
+++ /dev/null
@@ -1,213 +0,0 @@
-// Copyright 2023 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-import FirebaseCore
-import FirebaseVertexAI
-import XCTest
-#if canImport(AppKit)
- import AppKit // For NSImage extensions.
-#elseif canImport(UIKit)
- import UIKit // For UIImage extensions.
-#endif
-
-@available(iOS 15.0, macOS 12.0, macCatalyst 15.0, tvOS 15.0, watchOS 8.0, *)
-final class VertexAIAPITests: XCTestCase {
- func codeSamples() async throws {
- let app = FirebaseApp.app()
- let config = GenerationConfig(temperature: 0.2,
- topP: 0.1,
- topK: 16,
- candidateCount: 4,
- maxOutputTokens: 256,
- stopSequences: ["..."],
- responseMIMEType: "text/plain")
- let filters = [SafetySetting(harmCategory: .dangerousContent, threshold: .blockOnlyHigh)]
- let systemInstruction = ModelContent(
- role: "system",
- parts: TextPart("Talk like a pirate.")
- )
-
- let requestOptions = RequestOptions()
- let _ = RequestOptions(timeout: 30.0)
-
- // Instantiate Vertex AI SDK - Default App
- let vertexAI = VertexAI.vertexAI()
- let _ = VertexAI.vertexAI(location: "my-location")
-
- // Instantiate Vertex AI SDK - Custom App
- let _ = VertexAI.vertexAI(app: app!)
- let _ = VertexAI.vertexAI(app: app!, location: "my-location")
-
- // Permutations without optional arguments.
-
- let _ = vertexAI.generativeModel(modelName: "gemini-1.0-pro")
-
- let _ = vertexAI.generativeModel(
- modelName: "gemini-1.0-pro",
- safetySettings: filters
- )
-
- let _ = vertexAI.generativeModel(
- modelName: "gemini-1.0-pro",
- generationConfig: config
- )
-
- let _ = vertexAI.generativeModel(
- modelName: "gemini-1.0-pro",
- systemInstruction: systemInstruction
- )
-
- // All arguments passed.
- let genAI = vertexAI.generativeModel(
- modelName: "gemini-1.0-pro",
- generationConfig: config, // Optional
- safetySettings: filters, // Optional
- systemInstruction: systemInstruction, // Optional
- requestOptions: requestOptions // Optional
- )
-
- // Full Typed Usage
- let pngData = Data() // ....
- let contents = [ModelContent(
- role: "user",
- parts: [
- TextPart("Is it a cat?"),
- InlineDataPart(data: pngData, mimeType: "image/png"),
- ]
- )]
-
- do {
- let response = try await genAI.generateContent(contents)
- print(response.text ?? "Couldn't get text... check status")
- } catch {
- print("Error generating content: \(error)")
- }
-
- // Content input combinations.
- let _ = try await genAI.generateContent("Constant String")
- let str = "String Variable"
- let _ = try await genAI.generateContent(str)
- let _ = try await genAI.generateContent([str])
- let _ = try await genAI.generateContent(str, "abc", "def")
- let _ = try await genAI.generateContent(
- str,
- FileDataPart(uri: "gs://test-bucket/image.jpg", mimeType: "image/jpeg")
- )
- #if canImport(UIKit)
- _ = try await genAI.generateContent(UIImage())
- _ = try await genAI.generateContent([UIImage()])
- _ = try await genAI.generateContent([str, UIImage(), TextPart(str)])
- _ = try await genAI.generateContent(str, UIImage(), "def", UIImage())
- _ = try await genAI.generateContent([str, UIImage(), "def", UIImage()])
- _ = try await genAI.generateContent([ModelContent(parts: "def", UIImage()),
- ModelContent(parts: "def", UIImage())])
- #elseif canImport(AppKit)
- _ = try await genAI.generateContent(NSImage())
- _ = try await genAI.generateContent([NSImage()])
- _ = try await genAI.generateContent(str, NSImage(), "def", NSImage())
- _ = try await genAI.generateContent([str, NSImage(), "def", NSImage()])
- #endif
-
- // PartsRepresentable combinations.
- let _ = ModelContent(parts: [TextPart(str)])
- let _ = ModelContent(role: "model", parts: [TextPart(str)])
- let _ = ModelContent(parts: "Constant String")
- let _ = ModelContent(parts: str)
- let _ = ModelContent(parts: [str])
- let _ = ModelContent(parts: [str, InlineDataPart(data: Data(), mimeType: "foo")])
- #if canImport(UIKit)
- _ = ModelContent(role: "user", parts: UIImage())
- _ = ModelContent(role: "user", parts: [UIImage()])
- _ = ModelContent(parts: [str, UIImage()])
- // Note: without explicitly specifying `: [any PartsRepresentable]` below, this will fail
- // to compile with "Cannot convert value of type `[Any]` to expected type `[any Part]`".
- let representable2: [any PartsRepresentable] = [str, UIImage()]
- _ = ModelContent(parts: representable2)
- _ = ModelContent(parts: [str, UIImage(), TextPart(str)])
- #elseif canImport(AppKit)
- _ = ModelContent(role: "user", parts: NSImage())
- _ = ModelContent(role: "user", parts: [NSImage()])
- _ = ModelContent(parts: [str, NSImage()])
- // Note: without explicitly specifying `: [any PartsRepresentable]` below, this will fail
- // to compile with "Cannot convert value of type `[Any]` to expected type `[any Part]`".
- let representable2: [any PartsRepresentable] = [str, NSImage()]
- _ = ModelContent(parts: representable2)
- _ = ModelContent(parts: [str, NSImage(), TextPart(str)])
- #endif
-
- // countTokens API
- let _: CountTokensResponse = try await genAI.countTokens("What color is the Sky?")
- #if canImport(UIKit)
- let _: CountTokensResponse = try await genAI.countTokens("What color is the Sky?",
- UIImage())
- let _: CountTokensResponse = try await genAI.countTokens([
- ModelContent(parts: "What color is the Sky?", UIImage()),
- ModelContent(parts: UIImage(), "What color is the Sky?", UIImage()),
- ])
- #endif
-
- // Chat
- _ = genAI.startChat()
- _ = genAI.startChat(history: [ModelContent(parts: "abc")])
- }
-
- // Public API tests for GenerateContentResponse.
- func generateContentResponseAPI() {
- let response = GenerateContentResponse(candidates: [])
-
- let _: [Candidate] = response.candidates
- let _: PromptFeedback? = response.promptFeedback
-
- // Usage Metadata
- guard let usageMetadata = response.usageMetadata else { fatalError() }
- let _: Int = usageMetadata.promptTokenCount
- let _: Int = usageMetadata.candidatesTokenCount
- let _: Int = usageMetadata.totalTokenCount
-
- // Computed Properties
- let _: String? = response.text
- let _: [FunctionCallPart] = response.functionCalls
- }
-
- // Result builder alternative
-
- /*
- let pngData = Data() // ....
- let contents = [GenAIContent(role: "user",
- parts: [
- .text("Is it a cat?"),
- .png(pngData)
- ])]
-
- // Turns into...
-
- let contents = GenAIContent {
- Role("user") {
- Text("Is this a cat?")
- Image(png: pngData)
- }
- }
-
- GenAIContent {
- ForEach(myInput) { input in
- Role(input.role) {
- input.contents
- }
- }
- }
-
- // Thoughts: this looks great from a code demo, but since I assume most content will be
- // user generated, the result builder may not be the best API.
- */
-}
diff --git a/Package.swift b/Package.swift
index ec21d0b86d1..db6fe329c2c 100644
--- a/Package.swift
+++ b/Package.swift
@@ -29,14 +29,6 @@ let package = Package(
       name: "FirebaseAI",
       targets: ["FirebaseAI"]
     ),
-    // Backwards-compatibility library for existing "Vertex AI in Firebase" users.
-    .library(
-      name: "FirebaseVertexAI",
-      targets: [
-        "FirebaseAI",
-        "FirebaseVertexAI",
-      ]
-    ),
     .library(
       name: "FirebaseAnalytics",
       targets: ["FirebaseAnalyticsTarget"]
@@ -226,24 +218,6 @@
         .headerSearchPath("../../../"),
       ]
     ),
-    // Backwards-compatibility targets for existing "Vertex AI in Firebase" users.
-    .target(
-      name: "FirebaseVertexAI",
-      dependencies: [
-        "FirebaseAI",
-      ],
-      path: "FirebaseVertexAI/Sources"
-    ),
-    .testTarget(
-      name: "FirebaseVertexAIUnit",
-      dependencies: [
-        "FirebaseVertexAI",
-      ],
-      path: "FirebaseVertexAI/Tests/Unit",
-      resources: [
-        .process("Resources"),
-      ]
-    ),
     // MARK: - Firebase Core
diff --git a/ReleaseTooling/Sources/FirebaseManifest/FirebaseManifest.swift b/ReleaseTooling/Sources/FirebaseManifest/FirebaseManifest.swift
index d807ec0e69e..20cd0ebfc8a 100755
--- a/ReleaseTooling/Sources/FirebaseManifest/FirebaseManifest.swift
+++ b/ReleaseTooling/Sources/FirebaseManifest/FirebaseManifest.swift
@@ -53,7 +53,6 @@ public let shared = Manifest(
Pod("FirebasePerformance", platforms: ["ios", "tvos"], zip: true),
Pod("FirebaseStorage", zip: true),
Pod("FirebaseMLModelDownloader", isBeta: true, zip: true),
- Pod("FirebaseVertexAI", zip: true),
Pod("Firebase", allowWarnings: true, platforms: ["ios", "tvos", "macos"], zip: true),
Pod("FirebaseCombineSwift", releasing: false, zip: false),
]