@@ -69,16 +69,13 @@ struct CountTokensIntegrationTests {
#expect(promptTokensDetails.tokenCount == response.totalTokens)
}

- @Test(
- /* System instructions are not supported on the v1 Developer API. */
- arguments: InstanceConfig.allConfigsExceptGoogleAI_v1
- )
+ @Test(arguments: InstanceConfig.allConfigs)
func countTokens_text_systemInstruction(_ config: InstanceConfig) async throws {
let model = FirebaseAI.componentInstance(config).generativeModel(
modelName: ModelNames.gemini2Flash,
generationConfig: generationConfig,
safetySettings: safetySettings,
- systemInstruction: systemInstruction // Not supported on the v1 Developer API
+ systemInstruction: systemInstruction
)

let response = try await model.countTokens("What is your favourite colour?")
@@ -96,32 +93,7 @@ struct CountTokensIntegrationTests {
#expect(promptTokensDetails.tokenCount == response.totalTokens)
}

- @Test(arguments: [
- /* System instructions are not supported on the v1 Developer API. */
- InstanceConfig.googleAI_v1_freeTier_bypassProxy,
- ])
- func countTokens_text_systemInstruction_unsupported(_ config: InstanceConfig) async throws {
- let model = FirebaseAI.componentInstance(config).generativeModel(
- modelName: ModelNames.gemini2Flash,
- systemInstruction: systemInstruction // Not supported on the v1 Developer API
- )
-
- try await #require(
- throws: BackendError.self,
- """
- If this test fails (i.e., `countTokens` succeeds), remove \(config) from this test and add it
- to `countTokens_text_systemInstruction`.
- """,
- performing: {
- try await model.countTokens("What is your favourite colour?")
- }
- )
- }

- @Test(
- /* System instructions are not supported on the v1 Developer API. */
- arguments: InstanceConfig.allConfigsExceptGoogleAI_v1
- )
+ @Test(arguments: InstanceConfig.allConfigs)
func countTokens_jsonSchema(_ config: InstanceConfig) async throws {
let model = FirebaseAI.componentInstance(config).generativeModel(
modelName: ModelNames.gemini2Flash,
@@ -48,15 +48,12 @@ struct GenerateContentIntegrationTests {
}

@Test(arguments: [
- (InstanceConfig.vertexAI_v1, ModelNames.gemini2FlashLite),
- (InstanceConfig.vertexAI_v1_staging, ModelNames.gemini2FlashLite),
(InstanceConfig.vertexAI_v1beta, ModelNames.gemini2FlashLite),
(InstanceConfig.vertexAI_v1beta_staging, ModelNames.gemini2FlashLite),
(InstanceConfig.googleAI_v1beta, ModelNames.gemini2FlashLite),
(InstanceConfig.googleAI_v1beta, ModelNames.gemma3_4B),
(InstanceConfig.googleAI_v1beta_staging, ModelNames.gemini2FlashLite),
(InstanceConfig.googleAI_v1beta_staging, ModelNames.gemma3_4B),
- (InstanceConfig.googleAI_v1_freeTier_bypassProxy, ModelNames.gemini2FlashLite),
(InstanceConfig.googleAI_v1beta_freeTier_bypassProxy, ModelNames.gemini2FlashLite),
(InstanceConfig.googleAI_v1beta_freeTier_bypassProxy, ModelNames.gemma3_4B),
])
@@ -98,19 +95,18 @@ struct GenerateContentIntegrationTests {

@Test(
"Generate an enum and provide a system instruction",
- /* System instructions are not supported on the v1 Developer API. */
- arguments: InstanceConfig.allConfigsExceptGoogleAI_v1
+ arguments: InstanceConfig.allConfigs
)
func generateContentEnum(_ config: InstanceConfig) async throws {
let model = FirebaseAI.componentInstance(config).generativeModel(
modelName: ModelNames.gemini2FlashLite,
generationConfig: GenerationConfig(
responseMIMEType: "text/x.enum", // Not supported on the v1 Developer API
responseMIMEType: "text/x.enum",
responseSchema: .enumeration(values: ["Red", "Green", "Blue"])
),
safetySettings: safetySettings,
- tools: [], // Not supported on the v1 Developer API
- toolConfig: .init(functionCallingConfig: .none()), // Not supported on the v1 Developer API
+ tools: [],
+ toolConfig: .init(functionCallingConfig: .none()),
systemInstruction: ModelContent(role: "system", parts: "Always pick blue.")
)
let prompt = "What is your favourite colour?"
@@ -136,7 +132,6 @@ struct GenerateContentIntegrationTests {
}

@Test(arguments: [
- InstanceConfig.vertexAI_v1,
InstanceConfig.vertexAI_v1beta,
InstanceConfig.googleAI_v1beta,
InstanceConfig.googleAI_v1beta_staging,
@@ -190,15 +185,12 @@ struct GenerateContentIntegrationTests {
// MARK: Streaming Tests

@Test(arguments: [
- (InstanceConfig.vertexAI_v1, ModelNames.gemini2FlashLite),
- (InstanceConfig.vertexAI_v1_staging, ModelNames.gemini2FlashLite),
(InstanceConfig.vertexAI_v1beta, ModelNames.gemini2FlashLite),
(InstanceConfig.vertexAI_v1beta_staging, ModelNames.gemini2FlashLite),
(InstanceConfig.googleAI_v1beta, ModelNames.gemini2FlashLite),
(InstanceConfig.googleAI_v1beta, ModelNames.gemma3_4B),
(InstanceConfig.googleAI_v1beta_staging, ModelNames.gemini2FlashLite),
(InstanceConfig.googleAI_v1beta_staging, ModelNames.gemma3_4B),
- (InstanceConfig.googleAI_v1_freeTier_bypassProxy, ModelNames.gemini2FlashLite),
(InstanceConfig.googleAI_v1beta_freeTier_bypassProxy, ModelNames.gemini2FlashLite),
(InstanceConfig.googleAI_v1beta_freeTier_bypassProxy, ModelNames.gemma3_4B),
])
@@ -19,7 +19,7 @@ import FirebaseCore
import FirebaseStorage
import XCTest

- // TODO(#14405): Migrate to Swift Testing and parameterize tests to run on both `v1` and `v1beta`.
+ // TODO(#14405): Migrate to Swift Testing and parameterize tests to run on both Vertex AI and Google AI.
final class IntegrationTests: XCTestCase {
// Set temperature, topP and topK to lowest allowed values to make responses more deterministic.
let generationConfig = GenerationConfig(
8 changes: 4 additions & 4 deletions FirebaseAI/Tests/TestApp/Tests/Integration/SchemaTests.swift
@@ -48,7 +48,7 @@ struct SchemaTests {
storage = Storage.storage()
}

- @Test(arguments: InstanceConfig.allConfigsExceptGoogleAI_v1)
+ @Test(arguments: InstanceConfig.allConfigs)
func generateContentSchemaItems(_ config: InstanceConfig) async throws {
let model = FirebaseAI.componentInstance(config).generativeModel(
modelName: ModelNames.gemini2FlashLite,
@@ -73,7 +73,7 @@ struct SchemaTests {
#expect(decodedJSON.count <= 5, "Expected at most 5 cities, but got \(decodedJSON.count)")
}

- @Test(arguments: InstanceConfig.allConfigsExceptGoogleAI_v1)
+ @Test(arguments: InstanceConfig.allConfigs)
func generateContentSchemaNumberRange(_ config: InstanceConfig) async throws {
let model = FirebaseAI.componentInstance(config).generativeModel(
modelName: ModelNames.gemini2FlashLite,
@@ -96,7 +96,7 @@ struct SchemaTests {
#expect(decodedNumber <= 120.0, "Expected a number <= 120, but got \(decodedNumber)")
}

- @Test(arguments: InstanceConfig.allConfigsExceptGoogleAI_v1)
+ @Test(arguments: InstanceConfig.allConfigs)
func generateContentSchemaNumberRangeMultiType(_ config: InstanceConfig) async throws {
struct ProductInfo: Codable {
let productName: String
@@ -149,7 +149,7 @@ struct SchemaTests {
#expect(rating <= 5, "Expected a rating <= 5, but got \(rating)")
}

- @Test(arguments: InstanceConfig.allConfigsExceptGoogleAI_v1)
+ @Test(arguments: InstanceConfig.allConfigs)
func generateContentAnyOfSchema(_ config: InstanceConfig) async throws {
struct MailingAddress: Decodable {
let streetAddress: String
21 changes: 0 additions & 21 deletions FirebaseAI/Tests/TestApp/Tests/Utilities/InstanceConfig.swift
@@ -20,12 +20,6 @@ import Testing
@testable import struct FirebaseAI.APIConfig

struct InstanceConfig: Equatable, Encodable {
- static let vertexAI_v1 = InstanceConfig(
- apiConfig: APIConfig(service: .vertexAI(endpoint: .firebaseProxyProd), version: .v1)
- )
- static let vertexAI_v1_staging = InstanceConfig(
- apiConfig: APIConfig(service: .vertexAI(endpoint: .firebaseProxyStaging), version: .v1)
- )
static let vertexAI_v1beta = InstanceConfig(
apiConfig: APIConfig(service: .vertexAI(endpoint: .firebaseProxyProd), version: .v1beta)
)
@@ -38,33 +32,19 @@ struct InstanceConfig: Equatable, Encodable {
static let googleAI_v1beta_staging = InstanceConfig(
apiConfig: APIConfig(service: .googleAI(endpoint: .firebaseProxyStaging), version: .v1beta)
)
- static let googleAI_v1_freeTier_bypassProxy = InstanceConfig(
- appName: FirebaseAppNames.spark,
- apiConfig: APIConfig(service: .googleAI(endpoint: .googleAIBypassProxy), version: .v1)
- )
static let googleAI_v1beta_freeTier_bypassProxy = InstanceConfig(
appName: FirebaseAppNames.spark,
apiConfig: APIConfig(service: .googleAI(endpoint: .googleAIBypassProxy), version: .v1beta)
)

static let allConfigs = [
- vertexAI_v1,
- vertexAI_v1_staging,
vertexAI_v1beta,
vertexAI_v1beta_staging,
googleAI_v1beta,
googleAI_v1beta_staging,
- googleAI_v1_freeTier_bypassProxy,
googleAI_v1beta_freeTier_bypassProxy,
]
- static let allConfigsExceptGoogleAI_v1 = allConfigs.filter {
- $0 != googleAI_v1_freeTier_bypassProxy
- }

- static let vertexAI_v1_appCheckNotConfigured = InstanceConfig(
- appName: FirebaseAppNames.appCheckNotConfigured,
- apiConfig: APIConfig(service: .vertexAI(endpoint: .firebaseProxyProd), version: .v1)
- )
static let vertexAI_v1beta_appCheckNotConfigured = InstanceConfig(
appName: FirebaseAppNames.appCheckNotConfigured,
apiConfig: APIConfig(service: .vertexAI(endpoint: .firebaseProxyProd), version: .v1beta)
@@ -75,7 +55,6 @@ struct InstanceConfig: Equatable, Encodable {
)

static let appCheckNotConfiguredConfigs = [
- vertexAI_v1_appCheckNotConfigured,
vertexAI_v1beta_appCheckNotConfigured,
googleAI_v1beta_appCheckNotConfigured,
]
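
With every `v1` configuration gone, `InstanceConfig.allConfigs` contains only `v1beta` entries, which is why the suites above can now parameterize over `allConfigs` instead of `allConfigsExceptGoogleAI_v1`. A minimal sketch of the resulting test pattern, reusing the helpers visible in this diff; the test struct, test name, and prompt below are illustrative and not part of this PR:

import FirebaseAI
import Testing

// Illustrative only: mirrors the parameterized pattern used throughout this PR.
// InstanceConfig, ModelNames, and FirebaseAI.componentInstance(_:) come from the
// test app's utilities shown in this diff.
struct SystemInstructionSmokeTests {
  // All remaining configs target v1beta endpoints, so system instructions are
  // supported everywhere and no "except v1" filter is needed.
  @Test(arguments: InstanceConfig.allConfigs)
  func countTokens_withSystemInstruction(_ config: InstanceConfig) async throws {
    let model = FirebaseAI.componentInstance(config).generativeModel(
      modelName: ModelNames.gemini2Flash,
      systemInstruction: ModelContent(role: "system", parts: "Answer briefly.")
    )
    let response = try await model.countTokens("What is your favourite colour?")
    #expect(response.totalTokens > 0)
  }
}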