Skip to content

Commit ee1bdc9

Browse files
authored
[AI] Migrate integration tests from deprecated DevAPI Gemini 2.0 (#15809)
1 parent b7228a5 commit ee1bdc9

File tree

3 files changed

+24
-21
lines changed

3 files changed

+24
-21
lines changed

FirebaseAI/Tests/TestApp/Tests/Integration/CountTokensIntegrationTests.swift

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -49,7 +49,7 @@ struct CountTokensIntegrationTests {
4949
func countTokens_text(_ config: InstanceConfig) async throws {
5050
let prompt = "Why is the sky blue?"
5151
let model = FirebaseAI.componentInstance(config).generativeModel(
52-
modelName: ModelNames.gemini2Flash,
52+
modelName: ModelNames.gemini2_5_Flash,
5353
generationConfig: generationConfig,
5454
safetySettings: safetySettings
5555
)
@@ -66,7 +66,7 @@ struct CountTokensIntegrationTests {
6666
@Test(arguments: InstanceConfig.allConfigs)
6767
func countTokens_text_systemInstruction(_ config: InstanceConfig) async throws {
6868
let model = FirebaseAI.componentInstance(config).generativeModel(
69-
modelName: ModelNames.gemini2Flash,
69+
modelName: ModelNames.gemini2_5_Flash,
7070
generationConfig: generationConfig,
7171
safetySettings: safetySettings,
7272
systemInstruction: systemInstruction
@@ -84,7 +84,7 @@ struct CountTokensIntegrationTests {
8484
@Test(arguments: InstanceConfig.allConfigs)
8585
func countTokens_jsonSchema(_ config: InstanceConfig) async throws {
8686
let model = FirebaseAI.componentInstance(config).generativeModel(
87-
modelName: ModelNames.gemini2Flash,
87+
modelName: ModelNames.gemini2_5_Flash,
8888
generationConfig: GenerationConfig(
8989
responseMIMEType: "application/json",
9090
responseSchema: Schema.object(properties: [

FirebaseAI/Tests/TestApp/Tests/Integration/GenerateContentIntegrationTests.swift

Lines changed: 20 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -51,18 +51,16 @@ struct GenerateContentIntegrationTests {
5151
(InstanceConfig.vertexAI_v1beta, ModelNames.gemini2FlashLite),
5252
(InstanceConfig.vertexAI_v1beta_global, ModelNames.gemini2FlashLite),
5353
(InstanceConfig.vertexAI_v1beta_global_appCheckLimitedUse, ModelNames.gemini2FlashLite),
54-
(InstanceConfig.googleAI_v1beta, ModelNames.gemini2FlashLite),
55-
(InstanceConfig.googleAI_v1beta_appCheckLimitedUse, ModelNames.gemini2FlashLite),
5654
(InstanceConfig.googleAI_v1beta, ModelNames.gemini3FlashPreview),
5755
(InstanceConfig.googleAI_v1beta_appCheckLimitedUse, ModelNames.gemini3FlashPreview),
5856
(InstanceConfig.googleAI_v1beta, ModelNames.gemma3_4B),
5957
(InstanceConfig.googleAI_v1beta_freeTier, ModelNames.gemma3_4B),
6058
// Note: The following configs are commented out for easy one-off manual testing.
61-
// (InstanceConfig.googleAI_v1beta_freeTier, ModelNames.gemini2FlashLite),
62-
// (InstanceConfig.googleAI_v1beta_staging, ModelNames.gemini2FlashLite),
59+
// (InstanceConfig.googleAI_v1beta_freeTier, ModelNames.gemini2_5_FlashLite),
60+
// (InstanceConfig.googleAI_v1beta_staging, ModelNames.gemini2_5_FlashLite),
6361
// (InstanceConfig.googleAI_v1beta_staging, ModelNames.gemma3_4B),
6462
// (InstanceConfig.vertexAI_v1beta_staging, ModelNames.gemini2FlashLite),
65-
// (InstanceConfig.googleAI_v1beta_freeTier_bypassProxy, ModelNames.gemini2FlashLite),
63+
// (InstanceConfig.googleAI_v1beta_freeTier_bypassProxy, ModelNames.gemini2_5_FlashLite),
6664
// (InstanceConfig.googleAI_v1beta_freeTier_bypassProxy, ModelNames.gemma3_4B),
6765
])
6866
func generateContent(_ config: InstanceConfig, modelName: String) async throws {
@@ -118,7 +116,7 @@ struct GenerateContentIntegrationTests {
118116
)
119117
func generateContentEnum(_ config: InstanceConfig) async throws {
120118
let model = FirebaseAI.componentInstance(config).generativeModel(
121-
modelName: ModelNames.gemini2FlashLite,
119+
modelName: ModelNames.gemini2_5_FlashLite,
122120
generationConfig: GenerationConfig(
123121
responseMIMEType: "text/x.enum",
124122
responseSchema: .enumeration(values: ["Red", "Green", "Blue"])
@@ -136,7 +134,16 @@ struct GenerateContentIntegrationTests {
136134
#expect(text == "Blue")
137135

138136
let usageMetadata = try #require(response.usageMetadata)
139-
#expect(usageMetadata.promptTokenCount.isEqual(to: 15, accuracy: tokenCountAccuracy))
137+
if case .googleAI = config.apiConfig.service {
138+
#expect(usageMetadata.promptTokenCount.isEqual(to: 11, accuracy: tokenCountAccuracy))
139+
#expect(usageMetadata.candidatesTokensDetails.count == 0)
140+
} else {
141+
#expect(usageMetadata.promptTokenCount.isEqual(to: 15, accuracy: tokenCountAccuracy))
142+
#expect(usageMetadata.candidatesTokensDetails.count == 1)
143+
let candidatesTokensDetails = try #require(usageMetadata.candidatesTokensDetails.first)
144+
#expect(candidatesTokensDetails.modality == .text)
145+
#expect(candidatesTokensDetails.tokenCount == usageMetadata.candidatesTokenCount)
146+
}
140147
#expect(usageMetadata.candidatesTokenCount.isEqual(to: 1, accuracy: tokenCountAccuracy))
141148
#expect(usageMetadata.thoughtsTokenCount == 0)
142149
#expect(usageMetadata.totalTokenCount
@@ -145,10 +152,6 @@ struct GenerateContentIntegrationTests {
145152
let promptTokensDetails = try #require(usageMetadata.promptTokensDetails.first)
146153
#expect(promptTokensDetails.modality == .text)
147154
#expect(promptTokensDetails.tokenCount == usageMetadata.promptTokenCount)
148-
#expect(usageMetadata.candidatesTokensDetails.count == 1)
149-
let candidatesTokensDetails = try #require(usageMetadata.candidatesTokensDetails.first)
150-
#expect(candidatesTokensDetails.modality == .text)
151-
#expect(candidatesTokensDetails.tokenCount == usageMetadata.candidatesTokenCount)
152155
}
153156

154157
@Test(
@@ -501,16 +504,16 @@ struct GenerateContentIntegrationTests {
501504
(InstanceConfig.vertexAI_v1beta, ModelNames.gemini2FlashLite),
502505
(InstanceConfig.vertexAI_v1beta_global, ModelNames.gemini3FlashPreview),
503506
(InstanceConfig.vertexAI_v1beta_global_appCheckLimitedUse, ModelNames.gemini3FlashPreview),
504-
(InstanceConfig.googleAI_v1beta, ModelNames.gemini2FlashLite),
505-
(InstanceConfig.googleAI_v1beta_appCheckLimitedUse, ModelNames.gemini2FlashLite),
507+
(InstanceConfig.googleAI_v1beta, ModelNames.gemini2_5_FlashLite),
508+
(InstanceConfig.googleAI_v1beta_appCheckLimitedUse, ModelNames.gemini2_5_FlashLite),
506509
(InstanceConfig.googleAI_v1beta, ModelNames.gemma3_4B),
507510
// Note: The following configs are commented out for easy one-off manual testing.
508-
// (InstanceConfig.vertexAI_v1beta_staging, ModelNames.gemini2FlashLite),
509-
// (InstanceConfig.googleAI_v1beta_staging, ModelNames.gemini2FlashLite),
511+
// (InstanceConfig.vertexAI_v1beta_staging, ModelNames.gemini2_5_FlashLite),
512+
// (InstanceConfig.googleAI_v1beta_staging, ModelNames.gemini2_5_FlashLite),
510513
// (InstanceConfig.googleAI_v1beta_staging, ModelNames.gemma3_4B),
511514
// (InstanceConfig.googleAI_v1beta_freeTier_bypassProxy, ModelNames.gemini2FlashLite),
512515
// (InstanceConfig.googleAI_v1beta_freeTier_bypassProxy, ModelNames.gemma3_4B),
513-
// (InstanceConfig.googleAI_v1beta_freeTier, ModelNames.gemini2FlashLite),
516+
// (InstanceConfig.googleAI_v1beta_freeTier, ModelNames.gemini2_5_FlashLite),
514517
// (InstanceConfig.googleAI_v1beta_freeTier, ModelNames.gemma3_4B),
515518
])
516519
func generateContentStream(_ config: InstanceConfig, modelName: String) async throws {
@@ -616,7 +619,7 @@ struct GenerateContentIntegrationTests {
616619
@Test(arguments: InstanceConfig.appCheckNotConfiguredConfigs)
617620
func generateContent_appCheckNotConfigured_shouldFail(_ config: InstanceConfig) async throws {
618621
let model = FirebaseAI.componentInstance(config).generativeModel(
619-
modelName: ModelNames.gemini2Flash
622+
modelName: ModelNames.gemini2_5_Flash
620623
)
621624
let prompt = "Where is Google headquarters located? Answer with the city name only."
622625

FirebaseAI/Tests/TestApp/Tests/Integration/IntegrationTests.swift

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -69,7 +69,7 @@ final class IntegrationTests: XCTestCase {
6969
func testCountTokens_text() async throws {
7070
let prompt = "Why is the sky blue?"
7171
model = vertex.generativeModel(
72-
modelName: ModelNames.gemini2Flash,
72+
modelName: ModelNames.gemini2_5_Flash,
7373
generationConfig: generationConfig,
7474
safetySettings: [
7575
SafetySetting(harmCategory: .harassment, threshold: .blockLowAndAbove, method: .severity),

0 commit comments

Comments (0)