2 changes: 2 additions & 0 deletions FirebaseAI/CHANGELOG.md
@@ -7,6 +7,8 @@
- [changed] Deprecated `CountTokensResponse.totalBillableCharacters`; use
`totalTokens` instead. Gemini 2.0 series models and newer are always billed by
token count. (#14934)
- [removed] Removed `CountTokensResponse.totalBillableCharacters` which was
deprecated in 11.15.0. Use `totalTokens` instead. (#XXXX)

# 11.13.0
- [feature] Initial release of the Firebase AI Logic SDK (`FirebaseAI`). This
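For callers migrating off the removed property, a minimal sketch of the replacement usage follows. It is not part of this diff: the `FirebaseAI.firebaseAI()` entry point and the model name are assumptions, while `countTokens(_:)` and `totalTokens` are the APIs shown in this PR.

```swift
import FirebaseAI

// A minimal migration sketch, not part of this PR. The backend entry point and
// model name are assumptions; countTokens(_:) and totalTokens come from the diff.
@available(iOS 15.0, macOS 12.0, macCatalyst 15.0, tvOS 15.0, watchOS 8.0, *)
func printTokenCount() async throws {
  let model = FirebaseAI.firebaseAI().generativeModel(modelName: "gemini-2.0-flash")
  let response = try await model.countTokens("Why is the sky blue?")

  // Before (deprecated in 11.15.0, removed by this change):
  //   let characters = response.totalBillableCharacters
  // After: read the token count directly; billing is by token count.
  print("Total tokens: \(response.totalTokens)")
}
```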
@@ -39,31 +39,11 @@ extension CountTokensRequest: GenerativeAIRequest {
/// The model's response to a count tokens request.
@available(iOS 15.0, macOS 12.0, macCatalyst 15.0, tvOS 15.0, watchOS 8.0, *)
public struct CountTokensResponse: Sendable {
/// Container for deprecated properties or methods.
///
/// This workaround allows deprecated fields to be referenced internally (for example in the
/// `init(from:)` constructor) without introducing compiler warnings.
struct Deprecated {
let totalBillableCharacters: Int?
}

/// The total number of tokens in the input given to the model as a prompt.
public let totalTokens: Int

/// The total number of billable characters in the text input given to the model as a prompt.
///
/// > Important: This does not include billable image, video or other non-text input. See
/// [Vertex AI pricing](https://firebase.google.com/docs/vertex-ai/pricing) for details.
@available(*, deprecated, message: """
Use `totalTokens` instead; Gemini 2.0 series models and newer are always billed by token count.
""")
public var totalBillableCharacters: Int? { deprecated.totalBillableCharacters }

/// The breakdown, by modality, of how many tokens are consumed by the prompt.
public let promptTokensDetails: [ModalityTokenCount]

/// Deprecated properties or methods.
let deprecated: Deprecated
}

// MARK: - Codable Conformances
@@ -112,7 +92,7 @@ extension CountTokensRequest: Encodable {
extension CountTokensResponse: Decodable {
enum CodingKeys: CodingKey {
case totalTokens
case totalBillableCharacters
// totalBillableCharacters is intentionally omitted.
case promptTokensDetails
}

@@ -121,8 +101,6 @@ extension CountTokensResponse: Decodable {
totalTokens = try container.decodeIfPresent(Int.self, forKey: .totalTokens) ?? 0
promptTokensDetails =
try container.decodeIfPresent([ModalityTokenCount].self, forKey: .promptTokensDetails) ?? []
let totalBillableCharacters =
try container.decodeIfPresent(Int.self, forKey: .totalBillableCharacters)
deprecated = CountTokensResponse.Deprecated(totalBillableCharacters: totalBillableCharacters)
// totalBillableCharacters is intentionally omitted.
}
}
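As a side note on the decoding behaviour kept above, the sketch below (not part of this diff) shows the fallback defaults when fields are omitted from the payload; the JSON shape is an assumption based on the coding keys.

```swift
import FirebaseAI
import Foundation

// Sketch only: the payload shape is an assumption based on the coding keys above.
@available(iOS 15.0, macOS 12.0, macCatalyst 15.0, tvOS 15.0, watchOS 8.0, *)
func decodeMinimalCountTokensResponse() throws {
  // `promptTokensDetails` is omitted on purpose to show the fallback behaviour.
  let payload = Data(#"{"totalTokens": 6}"#.utf8)
  let response = try JSONDecoder().decode(CountTokensResponse.self, from: payload)
  print(response.totalTokens)          // 6
  print(response.promptTokensDetails)  // [] (decodeIfPresent falls back to an empty array)
}
```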
@@ -57,12 +57,6 @@ struct CountTokensIntegrationTests {
let response = try await model.countTokens(prompt)

#expect(response.totalTokens == 6)
switch config.apiConfig.service {
case .vertexAI:
#expect(response.deprecated.totalBillableCharacters == 16)
case .googleAI:
#expect(response.deprecated.totalBillableCharacters == nil)
}
#expect(response.promptTokensDetails.count == 1)
let promptTokensDetails = try #require(response.promptTokensDetails.first)
#expect(promptTokensDetails.modality == .text)
@@ -81,12 +75,6 @@
let response = try await model.countTokens("What is your favourite colour?")

#expect(response.totalTokens == 14)
switch config.apiConfig.service {
case .vertexAI:
#expect(response.deprecated.totalBillableCharacters == 61)
case .googleAI:
#expect(response.deprecated.totalBillableCharacters == nil)
}
#expect(response.promptTokensDetails.count == 1)
let promptTokensDetails = try #require(response.promptTokensDetails.first)
#expect(promptTokensDetails.modality == .text)
@@ -115,12 +103,10 @@
switch config.apiConfig.service {
case .vertexAI:
#expect(response.totalTokens == 65)
#expect(response.deprecated.totalBillableCharacters == 170)
case .googleAI:
// The Developer API erroneously ignores the `responseSchema` when counting tokens, resulting
// in a lower total count than Vertex AI.
#expect(response.totalTokens == 34)
#expect(response.deprecated.totalBillableCharacters == nil)
}
#expect(response.promptTokensDetails.count == 1)
let promptTokensDetails = try #require(response.promptTokensDetails.first)
@@ -85,7 +85,6 @@ final class IntegrationTests: XCTestCase {
let response = try await model.countTokens(prompt)

XCTAssertEqual(response.totalTokens, 14)
XCTAssertEqual(response.deprecated.totalBillableCharacters, 51)
XCTAssertEqual(response.promptTokensDetails.count, 1)
let promptTokensDetails = try XCTUnwrap(response.promptTokensDetails.first)
XCTAssertEqual(promptTokensDetails.modality, .text)
@@ -102,7 +101,6 @@
let response = try await model.countTokens(image)

XCTAssertEqual(response.totalTokens, 266)
XCTAssertEqual(response.deprecated.totalBillableCharacters, 35)
XCTAssertEqual(response.promptTokensDetails.count, 2) // Image prompt + system instruction
let textPromptTokensDetails = try XCTUnwrap(response.promptTokensDetails.first {
$0.modality == .text
@@ -122,7 +120,6 @@
let response = try await model.countTokens(fileData)

XCTAssertEqual(response.totalTokens, 266)
XCTAssertEqual(response.deprecated.totalBillableCharacters, 35)
XCTAssertEqual(response.promptTokensDetails.count, 2) // Image prompt + system instruction
let textPromptTokensDetails = try XCTUnwrap(response.promptTokensDetails.first {
$0.modality == .text
@@ -141,7 +138,6 @@
let response = try await model.countTokens(fileData)

XCTAssertEqual(response.totalTokens, 266)
XCTAssertEqual(response.deprecated.totalBillableCharacters, 35)
}

func testCountTokens_image_fileData_requiresUserAuth_userSignedIn() async throws {
@@ -152,7 +148,6 @@
let response = try await model.countTokens(fileData)

XCTAssertEqual(response.totalTokens, 266)
XCTAssertEqual(response.deprecated.totalBillableCharacters, 35)
}

func testCountTokens_image_fileData_requiresUserAuth_wrongUser_permissionDenied() async throws {
@@ -193,7 +188,6 @@
])

XCTAssertGreaterThan(response.totalTokens, 0)
XCTAssertEqual(response.deprecated.totalBillableCharacters, 71)
XCTAssertEqual(response.promptTokensDetails.count, 1)
let promptTokensDetails = try XCTUnwrap(response.promptTokensDetails.first)
XCTAssertEqual(promptTokensDetails.modality, .text)
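The sketch below mirrors the multimodal cases exercised in these tests and is illustrative only. The model name and image data are placeholders; `InlineDataPart`, `countTokens(_:)`, and `promptTokensDetails` are taken from the code in this PR.

```swift
import FirebaseAI
import Foundation

// Sketch: count tokens for an image prompt and print the per-modality breakdown.
// The model name is a placeholder and `imageData` is assumed to be valid JPEG data.
@available(iOS 15.0, macOS 12.0, macCatalyst 15.0, tvOS 15.0, watchOS 8.0, *)
func printImageTokenBreakdown(imageData: Data) async throws {
  let model = FirebaseAI.firebaseAI().generativeModel(modelName: "gemini-2.0-flash")
  let response = try await model.countTokens(
    InlineDataPart(data: imageData, mimeType: "image/jpeg")
  )

  print("Total tokens: \(response.totalTokens)")
  for details in response.promptTokensDetails {
    print("\(details.modality): \(details.tokenCount) tokens")
  }
}
```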
4 changes: 0 additions & 4 deletions FirebaseAI/Tests/Unit/GenerativeModelVertexAITests.swift
@@ -1517,7 +1517,6 @@ final class GenerativeModelVertexAITests: XCTestCase {
let response = try await model.countTokens("Why is the sky blue?")

XCTAssertEqual(response.totalTokens, 6)
XCTAssertEqual(response.deprecated.totalBillableCharacters, 16)
}

func testCountTokens_succeeds_detailed() async throws {
@@ -1530,7 +1529,6 @@
let response = try await model.countTokens("Why is the sky blue?")

XCTAssertEqual(response.totalTokens, 1837)
XCTAssertEqual(response.deprecated.totalBillableCharacters, 117)
XCTAssertEqual(response.promptTokensDetails.count, 2)
XCTAssertEqual(response.promptTokensDetails[0].modality, .image)
XCTAssertEqual(response.promptTokensDetails[0].tokenCount, 1806)
@@ -1577,7 +1575,6 @@
let response = try await model.countTokens("Why is the sky blue?")

XCTAssertEqual(response.totalTokens, 6)
XCTAssertEqual(response.deprecated.totalBillableCharacters, 16)
}

func testCountTokens_succeeds_noBillableCharacters() async throws {
@@ -1590,7 +1587,6 @@
let response = try await model.countTokens(InlineDataPart(data: Data(), mimeType: "image/jpeg"))

XCTAssertEqual(response.totalTokens, 258)
XCTAssertNil(response.deprecated.totalBillableCharacters)
}

func testCountTokens_modelNotFound() async throws {