Skip to content

Commit 8f82f5d

Browse files
authored
[Vertex AI] Make generateContentStream/sendMessageStream throws (#13573)
1 parent 20ec9a3 commit 8f82f5d

File tree

9 files changed

+39
-47
lines changed

9 files changed

+39
-47
lines changed

FirebaseVertexAI/CHANGELOG.md

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2,8 +2,10 @@
22
- [fixed] Resolved a decoding error for citations without a `uri` and added
33
support for decoding `title` fields, which were previously ignored. (#13518)
44
- [changed] **Breaking Change**: The methods for starting streaming requests
5-
(`generateContentStream` and `sendMessageStream`) and creating a chat instance
6-
(`startChat`) are now asynchronous and must be called with `await`. (#13545)
5+
(`generateContentStream` and `sendMessageStream`) are now throwing and
6+
asynchronous and must be called with `try await`. (#13545, #13573)
7+
- [changed] **Breaking Change**: Creating a chat instance (`startChat`) is now
8+
asynchronous and must be called with `await`. (#13545)
79

810
# 10.29.0
911
- [feature] Added community support for watchOS. (#13215)

FirebaseVertexAI/Sample/ChatSample/ViewModels/ConversationViewModel.swift

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -85,7 +85,7 @@ class ConversationViewModel: ObservableObject {
8585
guard let chat else {
8686
throw ChatError.notInitialized
8787
}
88-
let responseStream = await chat.sendMessageStream(text)
88+
let responseStream = try await chat.sendMessageStream(text)
8989
for try await chunk in responseStream {
9090
messages[messages.count - 1].pending = false
9191
if let text = chunk.text {

FirebaseVertexAI/Sample/FunctionCallingSample/ViewModels/FunctionCallingViewModel.swift

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -122,12 +122,12 @@ class FunctionCallingViewModel: ObservableObject {
122122
}
123123
let responseStream: AsyncThrowingStream<GenerateContentResponse, Error>
124124
if functionResponses.isEmpty {
125-
responseStream = await chat.sendMessageStream(text)
125+
responseStream = try await chat.sendMessageStream(text)
126126
} else {
127127
for functionResponse in functionResponses {
128128
messages.insert(functionResponse.chatMessage(), at: messages.count - 1)
129129
}
130-
responseStream = await chat.sendMessageStream(functionResponses.modelContent())
130+
responseStream = try await chat.sendMessageStream(functionResponses.modelContent())
131131
}
132132
for try await chunk in responseStream {
133133
processResponseContent(content: chunk)

FirebaseVertexAI/Sample/GenerativeAIMultimodalSample/ViewModels/PhotoReasoningViewModel.swift

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -84,7 +84,7 @@ class PhotoReasoningViewModel: ObservableObject {
8484
}
8585
}
8686

87-
let outputContentStream = await model.generateContentStream(prompt, images)
87+
let outputContentStream = try await model.generateContentStream(prompt, images)
8888

8989
// stream response
9090
for try await outputContent in outputContentStream {

FirebaseVertexAI/Sample/GenerativeAITextSample/ViewModels/SummarizeViewModel.swift

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -50,7 +50,7 @@ class SummarizeViewModel: ObservableObject {
5050

5151
let prompt = "Summarize the following text for me: \(inputText)"
5252

53-
let outputContentStream = await model.generateContentStream(prompt)
53+
let outputContentStream = try await model.generateContentStream(prompt)
5454

5555
// stream response
5656
for try await outputContent in outputContentStream {

FirebaseVertexAI/Sources/Chat.swift

Lines changed: 6 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -85,7 +85,7 @@ public actor Chat {
8585
/// - Parameter parts: The new content to send as a single chat message.
8686
/// - Returns: A stream containing the model's response or an error if an error occurred.
8787
@available(macOS 12.0, *)
88-
public func sendMessageStream(_ parts: any ThrowingPartsRepresentable...)
88+
public func sendMessageStream(_ parts: any ThrowingPartsRepresentable...) throws
8989
-> AsyncThrowingStream<GenerateContentResponse, Error> {
9090
return try sendMessageStream([ModelContent(parts: parts)])
9191
}
@@ -95,21 +95,16 @@ public actor Chat {
9595
/// - Parameter content: The new content to send as a single chat message.
9696
/// - Returns: A stream containing the model's response or an error if an error occurred.
9797
@available(macOS 12.0, *)
98-
public func sendMessageStream(_ content: @autoclosure () throws -> [ModelContent])
98+
public func sendMessageStream(_ content: @autoclosure () throws -> [ModelContent]) throws
9999
-> AsyncThrowingStream<GenerateContentResponse, Error> {
100100
let resolvedContent: [ModelContent]
101101
do {
102102
resolvedContent = try content()
103103
} catch let underlying {
104-
return AsyncThrowingStream { continuation in
105-
let error: Error
106-
if let contentError = underlying as? ImageConversionError {
107-
error = GenerateContentError.promptImageContentError(underlying: contentError)
108-
} else {
109-
error = GenerateContentError.internalError(underlying: underlying)
110-
}
111-
continuation.finish(throwing: error)
104+
if let contentError = underlying as? ImageConversionError {
105+
throw GenerateContentError.promptImageContentError(underlying: contentError)
112106
}
107+
throw GenerateContentError.internalError(underlying: underlying)
113108
}
114109

115110
return AsyncThrowingStream { continuation in
@@ -121,7 +116,7 @@ public actor Chat {
121116

122117
// Send the history alongside the new message as context.
123118
let request = history + newContent
124-
let stream = await model.generateContentStream(request)
119+
let stream = try await model.generateContentStream(request)
125120
do {
126121
for try await chunk in stream {
127122
// Capture any content that's streaming. This should be populated if there's no error.

FirebaseVertexAI/Sources/GenerativeModel.swift

Lines changed: 5 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -179,7 +179,7 @@ public final actor GenerativeModel {
179179
/// - Returns: A stream wrapping content generated by the model or a ``GenerateContentError``
180180
/// error if an error occurred.
181181
@available(macOS 12.0, *)
182-
public func generateContentStream(_ parts: any ThrowingPartsRepresentable...)
182+
public func generateContentStream(_ parts: any ThrowingPartsRepresentable...) throws
183183
-> AsyncThrowingStream<GenerateContentResponse, Error> {
184184
return try generateContentStream([ModelContent(parts: parts)])
185185
}
@@ -190,21 +190,16 @@ public final actor GenerativeModel {
190190
/// - Returns: A stream wrapping content generated by the model or a ``GenerateContentError``
191191
/// error if an error occurred.
192192
@available(macOS 12.0, *)
193-
public func generateContentStream(_ content: @autoclosure () throws -> [ModelContent])
193+
public func generateContentStream(_ content: @autoclosure () throws -> [ModelContent]) throws
194194
-> AsyncThrowingStream<GenerateContentResponse, Error> {
195195
let evaluatedContent: [ModelContent]
196196
do {
197197
evaluatedContent = try content()
198198
} catch let underlying {
199-
return AsyncThrowingStream { continuation in
200-
let error: Error
201-
if let contentError = underlying as? ImageConversionError {
202-
error = GenerateContentError.promptImageContentError(underlying: contentError)
203-
} else {
204-
error = GenerateContentError.internalError(underlying: underlying)
205-
}
206-
continuation.finish(throwing: error)
199+
if let contentError = underlying as? ImageConversionError {
200+
throw GenerateContentError.promptImageContentError(underlying: contentError)
207201
}
202+
throw GenerateContentError.internalError(underlying: underlying)
208203
}
209204

210205
let generateContentRequest = GenerateContentRequest(model: modelResourceName,

FirebaseVertexAI/Tests/Unit/ChatTests.swift

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -64,7 +64,7 @@ final class ChatTests: XCTestCase {
6464
)
6565
let chat = Chat(model: model, history: [])
6666
let input = "Test input"
67-
let stream = await chat.sendMessageStream(input)
67+
let stream = try await chat.sendMessageStream(input)
6868

6969
// Ensure the values are parsed correctly
7070
for try await value in stream {

FirebaseVertexAI/Tests/Unit/GenerativeModelTests.swift

Lines changed: 18 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -760,7 +760,7 @@ final class GenerativeModelTests: XCTestCase {
760760
)
761761

762762
do {
763-
let stream = await model.generateContentStream("Hi")
763+
let stream = try await model.generateContentStream("Hi")
764764
for try await _ in stream {
765765
XCTFail("No content is there, this shouldn't happen.")
766766
}
@@ -784,7 +784,7 @@ final class GenerativeModelTests: XCTestCase {
784784
)
785785

786786
do {
787-
let stream = await model.generateContentStream(testPrompt)
787+
let stream = try await model.generateContentStream(testPrompt)
788788
for try await _ in stream {
789789
XCTFail("No content is there, this shouldn't happen.")
790790
}
@@ -807,7 +807,7 @@ final class GenerativeModelTests: XCTestCase {
807807
)
808808

809809
do {
810-
let stream = await model.generateContentStream("Hi")
810+
let stream = try await model.generateContentStream("Hi")
811811
for try await _ in stream {
812812
XCTFail("No content is there, this shouldn't happen.")
813813
}
@@ -827,7 +827,7 @@ final class GenerativeModelTests: XCTestCase {
827827
)
828828

829829
do {
830-
let stream = await model.generateContentStream("Hi")
830+
let stream = try await model.generateContentStream("Hi")
831831
for try await _ in stream {
832832
XCTFail("Content shouldn't be shown, this shouldn't happen.")
833833
}
@@ -847,7 +847,7 @@ final class GenerativeModelTests: XCTestCase {
847847
)
848848

849849
do {
850-
let stream = await model.generateContentStream("Hi")
850+
let stream = try await model.generateContentStream("Hi")
851851
for try await _ in stream {
852852
XCTFail("Content shouldn't be shown, this shouldn't happen.")
853853
}
@@ -866,7 +866,7 @@ final class GenerativeModelTests: XCTestCase {
866866
withExtension: "txt"
867867
)
868868

869-
let stream = await model.generateContentStream("Hi")
869+
let stream = try await model.generateContentStream("Hi")
870870
do {
871871
for try await content in stream {
872872
XCTAssertNotNil(content.text)
@@ -887,7 +887,7 @@ final class GenerativeModelTests: XCTestCase {
887887
)
888888

889889
var responses = 0
890-
let stream = await model.generateContentStream("Hi")
890+
let stream = try await model.generateContentStream("Hi")
891891
for try await content in stream {
892892
XCTAssertNotNil(content.text)
893893
responses += 1
@@ -904,7 +904,7 @@ final class GenerativeModelTests: XCTestCase {
904904
)
905905

906906
var responses = 0
907-
let stream = await model.generateContentStream("Hi")
907+
let stream = try await model.generateContentStream("Hi")
908908
for try await content in stream {
909909
XCTAssertNotNil(content.text)
910910
responses += 1
@@ -921,7 +921,7 @@ final class GenerativeModelTests: XCTestCase {
921921
)
922922

923923
var hadUnknown = false
924-
let stream = await model.generateContentStream("Hi")
924+
let stream = try await model.generateContentStream("Hi")
925925
for try await content in stream {
926926
XCTAssertNotNil(content.text)
927927
if let ratings = content.candidates.first?.safetyRatings,
@@ -940,7 +940,7 @@ final class GenerativeModelTests: XCTestCase {
940940
withExtension: "txt"
941941
)
942942

943-
let stream = await model.generateContentStream("Hi")
943+
let stream = try await model.generateContentStream("Hi")
944944
var citations = [Citation]()
945945
var responses = [GenerateContentResponse]()
946946
for try await content in stream {
@@ -996,7 +996,7 @@ final class GenerativeModelTests: XCTestCase {
996996
appCheckToken: appCheckToken
997997
)
998998

999-
let stream = await model.generateContentStream(testPrompt)
999+
let stream = try await model.generateContentStream(testPrompt)
10001000
for try await _ in stream {}
10011001
}
10021002

@@ -1018,7 +1018,7 @@ final class GenerativeModelTests: XCTestCase {
10181018
appCheckToken: AppCheckInteropFake.placeholderTokenValue
10191019
)
10201020

1021-
let stream = await model.generateContentStream(testPrompt)
1021+
let stream = try await model.generateContentStream(testPrompt)
10221022
for try await _ in stream {}
10231023
}
10241024

@@ -1030,7 +1030,7 @@ final class GenerativeModelTests: XCTestCase {
10301030
)
10311031
var responses = [GenerateContentResponse]()
10321032

1033-
let stream = await model.generateContentStream(testPrompt)
1033+
let stream = try await model.generateContentStream(testPrompt)
10341034
for try await response in stream {
10351035
responses.append(response)
10361036
}
@@ -1056,7 +1056,7 @@ final class GenerativeModelTests: XCTestCase {
10561056

10571057
var responseCount = 0
10581058
do {
1059-
let stream = await model.generateContentStream("Hi")
1059+
let stream = try await model.generateContentStream("Hi")
10601060
for try await content in stream {
10611061
XCTAssertNotNil(content.text)
10621062
responseCount += 1
@@ -1076,7 +1076,7 @@ final class GenerativeModelTests: XCTestCase {
10761076
func testGenerateContentStream_nonHTTPResponse() async throws {
10771077
MockURLProtocol.requestHandler = try nonHTTPRequestHandler()
10781078

1079-
let stream = await model.generateContentStream("Hi")
1079+
let stream = try await model.generateContentStream("Hi")
10801080
do {
10811081
for try await content in stream {
10821082
XCTFail("Unexpected content in stream: \(content)")
@@ -1096,7 +1096,7 @@ final class GenerativeModelTests: XCTestCase {
10961096
withExtension: "txt"
10971097
)
10981098

1099-
let stream = await model.generateContentStream(testPrompt)
1099+
let stream = try await model.generateContentStream(testPrompt)
11001100
do {
11011101
for try await content in stream {
11021102
XCTFail("Unexpected content in stream: \(content)")
@@ -1120,7 +1120,7 @@ final class GenerativeModelTests: XCTestCase {
11201120
withExtension: "txt"
11211121
)
11221122

1123-
let stream = await model.generateContentStream(testPrompt)
1123+
let stream = try await model.generateContentStream(testPrompt)
11241124
do {
11251125
for try await content in stream {
11261126
XCTFail("Unexpected content in stream: \(content)")
@@ -1159,7 +1159,7 @@ final class GenerativeModelTests: XCTestCase {
11591159
)
11601160

11611161
var responses = 0
1162-
let stream = await model.generateContentStream(testPrompt)
1162+
let stream = try await model.generateContentStream(testPrompt)
11631163
for try await content in stream {
11641164
XCTAssertNotNil(content.text)
11651165
responses += 1

0 commit comments

Comments (0)