 using System.Collections.ObjectModel;
 using System.Linq;
 using System.Threading.Tasks;
+using Firebase.VertexAI.Internal;

 namespace Firebase.VertexAI {

@@ -123,26 +124,10 @@ public IAsyncEnumerable<GenerateContentResponse> SendMessageStreamAsync(
     return SendMessageStreamAsyncInternal(content);
   }

-  private ModelContent GuaranteeRole(ModelContent content, string role) {
-    if (content.Role == role) {
-      return content;
-    } else {
-      return new ModelContent(role, content.Parts);
-    }
-  }
-
-  private ModelContent GuaranteeUser(ModelContent content) {
-    return GuaranteeRole(content, "user");
-  }
-
-  private ModelContent GuaranteeModel(ModelContent content) {
-    return GuaranteeRole(content, "model");
-  }
-
   private async Task<GenerateContentResponse> SendMessageAsyncInternal(
       IEnumerable<ModelContent> requestContent) {
     // Make sure that the requests are set to role "user".
-    List<ModelContent> fixedRequests = requestContent.Select(GuaranteeUser).ToList();
+    List<ModelContent> fixedRequests = requestContent.Select(VertexAIExtensions.ConvertToUser).ToList();
     // Set up the context to send in the request
     List<ModelContent> fullRequest = new(chatHistory);
     fullRequest.AddRange(fixedRequests);
@@ -157,7 +142,7 @@ private async Task<GenerateContentResponse> SendMessageAsyncInternal(
       ModelContent responseContent = response.Candidates.First().Content;

       chatHistory.AddRange(fixedRequests);
-      chatHistory.Add(GuaranteeModel(responseContent));
+      chatHistory.Add(responseContent.ConvertToModel());
     }

     return response;
@@ -166,7 +151,7 @@ private async Task<GenerateContentResponse> SendMessageAsyncInternal(
   private async IAsyncEnumerable<GenerateContentResponse> SendMessageStreamAsyncInternal(
       IEnumerable<ModelContent> requestContent) {
     // Make sure that the requests are set to role "user".
-    List<ModelContent> fixedRequests = requestContent.Select(GuaranteeUser).ToList();
+    List<ModelContent> fixedRequests = requestContent.Select(VertexAIExtensions.ConvertToUser).ToList();
     // Set up the context to send in the request
     List<ModelContent> fullRequest = new(chatHistory);
     fullRequest.AddRange(fixedRequests);
@@ -181,7 +166,7 @@ private async IAsyncEnumerable<GenerateContentResponse> SendMessageStreamAsyncIn
       // but we don't want to save the history anymore.
       if (response.Candidates.Any()) {
         ModelContent responseContent = response.Candidates.First().Content;
-        responseContents.Add(GuaranteeModel(responseContent));
+        responseContents.Add(responseContent.ConvertToModel());
       } else {
         saveHistory = false;
       }
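
For context, the Guarantee* helpers removed above are replaced by extension methods on ModelContent that this diff calls but does not define; they presumably live in a new Firebase.VertexAI.Internal file outside this change. Below is a minimal sketch of what VertexAIExtensions might look like, assuming the bodies mirror the removed helpers; the shared ConvertToRole helper and the access modifiers are assumptions, while the namespace, class name, and public method names come from the call sites in the diff.

// Sketch only: this file is not part of the diff shown above. The method
// bodies are assumed to mirror the removed GuaranteeRole/GuaranteeUser/
// GuaranteeModel helpers; ConvertToRole and the visibility are assumptions.
namespace Firebase.VertexAI.Internal {

internal static class VertexAIExtensions {
  // If the content already carries the requested role, reuse it as-is;
  // otherwise rebuild it with the same parts under the new role.
  private static ModelContent ConvertToRole(this ModelContent content, string role) {
    return content.Role == role ? content : new ModelContent(role, content.Parts);
  }

  // Used as a method group in Select(VertexAIExtensions.ConvertToUser).
  public static ModelContent ConvertToUser(this ModelContent content) {
    return content.ConvertToRole("user");
  }

  // Called in extension form as responseContent.ConvertToModel().
  public static ModelContent ConvertToModel(this ModelContent content) {
    return content.ConvertToRole("model");
  }
}

}

Moving the role-normalization logic into extension methods keeps it reusable outside this chat class and lets call sites read naturally, e.g. chatHistory.Add(responseContent.ConvertToModel()).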