@@ -29,7 +29,7 @@ import { RequestPayload } from "./openai";
 import { fetch } from "@/app/utils/stream";

 export class GeminiProApi implements LLMApi {
-  path(path: string): string {
+  path(path: string, shouldStream = false): string {
     const accessStore = useAccessStore.getState();

     let baseUrl = "";
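
A minimal sketch (the helper name `buildChatPath` and the URLs are illustrative, not from the repo) of how the new `shouldStream` flag is expected to shape the endpoint URL: `alt=sse` is appended only for streaming calls, and with `&` when a query string is already present.

```ts
// Illustrative sketch, not the project's code: how shouldStream is
// expected to affect the generated endpoint URL.
function buildChatPath(
  baseUrl: string,
  path: string,
  shouldStream = false,
): string {
  let chatPath = [baseUrl, path].join("/");
  if (shouldStream) {
    // alt=sse asks the Gemini REST API to stream server-sent events;
    // use "&" when the path already carries a query string.
    chatPath += chatPath.includes("?") ? "&alt=sse" : "?alt=sse";
  }
  return chatPath;
}

console.log(
  buildChatPath("https://example.invalid", "v1beta/models/gemini-pro:x", true),
);
// https://example.invalid/v1beta/models/gemini-pro:x?alt=sse
console.log(buildChatPath("https://example.invalid", "v1beta/models/gemini-pro:x"));
// https://example.invalid/v1beta/models/gemini-pro:x
```

Non-streaming callers keep their plain URLs because the parameter defaults to `false`.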
@@ -51,15 +51,18 @@ export class GeminiProApi implements LLMApi {
     console.log("[Proxy Endpoint] ", baseUrl, path);

     let chatPath = [baseUrl, path].join("/");
+    if (shouldStream) {
+      chatPath += chatPath.includes("?") ? "&alt=sse" : "?alt=sse";
+    }

-    chatPath += chatPath.includes("?") ? "&alt=sse" : "?alt=sse";
     return chatPath;
   }
   extractMessage(res: any) {
     console.log("[Response] gemini-pro response: ", res);

     return (
       res?.candidates?.at(0)?.content?.parts.at(0)?.text ||
+      res?.at(0)?.candidates?.at(0)?.content?.parts.at(0)?.text ||
       res?.error?.message ||
       ""
     );
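
The added fallback in `extractMessage` covers the case where a call made without `alt=sse` returns an array of response chunks rather than a single object. A self-contained sketch with illustrative payloads (not captured API responses):

```ts
// Illustrative payloads only: the two response shapes the updated
// extractMessage accepts.
const singleResponse: any = {
  candidates: [{ content: { parts: [{ text: "hello" }] } }],
};
const arrayResponse: any = [
  { candidates: [{ content: { parts: [{ text: "hello" }] } }] },
];

function extractMessage(res: any): string {
  return (
    // Single-object shape: one response with candidates at the top level.
    res?.candidates?.at(0)?.content?.parts.at(0)?.text ||
    // New fallback: an array of chunks; read the first element.
    res?.at(0)?.candidates?.at(0)?.content?.parts.at(0)?.text ||
    res?.error?.message ||
    ""
  );
}

console.log(extractMessage(singleResponse)); // "hello"
console.log(extractMessage(arrayResponse)); // "hello"
```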
@@ -166,7 +169,10 @@ export class GeminiProApi implements LLMApi {
     options.onController?.(controller);
     try {
       // https://github.com/google-gemini/cookbook/blob/main/quickstarts/rest/Streaming_REST.ipynb
-      const chatPath = this.path(Google.ChatPath(modelConfig.model));
+      const chatPath = this.path(
+        Google.ChatPath(modelConfig.model),
+        shouldStream,
+      );

       const chatPayload = {
         method: "POST",
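
For context, a hedged sketch of how the computed path then feeds the request; the URL is a placeholder rather than the client's real proxy endpoint, and the body shape follows the public Gemini REST format rather than this project's payload builder.

```ts
// Assumed URL and payload for illustration; only the alt=sse query
// parameter and the POST shape come from the diff above.
const chatPath =
  "https://example.invalid/v1beta/models/gemini-pro:streamGenerateContent?alt=sse";

const chatPayload = {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({
    contents: [{ role: "user", parts: [{ text: "Hello" }] }],
  }),
};

// With alt=sse the server answers as a server-sent-event stream, one
// JSON chunk per "data:" line, for the streaming branch to parse.
const res = await fetch(chatPath, chatPayload);
console.log(res.status);
```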