@@ -888,20 +888,18 @@ async function* streamGenerateContent(apiParam, model, params, requestOptions) {
888888 yield responseJson ;
889889 }
890890}
/**
 * Fetches an embedding for the given content from the Gemini API.
 *
 * Unlike streamGenerateContent, embedContent is a unary (non-streaming)
 * endpoint (`stream: false`), so the whole response is consumed with a
 * single response.json() call instead of an SSE event loop.
 *
 * @param {Object} apiParam - Credential/API parameters forwarded to toURL.
 * @param {Object} model - Model descriptor (e.g. a GeminiModel instance).
 * @param {Object} params - Request payload; JSON-serialized as the POST body.
 * @param {Object} [requestOptions] - Extra options passed through to makeRequest.
 * @returns {Promise<Object|undefined>} Parsed embedding response, or
 *   undefined when the response carries no body.
 */
async function embedContent(apiParam, model, params, requestOptions) {
  const response = await makeRequest(
    toURL({ model, task: "embedContent", stream: false, apiParam }),
    JSON.stringify(params),
    requestOptions
  );
  // NOTE(review): a body-less response resolves to undefined rather than
  // throwing; callers are expected to optional-chain into the result
  // (e.g. `it?.embedding?.values`).
  if (response.body == null) {
    return;
  }
  return response.json();
}
906904async function makeRequest ( url , body , requestOptions ) {
907905 let response ;
@@ -1214,11 +1212,9 @@ async function embeddingProxyHandler(rawReq) {
12141212 log ?. warn ( "request" , embedContentRequest ) ;
12151213 let geminiResp = [ ] ;
12161214 try {
1217- for await ( const it of embedContent ( apiParam , new GeminiModel ( "text-embedding-004" ) , embedContentRequest ) ) {
1218- const data = it . embedding ?. values ;
1219- geminiResp = data ;
1220- break ;
1221- }
1215+ const it = await embedContent ( apiParam , new GeminiModel ( "text-embedding-004" ) , embedContentRequest ) ;
1216+ const data = it ?. embedding ?. values ;
1217+ geminiResp = data ;
12221218 } catch ( err ) {
12231219 log ?. error ( req ) ;
12241220 log ?. error ( err ?. message ?? err . toString ( ) ) ;
0 commit comments