@@ -21,9 +21,10 @@ import {
   SpeechOptions,
 } from "../api";
 import { getClientConfig } from "@/app/config/client";
-import { getMessageTextContent } from "@/app/utils";
+import { getMessageTextContent, isVisionModel } from "@/app/utils";
 import { RequestPayload } from "./openai";
 import { fetch } from "@/app/utils/stream";
+import { preProcessImageContent } from "@/app/utils/chat";
 
 interface BasePayload {
   model: string;
@@ -154,9 +155,12 @@ export class ChatGLMApi implements LLMApi {
   }
 
   async chat(options: ChatOptions) {
+    const visionModel = isVisionModel(options.config.model);
     const messages: ChatOptions["messages"] = [];
     for (const v of options.messages) {
-      const content = getMessageTextContent(v);
+      const content = visionModel
+        ? await preProcessImageContent(v.content)
+        : getMessageTextContent(v);
       messages.push({ role: v.role, content });
     }
 
@@ -168,7 +172,6 @@ export class ChatGLMApi implements LLMApi {
         providerName: options.config.providerName,
       },
     };
-
     const modelType = this.getModelType(modelConfig.model);
     const requestPayload = this.createPayload(messages, modelConfig, options);
     const path = this.path(this.getModelPath(modelType));
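For context, the new branch only changes behavior for multipart message content: text-only models keep the flattened string from `getMessageTextContent`, while vision models pass the full content array through `preProcessImageContent`. Below is a minimal sketch of what a `preProcessImageContent`-style helper might do, assuming the OpenAI-style multipart shape; `MultimodalContent`, `toDataUrl`, and `preProcessImageContentSketch` are illustrative names, not the project's actual implementation in `@/app/utils/chat`.

```ts
// Illustrative multipart content shape, as used by OpenAI-compatible clients.
type MultimodalContent =
  | { type: "text"; text: string }
  | { type: "image_url"; image_url: { url: string } };

// Hypothetical helper: fetch an image and inline it as a base64 data URL,
// since some providers reject remote URLs in vision requests.
async function toDataUrl(url: string): Promise<string> {
  const res = await fetch(url);
  const buf = Buffer.from(await res.arrayBuffer());
  const mime = res.headers.get("content-type") ?? "image/png";
  return `data:${mime};base64,${buf.toString("base64")}`;
}

// Sketch of the preprocessing step: plain strings pass through untouched,
// while image parts in a multipart array are rewritten in place.
async function preProcessImageContentSketch(
  content: string | MultimodalContent[],
): Promise<string | MultimodalContent[]> {
  if (typeof content === "string") return content;
  return Promise.all(
    content.map(async (part) =>
      part.type === "image_url"
        ? { ...part, image_url: { url: await toDataUrl(part.image_url.url) } }
        : part,
    ),
  );
}
```

Keeping the text-only path on `getMessageTextContent` means non-vision ChatGLM models never pay the cost of image fetching, and the payload shape they receive is unchanged by this patch.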