@@ -176,6 +176,8 @@ export class ZgsmAiHandler extends BaseProvider implements SingleCompletionHandl
 				})
 			}
 			let stream
+			let selectedLlm: string | undefined
+			let selectReason: string | undefined
 			try {
 				this.logger.info(`[RequestID]:`, requestId)
 				const { data, response } = await this.client.chat.completions
@@ -189,20 +191,31 @@ export class ZgsmAiHandler extends BaseProvider implements SingleCompletionHandl
 					.withResponse()
 				this.logger.info(`[ResponseID]:`, response.headers.get("x-request-id"))
 
-				stream = data
 				if (this.options.zgsmModelId === autoModeModelId) {
+					selectedLlm = response.headers.get("x-select-llm") || ""
+					selectReason = response.headers.get("x-select-reason") || ""
+
+					if (selectedLlm) {
+						this.logger.info(`[Selected LLM]:`, selectedLlm)
+					}
+					if (selectReason) {
+						this.logger.info(`[Select Reason]:`, selectReason)
+					}
+
 					const userInputHeader = response.headers.get("x-user-input")
 					if (userInputHeader) {
 						const decodedUserInput = Buffer.from(userInputHeader, "base64").toString("utf-8")
 						this.logger.info(`[x-user-input]: ${decodedUserInput}`)
 					}
 				}
+
+				stream = data
 			} catch (error) {
 				throw handleOpenAIError(error, this.providerName)
 			}
 
 			// 6. Optimize stream processing - use batch processing and buffer
-			yield* this.handleOptimizedStream(stream, modelInfo)
+			yield* this.handleOptimizedStream(stream, modelInfo, selectedLlm, selectReason)
 		} else {
 			// Non-streaming processing
 			const requestOptions = this.buildNonStreamingRequestOptions(
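For context, the hunk above pulls the Auto mode routing metadata out of the HTTP response headers before the stream is consumed. A minimal standalone sketch of that header handling, assuming a fetch-style Headers object (the header names and the base64-encoded x-user-input come from the diff; readAutoModeHeaders itself is a hypothetical helper, not part of the change):

```ts
// Sketch only: header names are taken from the diff above; this helper is hypothetical.
function readAutoModeHeaders(headers: Headers): {
	selectedLlm: string
	selectReason: string
	userInput?: string
} {
	const selectedLlm = headers.get("x-select-llm") || ""
	const selectReason = headers.get("x-select-reason") || ""
	const userInputHeader = headers.get("x-user-input")
	// x-user-input arrives base64-encoded and is decoded before it is logged.
	const userInput = userInputHeader ? Buffer.from(userInputHeader, "base64").toString("utf-8") : undefined
	return { selectedLlm, selectReason, userInput }
}
```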
@@ -406,6 +419,8 @@ export class ZgsmAiHandler extends BaseProvider implements SingleCompletionHandl
 	private async *handleOptimizedStream(
 		stream: AsyncIterable<OpenAI.Chat.Completions.ChatCompletionChunk>,
 		modelInfo: ModelInfo,
+		selectedLlm?: string,
+		selectReason?: string,
 	): ApiStream {
 		const matcher = new XmlMatcher(
 			"think",
@@ -423,6 +438,14 @@ export class ZgsmAiHandler extends BaseProvider implements SingleCompletionHandl
 		let time = Date.now()
 		let isPrinted = false
 
+		// Yield selected LLM info if available (for Auto model mode)
+		if (selectedLlm && this.options.zgsmModelId === autoModeModelId) {
+			yield {
+				type: "text",
+				text: `[Selected LLM: ${selectedLlm}${selectReason ? ` (${selectReason})` : ""}]`,
+			}
+		}
+
 		// chunk
 		for await (const chunk of stream) {
 			const delta = chunk.choices[0]?.delta ?? {}
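Downstream, handleOptimizedStream now announces the routed model as an ordinary text chunk ahead of the model output. A minimal consumer sketch, assuming text chunks shaped like the `{ type: "text", text }` yield above (printStream and the prefix check are illustrative, not part of the change):

```ts
// Sketch only: assumes the chunk shape used by the new yield in handleOptimizedStream.
type TextChunk = { type: "text"; text: string }

async function printStream(stream: AsyncIterable<TextChunk>): Promise<void> {
	for await (const chunk of stream) {
		// In Auto model mode the first text chunk carries "[Selected LLM: ...]".
		if (chunk.text.startsWith("[Selected LLM:")) {
			console.log("Routing info:", chunk.text)
			continue
		}
		process.stdout.write(chunk.text)
	}
}
```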