@@ -530,7 +530,6 @@ func (m *openAIModel) generateStream(ctx context.Context, openaiReq *openAIReque
 
 	scanner := bufio.NewScanner(httpResp.Body)
 	var textBuffer strings.Builder
-	var reasoningBuffer strings.Builder
 	var toolCalls []toolCall
 	var usage *usage
 
@@ -560,24 +559,6 @@ func (m *openAIModel) generateStream(ctx context.Context, openaiReq *openAIReque
 			continue
 		}
 
-		if delta.ReasoningContent != nil {
-			if text, ok := delta.ReasoningContent.(string); ok && text != "" {
-				reasoningBuffer.WriteString(text)
-				llmResp := &model.LLMResponse{
-					Content: &genai.Content{
-						Role: "model",
-						Parts: []*genai.Part{
-							{Text: text, Thought: true},
-						},
-					},
-					Partial: true,
-				}
-				if !yield(llmResp, nil) {
-					return
-				}
-			}
-		}
-
 		if delta.Content != nil {
 			if text, ok := delta.Content.(string); ok && text != "" {
 				textBuffer.WriteString(text)
@@ -623,7 +604,7 @@ func (m *openAIModel) generateStream(ctx context.Context, openaiReq *openAIReque
 		}
 
 		if choice.FinishReason != "" {
-			finalResp := m.buildFinalResponse(textBuffer.String(), reasoningBuffer.String(), toolCalls, usage, choice.FinishReason)
+			finalResp := m.buildFinalResponse(textBuffer.String(), toolCalls, usage, choice.FinishReason)
 			yield(finalResp, nil)
 			return
 		}
@@ -635,7 +616,7 @@ func (m *openAIModel) generateStream(ctx context.Context, openaiReq *openAIReque
 	}
 
 	if textBuffer.Len() > 0 || len(toolCalls) > 0 {
-		finalResp := m.buildFinalResponse(textBuffer.String(), reasoningBuffer.String(), toolCalls, usage, "stop")
+		finalResp := m.buildFinalResponse(textBuffer.String(), toolCalls, usage, "stop")
 		yield(finalResp, nil)
 	}
 }
@@ -751,16 +732,9 @@ func (m *openAIModel) convertResponse(resp *response) (*model.LLMResponse, error
 	return llmResp, nil
 }
 
-func (m *openAIModel) buildFinalResponse(text string, reasoningText string, toolCalls []toolCall, usage *usage, finishReason string) *model.LLMResponse {
+func (m *openAIModel) buildFinalResponse(text string, toolCalls []toolCall, usage *usage, finishReason string) *model.LLMResponse {
 	var parts []*genai.Part
 
-	if reasoningText != "" {
-		parts = append(parts, &genai.Part{
-			Text:    reasoningText,
-			Thought: true,
-		})
-	}
-
 	if text != "" {
 		parts = append(parts, genai.NewPartFromText(text))
 	}
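
For reference, the streaming branch deleted above is what surfaced provider reasoning deltas as partial "thought" responses before reaching the final response. The following is a minimal, self-contained sketch of that removed behavior; `Part`, `Content`, and `LLMResponse` are stand-in types assumed here so the snippet compiles on its own (in the real code they come from the `genai` and `model` packages referenced in the diff), and `emitReasoningDelta` is a hypothetical helper, not a function from the repository.

```go
// Sketch only: stand-ins for genai.Part, genai.Content, and model.LLMResponse.
package main

import (
	"fmt"
	"strings"
)

type Part struct {
	Text    string
	Thought bool
}

type Content struct {
	Role  string
	Parts []*Part
}

type LLMResponse struct {
	Content *Content
	Partial bool
}

// emitReasoningDelta mirrors the removed branch: a non-empty string reasoning
// delta is accumulated in a buffer and also yielded immediately as a partial
// response whose single part is marked as a thought.
func emitReasoningDelta(reasoningContent any, buf *strings.Builder, yield func(*LLMResponse, error) bool) bool {
	text, ok := reasoningContent.(string)
	if !ok || text == "" {
		return true // nothing to emit; keep streaming
	}
	buf.WriteString(text)
	resp := &LLMResponse{
		Content: &Content{
			Role:  "model",
			Parts: []*Part{{Text: text, Thought: true}},
		},
		Partial: true,
	}
	return yield(resp, nil) // false tells the caller to stop the stream
}

func main() {
	var reasoning strings.Builder
	deltas := []any{"Considering the tool results", "...", nil}
	for _, d := range deltas {
		ok := emitReasoningDelta(d, &reasoning, func(r *LLMResponse, _ error) bool {
			fmt.Printf("partial thought: %q\n", r.Content.Parts[0].Text)
			return true
		})
		if !ok {
			return
		}
	}
	fmt.Println("accumulated reasoning:", reasoning.String())
}
```

After this change, the streaming path buffers and yields only regular content deltas and tool calls, and buildFinalResponse no longer accepts a reasoning argument.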