import modelengine.fel.community.model.openai.entity.embed.OpenAiEmbeddingResponse;
import modelengine.fel.community.model.openai.entity.image.OpenAiImageRequest;
import modelengine.fel.community.model.openai.entity.image.OpenAiImageResponse;
+import modelengine.fel.community.model.openai.enums.ModelProcessingState;
import modelengine.fel.community.model.openai.util.HttpUtils;
import modelengine.fel.core.chat.ChatMessage;
import modelengine.fel.core.chat.ChatModel;
import modelengine.fel.core.chat.ChatOption;
import modelengine.fel.core.chat.Prompt;
+import modelengine.fel.core.chat.support.AiMessage;
import modelengine.fel.core.embed.EmbedModel;
import modelengine.fel.core.embed.EmbedOption;
import modelengine.fel.core.embed.Embedding;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;

/**
@@ -78,6 +81,7 @@ public class OpenAiModel implements EmbedModel, ChatModel, ImageModel {
            .put("client.http.secure.key-store-file", Boolean.FALSE)
            .put("client.http.secure.key-store-password", Boolean.TRUE)
            .build();
+    private static final String RESPONSE_TEMPLATE = "<think>{0}</think>{1}";

    private final HttpClassicClientFactory httpClientFactory;
    private final HttpClassicClientFactory.Config clientConfig;
@@ -167,11 +171,33 @@ public List<Media> generate(String prompt, ImageOption option) {
    }

    private Choir<ChatMessage> createChatStream(HttpClassicClientRequest request) {
+        AtomicReference<ModelProcessingState> modelProcessingState =
+                new AtomicReference<>(ModelProcessingState.INITIAL);
        return request.<String>exchangeStream(String.class)
                .filter(str -> !StringUtils.equals(str, "[DONE]"))
                .map(str -> this.serializer.<OpenAiChatCompletionResponse>deserialize(str,
                        OpenAiChatCompletionResponse.class))
-                .map(OpenAiChatCompletionResponse::message);
+                .map(response -> getChatMessage(response, modelProcessingState));
+    }
+
+    private ChatMessage getChatMessage(OpenAiChatCompletionResponse response,
+            AtomicReference<ModelProcessingState> state) {
+        // Adapt reasoning returned in the reasoning_content format: the model emits reasoning_content first, then content.
+        // Prepend a <think> tag before the first reasoning_content chunk and a </think> tag before the first content chunk.
+        if (state.get() == ModelProcessingState.INITIAL && StringUtils.isNotEmpty(response.reasoningContent().text())) {
+            String text = "<think>" + response.reasoningContent().text();
+            state.set(ModelProcessingState.THINKING);
+            return new AiMessage(text, response.message().toolCalls());
+        }
+        if (state.get() == ModelProcessingState.THINKING && StringUtils.isNotEmpty(response.message().text())) {
+            String text = "</think>" + response.message().text();
+            state.set(ModelProcessingState.RESPONDING);
+            return new AiMessage(text, response.message().toolCalls());
+        }
+        if (state.get() == ModelProcessingState.THINKING) {
+            return new AiMessage(response.reasoningContent().text(), response.message().toolCalls());
+        }
+        return response.message();
    }

    private Choir<ChatMessage> createChatCompletion(HttpClassicClientRequest request) {
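Note: the ModelProcessingState enum imported at the top of this change is not part of the diff. Judging from the three constants referenced in getChatMessage, it is presumably a simple three-state marker along the lines of the sketch below; the constant names come from the usage above, everything else (comments, ordering) is an assumption.

// Hypothetical sketch of modelengine.fel.community.model.openai.enums.ModelProcessingState,
// inferred only from how getChatMessage uses it; the real enum may differ.
public enum ModelProcessingState {
    /** No reasoning_content chunk has been seen yet. */
    INITIAL,
    /** Reasoning phase: <think> has been emitted, </think> has not. */
    THINKING,
    /** The first regular content chunk has arrived and </think> has been emitted. */
    RESPONDING
}

Because each createChatStream call allocates its own AtomicReference, the per-stream state stays mutable inside the lambda chain without being shared across requests.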
@@ -180,7 +206,13 @@ private Choir<ChatMessage> createChatCompletion(HttpClassicClientRequest request
            OpenAiChatCompletionResponse chatCompletionResponse = response.objectEntity()
                    .map(ObjectEntity::object)
                    .orElseThrow(() -> new FitException("The response body is abnormal."));
-            return Choir.just(chatCompletionResponse.message());
+            String finalMessage = chatCompletionResponse.message().text();
+            if (StringUtils.isNotBlank(chatCompletionResponse.reasoningContent().text())) {
+                finalMessage = StringUtils.format(RESPONSE_TEMPLATE,
+                        chatCompletionResponse.reasoningContent().text(),
+                        finalMessage);
+            }
+            return Choir.just(new AiMessage(finalMessage, chatCompletionResponse.message().toolCalls()));
        } catch (IOException e) {
            throw new FitException(e);
        }
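As a quick sanity check of the intended output shape, the following standalone sketch replays the same state machine over a hand-written sequence of chunks. It uses plain Java strings in place of the fel response and message types, so Chunk, wrap and the sample texts are illustrative only; only the tag-insertion logic mirrors the diff.

import java.util.List;
import java.util.concurrent.atomic.AtomicReference;

// Standalone illustration of the <think>...</think> wrapping applied in createChatStream.
// "Chunk" stands in for OpenAiChatCompletionResponse; the surrounding types are made up for the demo.
public class ThinkTagDemo {
    enum State { INITIAL, THINKING, RESPONDING }

    record Chunk(String reasoning, String content) {}

    static String wrap(Chunk chunk, AtomicReference<State> state) {
        // First reasoning chunk: open the <think> block.
        if (state.get() == State.INITIAL && !chunk.reasoning().isEmpty()) {
            state.set(State.THINKING);
            return "<think>" + chunk.reasoning();
        }
        // First regular content chunk after reasoning: close the <think> block.
        if (state.get() == State.THINKING && !chunk.content().isEmpty()) {
            state.set(State.RESPONDING);
            return "</think>" + chunk.content();
        }
        // Still inside the reasoning phase: pass reasoning text through unchanged.
        if (state.get() == State.THINKING) {
            return chunk.reasoning();
        }
        // Normal content, or a model that never emits reasoning_content.
        return chunk.content();
    }

    public static void main(String[] args) {
        List<Chunk> stream = List.of(
                new Chunk("Let me check ", ""),
                new Chunk("the units first. ", ""),
                new Chunk("", "The answer "),
                new Chunk("", "is 42."));
        AtomicReference<State> state = new AtomicReference<>(State.INITIAL);
        StringBuilder rendered = new StringBuilder();
        stream.forEach(chunk -> rendered.append(wrap(chunk, state)));
        // Prints: <think>Let me check the units first. </think>The answer is 42.
        System.out.println(rendered);
    }
}

The non-streaming path in createChatCompletion produces the same shape in one shot: when reasoning_content is present, the final text is the reasoning wrapped in <think>...</think> followed by the regular message text, as encoded by RESPONSE_TEMPLATE.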