Skip to content

Commit 824bb94

Browse files
committed
[fel] adapter llm chat reasoning_content
1 parent 3e1f251 commit 824bb94

File tree

4 files changed

+103
-9
lines changed

4 files changed

+103
-9
lines changed

framework/fel/java/fel-community/model-openai/src/main/java/modelengine/fel/community/model/openai/OpenAiModel.java

Lines changed: 34 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -19,11 +19,13 @@
1919
import modelengine.fel.community.model.openai.entity.embed.OpenAiEmbeddingResponse;
2020
import modelengine.fel.community.model.openai.entity.image.OpenAiImageRequest;
2121
import modelengine.fel.community.model.openai.entity.image.OpenAiImageResponse;
22+
import modelengine.fel.community.model.openai.enums.ModelProcessingState;
2223
import modelengine.fel.community.model.openai.util.HttpUtils;
2324
import modelengine.fel.core.chat.ChatMessage;
2425
import modelengine.fel.core.chat.ChatModel;
2526
import modelengine.fel.core.chat.ChatOption;
2627
import modelengine.fel.core.chat.Prompt;
28+
import modelengine.fel.core.chat.support.AiMessage;
2729
import modelengine.fel.core.embed.EmbedModel;
2830
import modelengine.fel.core.embed.EmbedOption;
2931
import modelengine.fel.core.embed.Embedding;
@@ -59,6 +61,7 @@
5961
import java.util.List;
6062
import java.util.Map;
6163
import java.util.concurrent.ConcurrentHashMap;
64+
import java.util.concurrent.atomic.AtomicReference;
6265
import java.util.stream.Collectors;
6366

6467
/**
@@ -78,6 +81,7 @@ public class OpenAiModel implements EmbedModel, ChatModel, ImageModel {
7881
.put("client.http.secure.key-store-file", Boolean.FALSE)
7982
.put("client.http.secure.key-store-password", Boolean.TRUE)
8083
.build();
84+
private static final String RESPONSE_TEMPLATE = "<think>{0}<//think>{1}";
8185

8286
private final HttpClassicClientFactory httpClientFactory;
8387
private final HttpClassicClientFactory.Config clientConfig;
@@ -167,11 +171,33 @@ public List<Media> generate(String prompt, ImageOption option) {
167171
}
168172

169173
private Choir<ChatMessage> createChatStream(HttpClassicClientRequest request) {
174+
AtomicReference<ModelProcessingState> modelProcessingState =
175+
new AtomicReference<>(ModelProcessingState.INITIAL);
170176
return request.<String>exchangeStream(String.class)
171177
.filter(str -> !StringUtils.equals(str, "[DONE]"))
172178
.map(str -> this.serializer.<OpenAiChatCompletionResponse>deserialize(str,
173179
OpenAiChatCompletionResponse.class))
174-
.map(OpenAiChatCompletionResponse::message);
180+
.map(response -> getChatMessage(response, modelProcessingState));
181+
}
182+
183+
private ChatMessage getChatMessage(OpenAiChatCompletionResponse response,
184+
AtomicReference<ModelProcessingState> state) {
185+
// 适配reasoning_content格式返回的模型推理内容,模型生成内容顺序为先reasoning_content后content
186+
// 在第一个reasoning_content chunk之前增加<think>标签,并且在第一个content chunk之前增加</think>标签
187+
if (state.get() == ModelProcessingState.INITIAL && StringUtils.isNotEmpty(response.reasoningContent().text())) {
188+
String text = "<think>" + response.reasoningContent().text();
189+
state.set(ModelProcessingState.THINKING);
190+
return new AiMessage(text, response.message().toolCalls());
191+
}
192+
if (state.get() == ModelProcessingState.THINKING && StringUtils.isNotEmpty(response.message().text())) {
193+
String text = "</think>" + response.message().text();
194+
state.set(ModelProcessingState.RESPONDING);
195+
return new AiMessage(text, response.message().toolCalls());
196+
}
197+
if (state.get() == ModelProcessingState.THINKING) {
198+
return new AiMessage(response.reasoningContent().text(), response.message().toolCalls());
199+
}
200+
return response.message();
175201
}
176202

177203
private Choir<ChatMessage> createChatCompletion(HttpClassicClientRequest request) {
@@ -180,7 +206,13 @@ private Choir<ChatMessage> createChatCompletion(HttpClassicClientRequest request
180206
OpenAiChatCompletionResponse chatCompletionResponse = response.objectEntity()
181207
.map(ObjectEntity::object)
182208
.orElseThrow(() -> new FitException("The response body is abnormal."));
183-
return Choir.just(chatCompletionResponse.message());
209+
String finalMessage = chatCompletionResponse.message().text();
210+
if (StringUtils.isNotBlank(chatCompletionResponse.reasoningContent().text())) {
211+
finalMessage = StringUtils.format(RESPONSE_TEMPLATE,
212+
chatCompletionResponse.reasoningContent().text(),
213+
finalMessage);
214+
}
215+
return Choir.just(new AiMessage(finalMessage, chatCompletionResponse.message().toolCalls()));
184216
} catch (IOException e) {
185217
throw new FitException(e);
186218
}

framework/fel/java/fel-community/model-openai/src/main/java/modelengine/fel/community/model/openai/entity/chat/OpenAiChatCompletionResponse.java

Lines changed: 28 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -6,8 +6,6 @@
66

77
package modelengine.fel.community.model.openai.entity.chat;
88

9-
import static modelengine.fitframework.util.ObjectUtils.cast;
10-
119
import modelengine.fel.core.chat.ChatMessage;
1210
import modelengine.fel.core.chat.support.AiMessage;
1311
import modelengine.fel.core.tool.ToolCall;
@@ -16,7 +14,10 @@
1614
import modelengine.fitframework.util.CollectionUtils;
1715
import modelengine.fitframework.util.StringUtils;
1816

17+
import java.util.Collections;
1918
import java.util.List;
19+
import java.util.Optional;
20+
import java.util.function.Function;
2021

2122
/**
2223
* OpenAi API 格式的会话补全响应。
@@ -36,18 +37,38 @@ public class OpenAiChatCompletionResponse {
3637
    /**
     * Gets the model's reply from the first choice of the response.
     *
     * @return The model reply as a {@link ChatMessage}.
     */
    public ChatMessage message() {
        return extractMessage(OpenAiChatMessage::content, OpenAiChatMessage::toolCalls);
    }
42+
43+
    /**
     * Gets the model's reasoning ("reasoning_content") from the response.
     *
     * @return The reasoning content as a {@link ChatMessage}.
     */
    public ChatMessage reasoningContent() {
        // Reasoning content is currently assumed never to carry a tool call,
        // so tool calls are deliberately dropped here.
        return extractMessage(OpenAiChatMessage::reasoningContent, m -> null);
    }
52+
53+
private ChatMessage extractMessage(
54+
Function<OpenAiChatMessage, Object> contentExtractor,
55+
Function<OpenAiChatMessage, List<ToolCall>> toolCallsExtractor) {
3956
if (CollectionUtils.isEmpty(choices)) {
4057
return EMPTY_RESPONSE;
4158
}
4259
OpenAiChatMessage openAiChatMessage = choices.get(0).message;
4360
if (openAiChatMessage == null) {
4461
return EMPTY_RESPONSE;
4562
}
46-
String content = StringUtils.EMPTY;
47-
if (openAiChatMessage.content() instanceof String) {
48-
content = cast(openAiChatMessage.content());
49-
}
50-
List<ToolCall> toolCalls = CollectionUtils.asParent(openAiChatMessage.toolCalls());
63+
64+
String content = Optional.ofNullable(contentExtractor.apply(openAiChatMessage))
65+
.filter(obj -> obj instanceof String)
66+
.map(obj -> (String) obj)
67+
.orElse(StringUtils.EMPTY);
68+
69+
List<ToolCall> toolCalls = Optional.ofNullable(toolCallsExtractor.apply(openAiChatMessage))
70+
.orElse(Collections.emptyList());
71+
5172
return new AiMessage(content, toolCalls);
5273
}
5374

framework/fel/java/fel-community/model-openai/src/main/java/modelengine/fel/community/model/openai/entity/chat/OpenAiChatMessage.java

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -40,6 +40,8 @@ public class OpenAiChatMessage {
4040
private String toolCallId;
4141
@Property(name = "tool_calls")
4242
private List<OpenAiToolCall> toolCalls;
43+
    // Reasoning text that some models return in a separate "reasoning_content"
    // field, distinct from the regular "content" field.
    @Property(name = "reasoning_content")
    private String reasoningContent;
4345

4446
/**
4547
* 将 {@link ChatMessage} 对象转换为 {@link OpenAiChatMessage} 对象。
@@ -79,6 +81,15 @@ public Object content() {
7981
return this.content;
8082
}
8183

84+
    /**
     * Gets the model's reasoning content.
     *
     * @return The reasoning content as a {@link String}.
     */
    public String reasoningContent() {
        return this.reasoningContent;
    }
92+
8293
/**
8394
* 获取消息的工具调用。
8495
*
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,30 @@
1+
/*---------------------------------------------------------------------------------------------
 * Copyright (c) 2025 Huawei Technologies Co., Ltd. All rights reserved.
 * This file is a part of the ModelEngine Project.
 * Licensed under the MIT License. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

package modelengine.fel.community.model.openai.enums;

/**
 * Enumerates the content-generation states of a model whose output separates
 * reasoning ("reasoning_content") from the final answer ("content").
 *
 * @author 孙怡菲
 * @since 2025-04-29
 */
public enum ModelProcessingState {
    /**
     * The initial state: no reasoning content has been seen yet.
     */
    INITIAL,

    /**
     * The internal-reasoning state: reasoning chunks are being produced.
     */
    THINKING,

    /**
     * The result-generation state: final answer chunks are being produced.
     */
    RESPONDING
}

0 commit comments

Comments
 (0)