
Commit 6a5ec86

fix: Lost content during the answering process (#2256)
1 parent 2ba7a24 commit 6a5ec86

File tree: 4 files changed (+53, -8 lines)


apps/application/chat_pipeline/step/chat_step/impl/base_chat_step.py

Lines changed: 21 additions & 2 deletions
@@ -76,10 +76,12 @@ def event_content(response,
     all_text = ''
     reasoning_content = ''
     try:
+        response_reasoning_content = False
         for chunk in response:
             reasoning_chunk = reasoning.get_reasoning_content(chunk)
             content_chunk = reasoning_chunk.get('content')
             if 'reasoning_content' in chunk.additional_kwargs:
+                response_reasoning_content = True
                 reasoning_content_chunk = chunk.additional_kwargs.get('reasoning_content', '')
             else:
                 reasoning_content_chunk = reasoning_chunk.get('reasoning_content')
@@ -95,6 +97,21 @@ def event_content(response,
                                                                         'node_type': 'ai-chat-node',
                                                                         'real_node_id': 'ai-chat-node',
                                                                         'reasoning_content': reasoning_content_chunk if reasoning_content_enable else ''})
+        reasoning_chunk = reasoning.get_end_reasoning_content()
+        all_text += reasoning_chunk.get('content')
+        reasoning_content_chunk = ""
+        if not response_reasoning_content:
+            reasoning_content_chunk = reasoning_chunk.get(
+                'reasoning_content')
+        yield manage.get_base_to_response().to_stream_chunk_response(chat_id, str(chat_record_id), 'ai-chat-node',
+                                                                     [], reasoning_chunk.get('content'),
+                                                                     False,
+                                                                     0, 0, {'node_is_end': False,
+                                                                            'view_type': 'many_view',
+                                                                            'node_type': 'ai-chat-node',
+                                                                            'real_node_id': 'ai-chat-node',
+                                                                            'reasoning_content'
+                                                                            : reasoning_content_chunk if reasoning_content_enable else ''})
         # Get tokens
         if is_ai_chat:
             try:
@@ -276,11 +293,13 @@ def execute_block(self, message_list: List[BaseMessage],
             response_token = 0
         write_context(self, manage, request_token, response_token, chat_result.content)
         reasoning_result = reasoning.get_reasoning_content(chat_result)
-        content = reasoning_result.get('content')
+        reasoning_result_end = reasoning.get_end_reasoning_content()
+        content = reasoning_result.get('content') + reasoning_result_end.get('content')
         if 'reasoning_content' in chat_result.response_metadata:
             reasoning_content = chat_result.response_metadata.get('reasoning_content', '')
         else:
-            reasoning_content = reasoning_result.get('reasoning_content')
+            reasoning_content = reasoning_result.get('reasoning_content') + reasoning_result_end.get(
+                'reasoning_content')
         post_response_handler.handler(chat_id, chat_record_id, paragraph_list, problem_text,
                                       chat_result.content, manage, self, padding_problem_text, client_id,
                                       reasoning_content=reasoning_content if reasoning_content_enable else '')
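
Both streaming hunks in this commit (the one above and the matching one in base_chat_node.py below) follow the same pattern: parse each chunk as it arrives, then, once the iterator is exhausted, ask the parser for whatever it is still buffering and emit one final chunk. Below is a minimal sketch of that pattern only, not the project's code: ThinkSplitter, feed and flush are hypothetical stand-ins for the Reasoning helper, and the sketch assumes a single <think>...</think> block whose start and end tags arrive in different chunks.

# Minimal, self-contained sketch of the flush-after-the-loop pattern.
# ThinkSplitter is a hypothetical stand-in for the project's Reasoning helper;
# it assumes one <think>...</think> block whose tags arrive in separate chunks.
class ThinkSplitter:
    def __init__(self, start="<think>", end="</think>"):
        self.start, self.end = start, end
        self.buffer = ""          # text whose role (answer vs. reasoning) is undecided
        self.in_reasoning = False

    def feed(self, text):
        """Consume one streamed chunk; return what can safely be emitted now."""
        self.buffer += text
        if not self.in_reasoning and self.start in self.buffer:
            before, _, self.buffer = self.buffer.partition(self.start)
            self.in_reasoning = True
            return {"content": before, "reasoning_content": ""}
        if self.in_reasoning and self.end in self.buffer:
            reasoning, _, self.buffer = self.buffer.partition(self.end)
            self.in_reasoning = False
            return {"content": "", "reasoning_content": reasoning}
        if not self.in_reasoning and self.start[0] not in self.buffer:
            emitted, self.buffer = self.buffer, ""   # cannot be a partial tag
            return {"content": emitted, "reasoning_content": ""}
        return {"content": "", "reasoning_content": ""}  # keep buffering

    def flush(self):
        """Return whatever is still buffered once the stream is exhausted."""
        leftover, self.buffer = self.buffer, ""
        if self.in_reasoning:
            return {"content": "", "reasoning_content": leftover}
        return {"content": leftover, "reasoning_content": ""}


def stream(chunks):
    splitter = ThinkSplitter()
    for chunk in chunks:
        yield splitter.feed(chunk)
    # Without this final yield, anything still sitting in the buffer when the
    # model stops streaming is silently dropped.
    yield splitter.flush()


parts = list(stream(["<think>plan the answer", "</think>Hello,", " world", "!<th"]))
print("".join(p["content"] for p in parts))            # Hello, world!<th
print("".join(p["reasoning_content"] for p in parts))  # plan the answer

In the example, the trailing "!<th" still looked like the start of a tag when the stream ended; the final flush emits it as answer text instead of dropping it, which is the kind of lost content this commit addresses.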

apps/application/flow/step_node/ai_chat_step_node/impl/base_chat_node.py

Lines changed: 15 additions & 2 deletions
@@ -55,10 +55,12 @@ def write_context_stream(node_variable: Dict, workflow_variable: Dict, node: INo
                                                 'reasoning_content_start': '<think>'})
     reasoning = Reasoning(model_setting.get('reasoning_content_start', '<think>'),
                           model_setting.get('reasoning_content_end', '</think>'))
+    response_reasoning_content = False
     for chunk in response:
         reasoning_chunk = reasoning.get_reasoning_content(chunk)
         content_chunk = reasoning_chunk.get('content')
         if 'reasoning_content' in chunk.additional_kwargs:
+            response_reasoning_content = True
             reasoning_content_chunk = chunk.additional_kwargs.get('reasoning_content', '')
         else:
             reasoning_content_chunk = reasoning_chunk.get('reasoning_content')
@@ -69,6 +71,16 @@ def write_context_stream(node_variable: Dict, workflow_variable: Dict, node: INo
         yield {'content': content_chunk,
                'reasoning_content': reasoning_content_chunk if model_setting.get('reasoning_content_enable',
                                                                                  False) else ''}
+
+    reasoning_chunk = reasoning.get_end_reasoning_content()
+    answer += reasoning_chunk.get('content')
+    reasoning_content_chunk = ""
+    if not response_reasoning_content:
+        reasoning_content_chunk = reasoning_chunk.get(
+            'reasoning_content')
+    yield {'content': reasoning_chunk.get('content'),
+           'reasoning_content': reasoning_content_chunk if model_setting.get('reasoning_content_enable',
+                                                                             False) else ''}
     _write_context(node_variable, workflow_variable, node, workflow, answer, reasoning_content)
@@ -86,11 +98,12 @@ def write_context(node_variable: Dict, workflow_variable: Dict, node: INode, wor
                                                 'reasoning_content_start': '<think>'})
     reasoning = Reasoning(model_setting.get('reasoning_content_start'), model_setting.get('reasoning_content_end'))
     reasoning_result = reasoning.get_reasoning_content(response)
-    content = reasoning_result.get('content')
+    reasoning_result_end = reasoning.get_end_reasoning_content()
+    content = reasoning_result.get('content') + reasoning_result_end.get('content')
     if 'reasoning_content' in response.response_metadata:
         reasoning_content = response.response_metadata.get('reasoning_content', '')
     else:
-        reasoning_content = reasoning_result.get('reasoning_content')
+        reasoning_content = reasoning_result.get('reasoning_content') + reasoning_result_end.get('reasoning_content')
     _write_context(node_variable, workflow_variable, node, workflow, content, reasoning_content)
apps/application/flow/tools.py

Lines changed: 12 additions & 2 deletions
@@ -32,6 +32,17 @@ def __init__(self, reasoning_content_start, reasoning_content_end):
         self.reasoning_content_is_end = False
         self.reasoning_content_chunk = ""
 
+    def get_end_reasoning_content(self):
+        if not self.reasoning_content_is_start and not self.reasoning_content_is_end:
+            r = {'content': self.all_content, 'reasoning_content': ''}
+            self.reasoning_content_chunk = ""
+            return r
+        if self.reasoning_content_is_start and not self.reasoning_content_is_end:
+            r = {'content': '', 'reasoning_content': self.reasoning_content_chunk}
+            self.reasoning_content_chunk = ""
+            return r
+        return {'content': '', 'reasoning_content': ''}
+
     def get_reasoning_content(self, chunk):
         # If there is no reasoning start tag, everything is answer content
         if self.reasoning_content_start_tag is None or len(self.reasoning_content_start_tag) == 0:
@@ -60,8 +71,7 @@ def get_reasoning_content(self, chunk):
             return {'content': chunk.content, 'reasoning_content': ''}
         # Does the buffer contain the end tag
         if reasoning_content_end_tag_prefix_index > -1:
-            if len(
-                    self.reasoning_content_chunk) - reasoning_content_end_tag_prefix_index > self.reasoning_content_end_tag_len:
+            if len(self.reasoning_content_chunk) - reasoning_content_end_tag_prefix_index >= self.reasoning_content_end_tag_len:
                 reasoning_content_end_tag_index = self.reasoning_content_chunk.find(self.reasoning_content_end_tag)
                 if reasoning_content_end_tag_index > -1:
                     reasoning_content_chunk = self.reasoning_content_chunk[0:reasoning_content_end_tag_index]
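
Two details in this hunk are easy to miss. get_end_reasoning_content separates the case where the start tag never appeared (whatever was held back is answer content) from the case where the start tag appeared but the end tag never did (whatever was held back is reasoning). And the length comparison changes from > to >=, which matters exactly when the buffered text ends with the complete end tag. A rough check of that boundary, where buffer, prefix_index and end_tag are simplified stand-ins for reasoning_content_chunk, reasoning_content_end_tag_prefix_index and reasoning_content_end_tag:

# Hypothetical buffer state at the moment the closing tag has just fully arrived.
buffer = "some reasoning</think>"
end_tag = "</think>"

prefix_index = buffer.find("<")          # simplified stand-in for the prefix index
remaining = len(buffer) - prefix_index   # characters available from that point on

print(remaining > len(end_tag))    # False: the old strict check skips this buffer,
                                   # so the tag is only handled once more text arrives
                                   # (or never, if the stream ends here).
print(remaining >= len(end_tag))   # True: the new check inspects the buffer now...
print(buffer.find(end_tag))        # ...and finds the end tag at index 14.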

ui/src/api/type/application.ts

Lines changed: 5 additions & 2 deletions
@@ -158,12 +158,15 @@ export class ChatRecordManage {
   get_run_node() {
     if (
       this.write_node_info &&
-      (this.write_node_info.current_node.buffer.length > 0 ||
+      (this.write_node_info.current_node.reasoning_content_buffer.length > 0 ||
+        this.write_node_info.current_node.buffer.length > 0 ||
         !this.write_node_info.current_node.is_end)
     ) {
       return this.write_node_info
     }
-    const run_node = this.node_list.filter((item) => item.buffer.length > 0 || !item.is_end)[0]
+    const run_node = this.node_list.filter(
+      (item) => item.reasoning_content_buffer.length > 0 || item.buffer.length > 0 || !item.is_end
+    )[0]
 
     if (run_node) {
       const index = this.node_list.indexOf(run_node)
