import json
import time
import uuid
-from typing import List, Dict
+from typing import Dict
+
from application.flow.i_step_node import NodeResult, INode
from application.flow.step_node.application_node.i_application_node import IApplicationNode
from application.models import Chat
-from common.handle.impl.response.openai_to_response import OpenaiToResponse


def string_to_uuid(input_str):
    return str(uuid.uuid5(uuid.NAMESPACE_DNS, input_str))


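+# Returns True when the embedded application's run was interrupted (the flag is set in write_context_stream below).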
+def _is_interrupt_exec(node, node_variable: Dict, workflow_variable: Dict):
+    return node_variable.get('is_interrupt_exec', False)
+
+
def _write_context(node_variable: Dict, workflow_variable: Dict, node: INode, workflow, answer: str):
    result = node_variable.get('result')
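+    # Record the child-node position and the interrupt flag in the node context alongside the token usage.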
+    node.context['child_node'] = node_variable['child_node']
+    node.context['is_interrupt_exec'] = node_variable['is_interrupt_exec']
    node.context['message_tokens'] = result.get('usage', {}).get('prompt_tokens', 0)
    node.context['answer_tokens'] = result.get('usage', {}).get('completion_tokens', 0)
    node.context['answer'] = answer
@@ -36,17 +42,34 @@ def write_context_stream(node_variable: Dict, workflow_variable: Dict, node: INo
    response = node_variable.get('result')
    answer = ''
    usage = {}
+    node_child_node = {}
+    is_interrupt_exec = False
    for chunk in response:
        # Convert the stream chunk to a string first
        response_content = chunk.decode('utf-8')[6:]
        response_content = json.loads(response_content)
-        choices = response_content.get('choices')
-        if choices and isinstance(choices, list) and len(choices) > 0:
-            content = choices[0].get('delta', {}).get('content', '')
-            answer += content
-            yield content
+        content = response_content.get('content', '')
+        runtime_node_id = response_content.get('runtime_node_id', '')
+        chat_record_id = response_content.get('chat_record_id', '')
+        child_node = response_content.get('child_node')
+        node_type = response_content.get('node_type')
+        real_node_id = response_content.get('real_node_id')
+        node_is_end = response_content.get('node_is_end', False)
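+        # A 'form-node' chunk from the child application marks this execution as interrupted.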
+        if node_type == 'form-node':
+            is_interrupt_exec = True
+        answer += content
+        node_child_node = {'runtime_node_id': runtime_node_id, 'chat_record_id': chat_record_id,
+                           'child_node': child_node}
+        yield {'content': content,
+               'node_type': node_type,
+               'runtime_node_id': runtime_node_id, 'chat_record_id': chat_record_id,
+               'child_node': child_node,
+               'real_node_id': real_node_id,
+               'node_is_end': node_is_end}
        usage = response_content.get('usage', {})
    node_variable['result'] = {'usage': usage}
+    node_variable['is_interrupt_exec'] = is_interrupt_exec
+    node_variable['child_node'] = node_child_node
    _write_context(node_variable, workflow_variable, node, workflow, answer)


@@ -64,6 +87,11 @@ def write_context(node_variable: Dict, workflow_variable: Dict, node: INode, wor


class BaseApplicationNode(IApplicationNode):
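+    # Expose the answer together with the runtime node, chat record and child-node info of the embedded run.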
+    def get_answer_text(self):
+        if self.answer_text is None:
+            return None
+        return {'content': self.answer_text, 'runtime_node_id': self.runtime_node_id,
+                'chat_record_id': self.workflow_params['chat_record_id'], 'child_node': self.context.get('child_node')}

    def save_context(self, details, workflow_manage):
        self.context['answer'] = details.get('answer')
@@ -72,7 +100,7 @@ def save_context(self, details, workflow_manage):
        self.answer_text = details.get('answer')

    def execute(self, application_id, message, chat_id, chat_record_id, stream, re_chat, client_id, client_type,
-                app_document_list=None, app_image_list=None,
+                app_document_list=None, app_image_list=None, child_node=None, node_data=None,
                **kwargs) -> NodeResult:
        from application.serializers.chat_message_serializers import ChatMessageSerializer
        # Generate the chat_id for the embedded application
@@ -85,6 +113,14 @@ def execute(self, application_id, message, chat_id, chat_record_id, stream, re_c
            app_document_list = []
        if app_image_list is None:
            app_image_list = []
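+        # Unpack the child-node position (runtime node, chat record, nested child node) if one was passed in.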
+        runtime_node_id = None
+        record_id = None
+        child_node_value = None
+        if child_node is not None:
+            runtime_node_id = child_node.get('runtime_node_id')
+            record_id = child_node.get('chat_record_id')
+            child_node_value = child_node.get('child_node')
+
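+        # Pass the child-node position and node_data on to the embedded application's chat request.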
        response = ChatMessageSerializer(
            data={'chat_id': current_chat_id, 'message': message,
                  're_chat': re_chat,
@@ -94,16 +130,20 @@ def execute(self, application_id, message, chat_id, chat_record_id, stream, re_c
                  'client_type': client_type,
                  'document_list': app_document_list,
                  'image_list': app_image_list,
-                  'form_data': kwargs}).chat(base_to_response=OpenaiToResponse())
+                  'runtime_node_id': runtime_node_id,
+                  'chat_record_id': record_id,
+                  'child_node': child_node_value,
+                  'node_data': node_data,
+                  'form_data': kwargs}).chat()
        if response.status_code == 200:
            if stream:
                content_generator = response.streaming_content
                return NodeResult({'result': content_generator, 'question': message}, {},
-                                  _write_context=write_context_stream)
+                                  _write_context=write_context_stream, _is_interrupt=_is_interrupt_exec)
            else:
                data = json.loads(response.content)
                return NodeResult({'result': data, 'question': message}, {},
-                                  _write_context=write_context)
+                                  _write_context=write_context, _is_interrupt=_is_interrupt_exec)

    def get_details(self, index: int, **kwargs):
        global_fields = []