@@ -67,11 +67,13 @@ def execute(self, model_id, system, prompt, dialogue_number, dialogue_type, hist
                 image,
                 **kwargs) -> NodeResult:
         image_model = get_model_instance_by_model_user_id(model_id, self.flow_params_serializer.data.get('user_id'))
-        history_message = self.get_history_message(history_chat_record, dialogue_number)
+        # History messages shown in the execution details do not need image content
+        history_message = self.get_history_message_for_details(history_chat_record, dialogue_number)
         self.context['history_message'] = history_message
         question = self.generate_prompt_question(prompt)
         self.context['question'] = question.content
-        message_list = self.generate_message_list(image_model, system, prompt, history_message, image)
+        # Generate the message list using the real history_message
+        message_list = self.generate_message_list(image_model, system, prompt, self.get_history_message(history_chat_record, dialogue_number), image)
         self.context['message_list'] = message_list
         self.context['image_list'] = image
         self.context['dialogue_type'] = dialogue_type
@@ -86,6 +88,15 @@ def execute(self, model_id, system, prompt, dialogue_number, dialogue_type, hist
                                'history_message': history_message, 'question': question.content}, {},
                            _write_context=write_context)

+    @staticmethod
+    def get_history_message_for_details(history_chat_record, dialogue_number):
+        start_index = len(history_chat_record) - dialogue_number
+        history_message = reduce(lambda x, y: [*x, *y], [
+            [history_chat_record[index].get_human_message(), history_chat_record[index].get_ai_message()]
+            for index in
+            range(start_index if start_index > 0 else 0, len(history_chat_record))], [])
+        return history_message
+
     def get_history_message(self, history_chat_record, dialogue_number):
         start_index = len(history_chat_record) - dialogue_number
         history_message = reduce(lambda x, y: [*x, *y], [
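
For reference, here is a minimal, self-contained sketch of what the new `get_history_message_for_details` helper computes: it keeps at most the last `dialogue_number` chat records and flattens each record's (human, AI) pair into one alternating message list. The `ChatRecord` class and sample data below are hypothetical stand-ins used only to make the sketch runnable; the helper body itself mirrors the diff above.

```python
from functools import reduce

# Hypothetical stand-in for the project's chat record objects (assumption,
# included only so the sketch is self-contained and runnable).
class ChatRecord:
    def __init__(self, human, ai):
        self._human, self._ai = human, ai

    def get_human_message(self):
        return self._human

    def get_ai_message(self):
        return self._ai


def get_history_message_for_details(history_chat_record, dialogue_number):
    # Take at most the last `dialogue_number` records and flatten each
    # [human, ai] pair into a single alternating list.
    start_index = len(history_chat_record) - dialogue_number
    return reduce(lambda x, y: [*x, *y], [
        [history_chat_record[index].get_human_message(),
         history_chat_record[index].get_ai_message()]
        for index in range(start_index if start_index > 0 else 0, len(history_chat_record))], [])


records = [ChatRecord(f"question {i}", f"answer {i}") for i in range(4)]
print(get_history_message_for_details(records, 2))
# ['question 2', 'answer 2', 'question 3', 'answer 3']
```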