@@ -92,15 +92,24 @@ def execute(self, model_id, system, prompt, dialogue_number, dialogue_type, hist
                            'history_message': history_message, 'question': question.content}, {},
                           _write_context=write_context)
 
-    @staticmethod
-    def get_history_message_for_details(history_chat_record, dialogue_number):
+
+    def get_history_message_for_details(self, history_chat_record, dialogue_number):
         start_index = len(history_chat_record) - dialogue_number
         history_message = reduce(lambda x, y: [*x, *y], [
-            [history_chat_record[index].get_human_message(), history_chat_record[index].get_ai_message()]
+            [self.generate_history_human_message_for_details(history_chat_record[index]), history_chat_record[index].get_ai_message()]
             for index in
             range(start_index if start_index > 0 else 0, len(history_chat_record))], [])
         return history_message
 
+    def generate_history_human_message_for_details(self, chat_record):
+        for data in chat_record.details.values():
+            if self.node.id == data['node_id'] and 'image_list' in data:
+                image_list = data['image_list']
+                if len(image_list) == 0 or data['dialogue_type'] == 'WORKFLOW':
+                    return HumanMessage(content=chat_record.problem_text)
+                return HumanMessage(content=data['question'])
+        return HumanMessage(content=chat_record.problem_text)
+
     def get_history_message(self, history_chat_record, dialogue_number):
         start_index = len(history_chat_record) - dialogue_number
         history_message = reduce(lambda x, y: [*x, *y], [
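The new `generate_history_human_message_for_details` helper decides which human message is rebuilt into the chat history for this node: the image-augmented question stored in the record's details, or the plain problem text. Below is a minimal, hedged sketch of that decision in isolation; `FakeNode`, `FakeChatRecord` and `FakeImageUnderstandStep` are hypothetical stand-ins for the real workflow classes, and `HumanMessage` is assumed to be langchain's message type, as in the surrounding file. It is not part of the diff above.

```python
# Hedged sketch: reproduce the selection logic of generate_history_human_message_for_details
# with stand-in classes. FakeNode/FakeChatRecord/FakeImageUnderstandStep are hypothetical.
from dataclasses import dataclass, field

from langchain_core.messages import HumanMessage


@dataclass
class FakeNode:
    id: str = 'node-1'


@dataclass
class FakeChatRecord:
    problem_text: str = 'What is in this picture?'
    details: dict = field(default_factory=dict)


class FakeImageUnderstandStep:
    def __init__(self):
        self.node = FakeNode()

    def generate_history_human_message_for_details(self, chat_record):
        # Reuse the image-augmented question only when this node produced it,
        # it actually contains images, and the record is not a WORKFLOW dialogue;
        # otherwise fall back to the plain problem text.
        for data in chat_record.details.values():
            if self.node.id == data['node_id'] and 'image_list' in data:
                image_list = data['image_list']
                if len(image_list) == 0 or data['dialogue_type'] == 'WORKFLOW':
                    return HumanMessage(content=chat_record.problem_text)
                return HumanMessage(content=data['question'])
        return HumanMessage(content=chat_record.problem_text)


step = FakeImageUnderstandStep()
record = FakeChatRecord(details={'image-understand': {
    'node_id': 'node-1',
    'dialogue_type': 'NODE',
    'image_list': ['https://example.com/cat.png'],
    'question': [{'type': 'text', 'text': 'What is in this picture?'},
                 {'type': 'image_url', 'image_url': {'url': 'https://example.com/cat.png'}}],
}})
# Picks the multimodal question: the node id matches, the image list is
# non-empty, and the dialogue type is not WORKFLOW.
print(step.generate_history_human_message_for_details(record).content)
```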