@@ -63,18 +63,18 @@ def save_context(self, details, workflow_manage):
6363 self .context ['question' ] = details .get ('question' )
6464 self .answer_text = details .get ('answer' )
6565
66- def execute (self , model_id , system , prompt , dialogue_number , history_chat_record , stream , chat_id , chat_record_id ,
66+ def execute (self , model_id , system , prompt , dialogue_number , dialogue_type , history_chat_record , stream , chat_id , chat_record_id ,
6767 image ,
6868 ** kwargs ) -> NodeResult :
6969 image_model = get_model_instance_by_model_user_id (model_id , self .flow_params_serializer .data .get ('user_id' ))
7070 history_message = self .get_history_message (history_chat_record , dialogue_number )
7171 self .context ['history_message' ] = history_message
7272 question = self .generate_prompt_question (prompt )
7373 self .context ['question' ] = question .content
74- # todo 处理上传图片
7574 message_list = self .generate_message_list (image_model , system , prompt , history_message , image )
7675 self .context ['message_list' ] = message_list
7776 self .context ['image_list' ] = image
77+ self .context ['dialogue_type' ] = dialogue_type
7878 if stream :
7979 r = image_model .stream (message_list )
8080 return NodeResult ({'result' : r , 'chat_model' : image_model , 'message_list' : message_list ,
@@ -86,15 +86,31 @@ def execute(self, model_id, system, prompt, dialogue_number, history_chat_record
8686 'history_message' : history_message , 'question' : question .content }, {},
8787 _write_context = write_context )
8888
def get_history_message(self, history_chat_record, dialogue_number):
    """Return the most recent *dialogue_number* rounds of chat history as a
    flat, chronological message list: [human, ai, human, ai, ...].

    The human side of each round is rebuilt through
    generate_history_human_message so that images uploaded in earlier turns
    are re-attached for the multimodal model.

    :param history_chat_record: full list of prior chat records, oldest first
    :param dialogue_number: number of trailing rounds to keep
    :return: flat list of history messages (empty when there is no history)
    """
    start_index = max(len(history_chat_record) - dialogue_number, 0)
    # Flatten with a nested comprehension instead of
    # reduce(lambda x, y: [*x, *y], ...) — linear instead of quadratic
    # list copying, and no functools dependency.
    return [
        message
        for record in history_chat_record[start_index:]
        for message in (self.generate_history_human_message(record),
                        record.get_ai_message())
    ]
9796
def generate_history_human_message(self, chat_record):
    """Build the human-side history message for a single chat record.

    When this node previously ran with an uploaded image (and the record was
    not produced in 'WORKFLOW' dialogue mode), the first image is re-attached
    as a base64 data URL so the vision model keeps its visual context.
    Otherwise the plain question text is used.

    :param chat_record: a prior chat record with .details and .problem_text
    :return: HumanMessage with either text content or text + image_url parts
    """
    for data in chat_record.details.values():
        # Only consider detail entries produced by this very node that
        # recorded an image list.
        if self.node.id != data.get('node_id') or 'image_list' not in data:
            continue
        image_list = data['image_list']
        # Older records predate the 'dialogue_type' field — .get() avoids a
        # KeyError and treats them as non-WORKFLOW.
        if not image_list or data.get('dialogue_type') == 'WORKFLOW':
            return HumanMessage(content=chat_record.problem_text)
        file = QuerySet(File).filter(id=image_list[0]['file_id']).first()
        if file is None:
            # The stored image has been deleted; degrade gracefully to text.
            return HumanMessage(content=chat_record.problem_text)
        base64_image = base64.b64encode(file.get_byte()).decode('utf-8')
        # NOTE(review): media type is hard-coded to jpeg — presumably uploads
        # are normalized upstream; confirm against the upload pipeline.
        return HumanMessage(content=[
            {'type': 'text', 'text': data['question']},
            {'type': 'image_url',
             'image_url': {'url': f'data:image/jpeg;base64,{base64_image}'}},
        ])
    # No matching node detail: fall back to the recorded question text.
    return HumanMessage(content=chat_record.problem_text)
113+
def generate_prompt_question(self, prompt):
    """Render *prompt* through the workflow manager and wrap the result
    as a HumanMessage for the chat model."""
    rendered_prompt = self.workflow_manage.generate_prompt(prompt)
    return HumanMessage(rendered_prompt)
100116
@@ -148,5 +164,6 @@ def get_details(self, index: int, **kwargs):
148164 'answer_tokens' : self .context .get ('answer_tokens' ),
149165 'status' : self .status ,
150166 'err_message' : self .err_message ,
151- 'image_list' : self .context .get ('image_list' )
167+ 'image_list' : self .context .get ('image_list' ),
168+ 'dialogue_type' : self .context .get ('dialogue_type' )
152169 }
0 commit comments