3030
class Message(BaseModel):
    """A single chat exchange record for the doc-QA service.

    Captures the user's query, the generated response, the retrieval
    sources used, and the embedding/LLM configuration that produced the
    answer. Only ``id`` is required; every other field defaults to
    ``None`` so partially-populated messages (e.g. a query that has not
    been answered yet) validate cleanly under Pydantic.
    """

    # Unique identifier of this message (required).
    id: str
    # The user's question text.
    query: Optional[str] = None
    # Creation time; unit (seconds vs. milliseconds) not shown here — confirm at call site.
    timestamp: Optional[int] = None
    # The model-generated answer.
    response: Optional[str] = None
    # Source URLs surfaced alongside the answer.
    urls: Optional[List[str]] = None
    # Structured reference metadata for retrieved documents.
    references: Optional[List[Dict]] = None
    # Identifier of the document collection searched.
    collection_id: Optional[str] = None

    # --- Retrieval (embedding) configuration used for this message ---
    embedding_model: Optional[str] = None
    embedding_size: Optional[int] = None
    embedding_score_threshold: Optional[float] = None
    embedding_topk: Optional[int] = None

    # --- Generation (LLM) configuration used for this message ---
    llm_model: Optional[str] = None
    llm_prompt_template: Optional[str] = None
    llm_context_window: Optional[int] = None
4646
4747
# Sentinel delimiter marking the start of the references section in a
# streamed doc-QA response (stripped diff artifact "4848" removed from
# the original line).
KUBE_CHAT_DOC_QA_REFERENCES = "|KUBE_CHAT_DOC_QA_REFERENCES|"
@@ -124,7 +124,7 @@ async def generate_related_question(self, related_question_prompt):
124124 if question :
125125 related_questions .append (question )
126126 else :
127- related_questions = []
127+ related_questions = []
128128 if content == '' :
129129 return related_questions
130130 questions = re .sub (r'\n+' , '\n ' , content ).split ('\n ' )
@@ -137,7 +137,7 @@ async def generate_related_question(self, related_question_prompt):
137137 question = match .group (1 )
138138 related_questions .append (question )
139139 return related_questions
140-
140+
141141 @staticmethod
142142 async def new_human_message (message , message_id ):
143143 return Message (
0 commit comments