|
24 | 24 | finish_record, save_analysis_answer, save_predict_answer, save_predict_data, \ |
25 | 25 | save_select_datasource_answer, save_recommend_question_answer, \ |
26 | 26 | get_old_questions, save_analysis_predict_record, rename_chat, get_chart_config, \ |
27 | | - get_chat_chart_data, list_generate_sql_logs, list_generate_chart_logs, start_log, end_log |
| 27 | + get_chat_chart_data, list_generate_sql_logs, list_generate_chart_logs, start_log, end_log, \ |
| 28 | + get_last_execute_sql_error |
28 | 29 | from apps.chat.models.chat_model import ChatQuestion, ChatRecord, Chat, RenameChat, ChatLog, OperationEnum |
29 | 30 | from apps.datasource.crud.datasource import get_table_schema |
30 | 31 | from apps.datasource.crud.permission import get_row_permission_filters, is_normal_user |
@@ -70,6 +71,8 @@ class LLMService: |
70 | 71 | chunk_list: List[str] = [] |
71 | 72 | future: Future |
72 | 73 |
|
|    | 74 | + last_execute_sql_error: Optional[str] = None |
| 75 | + |
73 | 76 | def __init__(self, current_user: CurrentUser, chat_question: ChatQuestion, |
74 | 77 | current_assistant: Optional[CurrentAssistant] = None, no_reasoning: bool = False, |
75 | 78 | config: LLMConfig = None): |
@@ -127,6 +130,15 @@ def __init__(self, current_user: CurrentUser, chat_question: ChatQuestion, |
127 | 130 | llm_instance = LLMFactory.create_llm(self.config) |
128 | 131 | self.llm = llm_instance.llm |
129 | 132 |
|
| 133 | + # get last_execute_sql_error |
| 134 | + last_execute_sql_error = get_last_execute_sql_error(self.session, self.chat_question.chat_id) |
| 135 | + if last_execute_sql_error: |
| 136 | + self.chat_question.error_msg = f'''<error-msg> |
| 137 | +{last_execute_sql_error} |
| 138 | +</error-msg>''' |
| 139 | + else: |
| 140 | + self.chat_question.error_msg = '' |
| 141 | + |
130 | 142 | self.init_messages() |
131 | 143 |
|
132 | 144 | @classmethod |
|
0 commit comments