     delete_chat, get_chat_chart_data, get_chat_predict_data
 from apps.chat.models.chat_model import CreateChat, ChatRecord, RenameChat, ChatQuestion
 from apps.chat.task.llm import LLMService, run_task, run_analysis_or_predict_task, run_recommend_questions_task
-from common.core.deps import SessionDep, CurrentUser
+from common.core.deps import CurrentAssistant, SessionDep, CurrentUser
 
 router = APIRouter(tags=["Data Q&A"], prefix="/chat")
 
@@ -94,7 +94,7 @@ async def start_chat(session: SessionDep, current_user: CurrentUser):
 
 
 @router.post("/recommend_questions/{chat_record_id}")
-async def recommend_questions(session: SessionDep, current_user: CurrentUser, chat_record_id: int):
+async def recommend_questions(session: SessionDep, current_user: CurrentUser, chat_record_id: int, current_assistant: CurrentAssistant):
     try:
         record = session.query(ChatRecord).get(chat_record_id)
         if not record:
@@ -104,7 +104,7 @@ async def recommend_questions(session: SessionDep, current_user: CurrentUser, ch
         )
         request_question = ChatQuestion(chat_id=record.chat_id, question=record.question if record.question else '')
 
-        llm_service = LLMService(session, current_user, request_question)
+        llm_service = LLMService(session, current_user, request_question, current_assistant)
         llm_service.set_record(record)
     except Exception as e:
         traceback.print_exc()
@@ -117,7 +117,7 @@ async def recommend_questions(session: SessionDep, current_user: CurrentUser, ch
 
 
 @router.post("/question")
-async def stream_sql(session: SessionDep, current_user: CurrentUser, request_question: ChatQuestion):
+async def stream_sql(session: SessionDep, current_user: CurrentUser, request_question: ChatQuestion, current_assistant: CurrentAssistant):
     """Stream SQL analysis results
 
     Args:
@@ -130,7 +130,7 @@ async def stream_sql(session: SessionDep, current_user: CurrentUser, request_que
     """
 
     try:
-        llm_service = LLMService(session, current_user, request_question)
+        llm_service = LLMService(session, current_user, request_question, current_assistant)
         llm_service.init_record()
     except Exception as e:
         traceback.print_exc()
@@ -143,7 +143,7 @@ async def stream_sql(session: SessionDep, current_user: CurrentUser, request_que
 
 
 @router.post("/record/{chat_record_id}/{action_type}")
-async def analysis_or_predict(session: SessionDep, current_user: CurrentUser, chat_record_id: int, action_type: str):
+async def analysis_or_predict(session: SessionDep, current_user: CurrentUser, chat_record_id: int, action_type: str, current_assistant: CurrentAssistant):
     if action_type != 'analysis' and action_type != 'predict':
         raise HTTPException(
             status_code=404,
@@ -166,7 +166,7 @@ async def analysis_or_predict(session: SessionDep, current_user: CurrentUser, ch
     request_question = ChatQuestion(chat_id=record.chat_id, question='')
 
     try:
-        llm_service = LLMService(session, current_user, request_question)
+        llm_service = LLMService(session, current_user, request_question, current_assistant)
     except Exception as e:
         traceback.print_exc()
         raise HTTPException(