 from fastapi.responses import StreamingResponse
 from sqlmodel import select
 
-from apps.chat.curd.chat import list_chats, get_chat_with_records, create_chat, save_question, save_answer
-from apps.chat.models.chat_model import CreateChat, ChatRecord
+from apps.chat.curd.chat import list_chats, get_chat_with_records, create_chat, save_question, save_answer, rename_chat, \
+    delete_chat
+from apps.chat.models.chat_model import CreateChat, ChatRecord, RenameChat, Chat
 from apps.chat.schemas.chat_base_schema import LLMConfig
 from apps.chat.schemas.chat_schema import ChatQuestion
 from apps.chat.schemas.llm import AgentService
+from apps.datasource.crud.datasource import get_table_obj_by_ds
 from apps.datasource.models.datasource import CoreDatasource
 from apps.system.models.system_model import AiModelDetail
 from common.core.deps import SessionDep, CurrentUser
@@ -32,6 +34,28 @@ async def list_chat(session: SessionDep, current_user: CurrentUser, chart_id: in
     )
 
 
+@router.post("/rename")
+async def rename(session: SessionDep, chat: RenameChat):
+    try:
+        return rename_chat(session=session, rename_object=chat)
+    except Exception as e:
+        raise HTTPException(
+            status_code=500,
+            detail=str(e)
+        )
+
+
+@router.get("/delete/{chart_id}")
+async def delete(session: SessionDep, chart_id: int):
+    try:
+        return delete_chat(session=session, chart_id=chart_id)
+    except Exception as e:
+        raise HTTPException(
+            status_code=500,
+            detail=str(e)
+        )
+
+
 @router.post("/start")
 async def start_chat(session: SessionDep, current_user: CurrentUser, create_chat_obj: CreateChat):
     try:
@@ -57,26 +81,18 @@ async def stream_sql(session: SessionDep, current_user: CurrentUser, request_que
5781 """
5882 question = request_question .question
5983
-    # Get available AI model
-    aimodel = session.exec(select(AiModelDetail).where(
-        AiModelDetail.status == True,
-        AiModelDetail.api_key.is_not(None)
-    )).first()
-
-    # Get available datasource
-    ds = session.exec(select(CoreDatasource).where(
-        CoreDatasource.status == 'Success'
-    )).first()
-
-    if not aimodel:
+    chat = session.query(Chat).filter(Chat.id == request_question.chat_id).first()
+    if not chat:
         raise HTTPException(
             status_code=400,
-            detail="No available AI model configuration found"
+            detail=f"Chat with id {request_question.chat_id} not found"
         )
 
+    # Get available datasource
+    ds = session.query(CoreDatasource).filter(CoreDatasource.id == chat.datasource).first()
     if not ds:
         raise HTTPException(
-            status_code=400,
+            status_code=500,
             detail="No available datasource configuration found"
         )
 
@@ -89,6 +105,17 @@ async def stream_sql(session: SessionDep, current_user: CurrentUser, request_que
             detail=str(e1)
         )
 
+    # Get available AI model
+    aimodel = session.exec(select(AiModelDetail).where(
+        AiModelDetail.status == True,
+        AiModelDetail.api_key.is_not(None)
+    )).first()
+    if not aimodel:
+        raise HTTPException(
+            status_code=400,
+            detail="No available AI model configuration found"
+        )
+
     # Use Tongyi Qianwen
     tongyi_config = LLMConfig(
         model_type="openai",
@@ -113,10 +140,39 @@ async def stream_sql(session: SessionDep, current_user: CurrentUser, request_que
113140 """ result = llm_service.generate_sql(question)
114141 return result """
115142
143+ # get schema
144+ schema_str = ""
145+ table_objs = get_table_obj_by_ds (session = session , ds = ds )
146+ db_name = table_objs [0 ].schema
147+ schema_str += f"【DB_ID】 { db_name } \n 【Schema】\n "
148+ for obj in table_objs :
149+ schema_str += f"# Table: { db_name } .{ obj .table .table_name } "
150+ table_comment = ''
151+ if obj .table .custom_comment :
152+ table_comment = obj .table .custom_comment .strip ()
153+ if table_comment == '' :
154+ schema_str += '\n [\n '
155+ else :
156+ schema_str += f", { table_comment } \n [\n "
157+
158+ field_list = []
159+ for field in obj .fields :
160+ field_comment = ''
161+ if field .custom_comment :
162+ field_comment = field .custom_comment .strip ()
163+ if field_comment == '' :
164+ field_list .append (f"({ field .field_name } :{ field .field_type } )" )
165+ else :
166+ field_list .append (f"({ field .field_name } :{ field .field_type } , { field_comment } )" )
167+ schema_str += ",\n " .join (field_list )
168+ schema_str += '\n ]\n '
169+
170+ print (schema_str )
171+
116172 async def event_generator ():
117173 all_text = ''
118174 try :
119- async for chunk in llm_service .async_generate (question ):
175+ async for chunk in llm_service .async_generate (question , schema_str ):
120176 data = json .loads (chunk .replace ('data: ' , '' ))
121177
122178 if data ['type' ] in ['final' , 'tool_result' ]:
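
For reference, the schema string assembled in the last hunk would come out roughly like this for a hypothetical table (the database, table, and column names below are invented for illustration; the 【DB_ID】/【Schema】 markers and the bracketed field list follow the string-building loop in the diff):

    【DB_ID】 sales_db
    【Schema】
    # Table: sales_db.orders, customer orders
    [
    (order_id:INTEGER),
    (amount:DECIMAL, order total in USD)
    ]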
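
The new rename and delete routes can be exercised with a plain HTTP client. A minimal sketch, assuming the chat router is mounted under a /chat prefix and that RenameChat carries the chat id plus the new name (the prefix and the payload field names are assumptions, not taken from this diff):

    # Hypothetical client calls; the /chat prefix and the RenameChat fields
    # ("id", "brief") are assumptions made for illustration only.
    import requests

    BASE_URL = "http://localhost:8000"  # assumed local dev server

    # Rename an existing chat (POST /rename with a RenameChat body)
    resp = requests.post(f"{BASE_URL}/chat/rename", json={"id": 1, "brief": "Q3 sales analysis"})
    resp.raise_for_status()

    # Delete a chat by id (the new route is a GET with a chart_id path parameter)
    resp = requests.get(f"{BASE_URL}/chat/delete/1")
    resp.raise_for_status()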