
Commit 66b5650

fix: chat question for mcp
1 parent 8afb148 commit 66b5650

File tree

1 file changed: +9 −6 lines

backend/apps/chat/task/llm.py

Lines changed: 9 additions & 6 deletions
@@ -66,9 +66,10 @@ def __init__(self, session: SessionDep, current_user: CurrentUser, chat_question
         if not aimodel and aimodel[0]:
             raise Exception("No available AI model configuration found")
 
-        history_records: List[ChatRecord] = list(filter(lambda r: True if r.first_chat != True else False,
-                                                         list_records(session=self.session, current_user=current_user,
-                                                                      chart_id=chat_question.chat_id)))
+        history_records: List[ChatRecord] = list(
+            map(lambda x: ChatRecord(**x.model_dump()), filter(lambda r: True if r.first_chat != True else False,
+                                                               list_records(session=self.session, current_user=current_user,
+                                                                            chart_id=chat_question.chat_id))))
         # get schema
         if ds:
             chat_question.db_schema = get_table_schema(session=self.session, ds=ds)
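
Note on this hunk: the filtered rows from list_records are now rebuilt as ChatRecord objects via model_dump(), presumably because list_records returns a different row/model type than the ChatRecord the history list is annotated with. A minimal sketch of the same filter-then-map pattern, using stand-in Pydantic models (ChatRecordRow and ChatRecord below are illustrative, not the project's real classes):

# Illustrative only: ChatRecordRow stands in for whatever list_records() returns;
# ChatRecord stands in for the project's chat record model.
from typing import List
from pydantic import BaseModel

class ChatRecordRow(BaseModel):
    id: int
    question: str
    first_chat: bool = False

class ChatRecord(BaseModel):
    id: int
    question: str
    first_chat: bool = False

rows = [ChatRecordRow(id=1, question="hi", first_chat=True),
        ChatRecordRow(id=2, question="show sales", first_chat=False)]

# Same shape as the diff: drop first-chat rows, then rebuild each remaining
# row as a ChatRecord from its model_dump() dict.
history: List[ChatRecord] = list(
    map(lambda x: ChatRecord(**x.model_dump()),
        filter(lambda r: not r.first_chat, rows)))

print(history)  # [ChatRecord(id=2, question='show sales', first_chat=False)]
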
@@ -291,7 +292,6 @@ def select_datasource(self):
         full_text = ''
         res = self.llm.stream(datasource_msg)
         for chunk in res:
-            print(chunk)
             if isinstance(chunk, dict):
                 full_text += chunk['content']
                 yield chunk['content']
@@ -553,9 +553,12 @@ def run_task(llm_service: LLMService, session: SessionDep, in_chat: bool = True)
     # select datasource if datasource is none
     if not llm_service.ds:
         ds_res = llm_service.select_datasource()
-        if in_chat:
-            for chunk in ds_res:
+
+        for chunk in ds_res:
+            print(chunk)
+            if in_chat:
                 yield orjson.dumps({'content': chunk, 'type': 'datasource-result'}).decode() + '\n\n'
+        if in_chat:
             yield orjson.dumps({'id': llm_service.ds.id, 'datasource_name': llm_service.ds.name,
                                 'engine_type': llm_service.ds.type_name, 'type': 'datasource'}).decode() + '\n\n'
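
Note on this hunk: select_datasource() is a generator, so under the old code nothing inside it ever ran when in_chat was False; the loop now always drains the generator (logging each chunk), and only the streamed output is gated on in_chat. A minimal illustration of that behavior with a stand-in generator (not the project's actual function):

# Illustrative only: a stand-in generator, not the project's select_datasource().
def select_datasource():
    print("selecting datasource...")  # side effect that must always happen
    yield "datasource chosen"

in_chat = False

# Old shape: when in_chat is False the generator is never iterated,
# so its body (and the selection side effect) never runs.
gen = select_datasource()
if in_chat:
    for chunk in gen:
        pass  # would stream chunk to the client

# New shape: always consume the generator; only the streaming is gated.
for chunk in select_datasource():
    print(chunk)
    if in_chat:
        pass  # would stream chunk to the client
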

0 commit comments
