Skip to content

Commit 836cc1c

Browse files
authored
🐛 Fix the title generation of the deep thinking model. #1947
2 parents ddb18c7 + e75e19f commit 836cc1c

File tree

3 files changed

+7
-8
lines changed

3 files changed

+7
-8
lines changed

backend/services/conversation_management_service.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -281,7 +281,7 @@ def call_llm_for_title(content: str, tenant_id: str, language: str = LANGUAGE["Z
281281
"content": user_prompt}]
282282

283283
# Call the model
284-
response = llm(messages, max_tokens=10)
284+
response = llm.generate(messages)
285285
if not response or not response.content or not response.content.strip():
286286
return DEFAULT_EN_TITLE if language == LANGUAGE["EN"] else DEFAULT_ZH_TITLE
287287
return remove_think_blocks(response.content.strip())

frontend/const/modelConfig.ts

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -43,7 +43,6 @@ export const MODEL_PROVIDER_KEYS = [
4343
"qwen",
4444
"openai",
4545
"siliconflow",
46-
"ponytoken",
4746
"jina",
4847
"deepseek",
4948
"aliyuncs",
@@ -56,7 +55,6 @@ export const PROVIDER_HINTS: Record<ModelProviderKey, string> = {
5655
qwen: "qwen",
5756
openai: "openai",
5857
siliconflow: "siliconflow",
59-
ponytoken: "ponytoken",
6058
jina: "jina",
6159
deepseek: "deepseek",
6260
aliyuncs: "aliyuncs",
@@ -67,7 +65,6 @@ export const PROVIDER_ICON_MAP: Record<ModelProviderKey, string> = {
6765
qwen: "/qwen.png",
6866
openai: "/openai.png",
6967
siliconflow: "/siliconflow.png",
70-
ponytoken: "/ponytoken.png",
7168
jina: "/jina.png",
7269
deepseek: "/deepseek.png",
7370
aliyuncs: "/aliyuncs.png",

test/backend/services/test_conversation_management_service.py

Lines changed: 6 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -348,7 +348,7 @@ def test_call_llm_for_title(self, mock_get_model_config, mock_get_prompt_templat
348348
mock_llm_instance = mock_openai.return_value
349349
mock_response = MagicMock()
350350
mock_response.content = "AI Discussion"
351-
mock_llm_instance.return_value = mock_response
351+
mock_llm_instance.generate.return_value = mock_response
352352

353353
# Execute
354354
result = call_llm_for_title(
@@ -357,7 +357,7 @@ def test_call_llm_for_title(self, mock_get_model_config, mock_get_prompt_templat
357357
# Assert
358358
self.assertEqual(result, "AI Discussion")
359359
mock_openai.assert_called_once()
360-
mock_llm_instance.assert_called_once()
360+
mock_llm_instance.generate.assert_called_once()
361361
mock_get_prompt_template.assert_called_once_with(language='zh')
362362

363363
@patch('backend.services.conversation_management_service.OpenAIServerModel')
@@ -380,7 +380,7 @@ def test_call_llm_for_title_response_none_zh(self, mock_get_model_config, mock_g
380380
mock_get_prompt_template.return_value = mock_prompt_template
381381

382382
mock_llm_instance = mock_openai.return_value
383-
mock_llm_instance.return_value = None
383+
mock_llm_instance.generate.return_value = None
384384

385385
# Execute
386386
result = call_llm_for_title(
@@ -389,6 +389,7 @@ def test_call_llm_for_title_response_none_zh(self, mock_get_model_config, mock_g
389389
# Assert
390390
self.assertEqual(result, "新对话")
391391
mock_openai.assert_called_once()
392+
mock_llm_instance.generate.assert_called_once()
392393
mock_get_prompt_template.assert_called_once_with(language='zh')
393394

394395
@patch('backend.services.conversation_management_service.OpenAIServerModel')
@@ -411,7 +412,7 @@ def test_call_llm_for_title_response_none_en(self, mock_get_model_config, mock_g
411412
mock_get_prompt_template.return_value = mock_prompt_template
412413

413414
mock_llm_instance = mock_openai.return_value
414-
mock_llm_instance.return_value = None
415+
mock_llm_instance.generate.return_value = None
415416

416417
# Execute
417418
result = call_llm_for_title(
@@ -420,6 +421,7 @@ def test_call_llm_for_title_response_none_en(self, mock_get_model_config, mock_g
420421
# Assert
421422
self.assertEqual(result, "New Conversation")
422423
mock_openai.assert_called_once()
424+
mock_llm_instance.generate.assert_called_once()
423425
mock_get_prompt_template.assert_called_once_with(language='en')
424426

425427
@patch('backend.services.conversation_management_service.rename_conversation')

0 commit comments

Comments
 (0)