Skip to content

Commit e75e19f

Browse files
committed
🐛 Fix title generation for the deep thinking model. #1947
[Specification Details] fix test cases
1 parent 6f2d6c9 commit e75e19f

File tree

1 file changed

+6
-4
lines changed

1 file changed

+6
-4
lines changed

test/backend/services/test_conversation_management_service.py

Lines changed: 6 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -348,7 +348,7 @@ def test_call_llm_for_title(self, mock_get_model_config, mock_get_prompt_templat
348348
mock_llm_instance = mock_openai.return_value
349349
mock_response = MagicMock()
350350
mock_response.content = "AI Discussion"
351-
mock_llm_instance.return_value = mock_response
351+
mock_llm_instance.generate.return_value = mock_response
352352

353353
# Execute
354354
result = call_llm_for_title(
@@ -357,7 +357,7 @@ def test_call_llm_for_title(self, mock_get_model_config, mock_get_prompt_templat
357357
# Assert
358358
self.assertEqual(result, "AI Discussion")
359359
mock_openai.assert_called_once()
360-
mock_llm_instance.assert_called_once()
360+
mock_llm_instance.generate.assert_called_once()
361361
mock_get_prompt_template.assert_called_once_with(language='zh')
362362

363363
@patch('backend.services.conversation_management_service.OpenAIServerModel')
@@ -380,7 +380,7 @@ def test_call_llm_for_title_response_none_zh(self, mock_get_model_config, mock_g
380380
mock_get_prompt_template.return_value = mock_prompt_template
381381

382382
mock_llm_instance = mock_openai.return_value
383-
mock_llm_instance.return_value = None
383+
mock_llm_instance.generate.return_value = None
384384

385385
# Execute
386386
result = call_llm_for_title(
@@ -389,6 +389,7 @@ def test_call_llm_for_title_response_none_zh(self, mock_get_model_config, mock_g
389389
# Assert
390390
self.assertEqual(result, "新对话")
391391
mock_openai.assert_called_once()
392+
mock_llm_instance.generate.assert_called_once()
392393
mock_get_prompt_template.assert_called_once_with(language='zh')
393394

394395
@patch('backend.services.conversation_management_service.OpenAIServerModel')
@@ -411,7 +412,7 @@ def test_call_llm_for_title_response_none_en(self, mock_get_model_config, mock_g
411412
mock_get_prompt_template.return_value = mock_prompt_template
412413

413414
mock_llm_instance = mock_openai.return_value
414-
mock_llm_instance.return_value = None
415+
mock_llm_instance.generate.return_value = None
415416

416417
# Execute
417418
result = call_llm_for_title(
@@ -420,6 +421,7 @@ def test_call_llm_for_title_response_none_en(self, mock_get_model_config, mock_g
420421
# Assert
421422
self.assertEqual(result, "New Conversation")
422423
mock_openai.assert_called_once()
424+
mock_llm_instance.generate.assert_called_once()
423425
mock_get_prompt_template.assert_called_once_with(language='en')
424426

425427
@patch('backend.services.conversation_management_service.rename_conversation')

0 commit comments

Comments (0)