Skip to content

Commit f621f80

Browse files
committed
fix: multi-LLM test bug
1 parent bc0f25e commit f621f80

File tree

3 files changed

+4
-3
lines changed

3 files changed

+4
-3
lines changed

src/memos/llms/deepseek.py

Lines changed: 2 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -1,3 +1,5 @@
1+
from collections.abc import Generator
2+
13
from memos.configs.llm import DeepSeekLLMConfig
24
from memos.llms.openai import OpenAILLM
35
from memos.llms.utils import remove_thinking_tags
@@ -43,7 +45,6 @@ def generate_stream(self, messages: MessageList, **kwargs) -> Generator[str, Non
4345
extra_body=self.config.extra_body,
4446
)
4547
# Streaming chunks of text
46-
answer_parts = ""
4748
for chunk in response:
4849
delta = chunk.choices[0].delta
4950
if hasattr(delta, "reasoning_content") and delta.reasoning_content:

tests/llms/test_deepseek.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -38,7 +38,7 @@ def test_deepseek_llm_generate_with_and_without_think_prefix(self):
3838
self.assertEqual(output_with_think, full_content)
3939

4040
# Config with think tag removed
41-
config_without_think = config_with_think.copy(update={"remove_think_prefix": True})
41+
config_without_think = config_with_think.model_copy(update={"remove_think_prefix": True})
4242
llm_without_think = DeepSeekLLM(config_without_think)
4343
llm_without_think.client.chat.completions.create = MagicMock(return_value=mock_response)
4444

tests/llms/test_qwen.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -40,7 +40,7 @@ def test_qwen_llm_generate_with_and_without_think_prefix(self):
4040
self.assertEqual(response_with_think, full_content)
4141

4242
# Create config with remove_think_prefix = True
43-
config_without_think = config_with_think.copy(update={"remove_think_prefix": True})
43+
config_without_think = config_with_think.model_copy(update={"remove_think_prefix": True})
4444

4545
# Instance with think tag removed
4646
llm_without_think = QwenLLM(config_without_think)

0 commit comments

Comments (0)