Commit 8c60c2f

Fix failing tests

1 parent 53a2944 commit 8c60c2f

3 files changed: +6 -6 lines changed

test/test_cli_mode.py

Lines changed: 3 additions & 3 deletions
@@ -108,7 +108,7 @@ def test_cli_mode_uses_llamacpp_when_available(self):
 
         assert result == "CLI response from DeepSeek V3"
         self.mock_available.assert_called_once_with(TEST_LLAMACPP_MODEL)
-        self.mock_chat_llamacpp.assert_called_once_with(TEST_LLAMACPP_MODEL, 'Write a function', None, None)
+        self.mock_chat_llamacpp.assert_called_once_with(TEST_LLAMACPP_MODEL, 'Write a function', system_prompt=None, image_files=None)
 
     def test_cli_mode_fallback_to_ollama_when_unavailable(self):
         """Test CLI mode falls back to ollama when model not available in llama.cpp."""
@@ -119,7 +119,7 @@ def test_cli_mode_fallback_to_ollama_when_unavailable(self):
 
         assert result == "Ollama response from DeepSeek Coder"
         self.mock_available.assert_called_once_with(TEST_OLLAMA_MODEL)
-        self.mock_chat_ollama.assert_called_once_with(TEST_OLLAMA_MODEL, 'Help with coding', None, None)
+        self.mock_chat_ollama.assert_called_once_with(TEST_OLLAMA_MODEL, 'Help with coding', system_prompt=None, image_files=None)
 
     def test_default_mode_is_cli(self):
         """Test that default mode is CLI when no llama_mode specified."""
@@ -130,7 +130,7 @@ def test_default_mode_is_cli(self):
 
         assert result == "Default CLI mode response"
         self.mock_available.assert_called_once_with(TEST_LLAMACPP_MODEL)
-        self.mock_chat_llamacpp.assert_called_once_with(TEST_LLAMACPP_MODEL, 'Help me', None, None)
+        self.mock_chat_llamacpp.assert_called_once_with(TEST_LLAMACPP_MODEL, 'Help me', system_prompt=None, image_files=None)
 
 
 class TestCLIModeIntegration:

test/test_server_mode.py

Lines changed: 2 additions & 2 deletions
@@ -111,7 +111,7 @@ def test_server_mode_uses_llamacpp_when_available(self):
 
         assert result == "Server response from DeepSeek V3"
         self.mock_available.assert_called_once_with(TEST_LLAMACPP_MODEL)
-        self.mock_chat_server.assert_called_once_with(TEST_LLAMACPP_MODEL, 'Explain code', None, None)
+        self.mock_chat_server.assert_called_once_with(TEST_LLAMACPP_MODEL, 'Explain code', system_prompt=None, image_files=None)
 
     def test_server_mode_fallback_to_ollama_when_unavailable(self):
         """Test server mode falls back to ollama when model not available in llama.cpp."""
@@ -122,7 +122,7 @@ def test_server_mode_fallback_to_ollama_when_unavailable(self):
 
         assert result == "Ollama fallback response"
         self.mock_available.assert_called_once_with(TEST_OLLAMA_MODEL)
-        self.mock_chat_ollama.assert_called_once_with(TEST_OLLAMA_MODEL, 'Debug code', None, None)
+        self.mock_chat_ollama.assert_called_once_with(TEST_OLLAMA_MODEL, 'Debug code', system_prompt=None, image_files=None)
 
     def test_server_mode_requires_server_url(self):
         """Test server mode requires LLAMA_SERVER_URL to be set."""

test/test_system_prompt.py

Lines changed: 1 addition & 1 deletion
@@ -77,4 +77,4 @@ def test_chat_with_model_routing(self, mock_available, mock_chat):
         mock_chat.return_value = "result"
 
         chat_with_model(TEST_MODEL, TEST_USER_CONTENT, 'cli', TEST_SYSTEM_PROMPT)
-        mock_chat.assert_called_once_with(TEST_MODEL, TEST_USER_CONTENT, TEST_SYSTEM_PROMPT, None)
+        mock_chat.assert_called_once_with(TEST_MODEL, TEST_USER_CONTENT, TEST_SYSTEM_PROMPT, image_files=None)
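
Taken together, the updated assertions imply that the routing function now forwards system_prompt and image_files to the selected backend as keyword arguments rather than positionally. The production code is not part of this commit, so the snippet below is only a minimal sketch of a dispatcher consistent with these tests; the helper names is_model_available, chat_llamacpp, chat_server, and chat_ollama are assumptions inferred from the mocks (mock_available, mock_chat_llamacpp, mock_chat_server, mock_chat_ollama), not taken from the repository.

# Minimal sketch (assumed names; not the repository's actual implementation)
# of the routing behaviour these tests exercise.

def is_model_available(model):
    # Stand-in for the llama.cpp availability check that the tests mock.
    return True

def chat_llamacpp(model, prompt, system_prompt=None, image_files=None):
    return f"llama.cpp reply to: {prompt}"

def chat_server(model, prompt, system_prompt=None, image_files=None):
    return f"llama-server reply to: {prompt}"

def chat_ollama(model, prompt, system_prompt=None, image_files=None):
    return f"ollama reply to: {prompt}"

def chat_with_model(model, user_content, llama_mode='cli',
                    system_prompt=None, image_files=None):
    # CLI is the default mode; 'server' selects the llama.cpp server backend.
    primary = chat_server if llama_mode == 'server' else chat_llamacpp
    if is_model_available(model):
        # Forward optional arguments by keyword, matching the updated assertions.
        return primary(model, user_content,
                       system_prompt=system_prompt, image_files=image_files)
    # Fall back to ollama when the model is not available in llama.cpp.
    return chat_ollama(model, user_content,
                       system_prompt=system_prompt, image_files=image_files)

Forwarding the optional arguments by keyword is what lets assert_called_once_with(..., system_prompt=None, image_files=None) match; a dispatcher that passed them positionally would only satisfy the previous (..., None, None) assertions.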
