Skip to content

Commit d257326

Browse files
authored
Updated test cases to fit system prompt parameter (#5)
1 parent fc6548d commit d257326

File tree

2 files changed: +5 additions, -5 deletions

test/test_cli_mode.py

Lines changed: 3 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -108,7 +108,7 @@ def test_cli_mode_uses_llamacpp_when_available(self):
108108

109109
assert result == "CLI response from DeepSeek V3"
110110
self.mock_available.assert_called_once_with(TEST_LLAMACPP_MODEL)
111-
self.mock_chat_llamacpp.assert_called_once_with(TEST_LLAMACPP_MODEL, 'Write a function')
111+
self.mock_chat_llamacpp.assert_called_once_with(TEST_LLAMACPP_MODEL, 'Write a function', None)
112112

113113
def test_cli_mode_fallback_to_ollama_when_unavailable(self):
114114
"""Test CLI mode falls back to ollama when model not available in llama.cpp."""
@@ -119,7 +119,7 @@ def test_cli_mode_fallback_to_ollama_when_unavailable(self):
119119

120120
assert result == "Ollama response from DeepSeek Coder"
121121
self.mock_available.assert_called_once_with(TEST_OLLAMA_MODEL)
122-
self.mock_chat_ollama.assert_called_once_with(TEST_OLLAMA_MODEL, 'Help with coding')
122+
self.mock_chat_ollama.assert_called_once_with(TEST_OLLAMA_MODEL, 'Help with coding', None)
123123

124124
def test_default_mode_is_cli(self):
125125
"""Test that default mode is CLI when no llama_mode specified."""
@@ -130,7 +130,7 @@ def test_default_mode_is_cli(self):
130130

131131
assert result == "Default CLI mode response"
132132
self.mock_available.assert_called_once_with(TEST_LLAMACPP_MODEL)
133-
self.mock_chat_llamacpp.assert_called_once_with(TEST_LLAMACPP_MODEL, 'Help me')
133+
self.mock_chat_llamacpp.assert_called_once_with(TEST_LLAMACPP_MODEL, 'Help me', None)
134134

135135

136136
class TestCLIModeIntegration:

test/test_server_mode.py

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -111,7 +111,7 @@ def test_server_mode_uses_llamacpp_when_available(self):
111111

112112
assert result == "Server response from DeepSeek V3"
113113
self.mock_available.assert_called_once_with(TEST_LLAMACPP_MODEL)
114-
self.mock_chat_server.assert_called_once_with(TEST_LLAMACPP_MODEL, 'Explain code')
114+
self.mock_chat_server.assert_called_once_with(TEST_LLAMACPP_MODEL, 'Explain code', None)
115115

116116
def test_server_mode_fallback_to_ollama_when_unavailable(self):
117117
"""Test server mode falls back to ollama when model not available in llama.cpp."""
@@ -122,7 +122,7 @@ def test_server_mode_fallback_to_ollama_when_unavailable(self):
122122

123123
assert result == "Ollama fallback response"
124124
self.mock_available.assert_called_once_with(TEST_OLLAMA_MODEL)
125-
self.mock_chat_ollama.assert_called_once_with(TEST_OLLAMA_MODEL, 'Debug code')
125+
self.mock_chat_ollama.assert_called_once_with(TEST_OLLAMA_MODEL, 'Debug code', None)
126126

127127
def test_server_mode_requires_server_url(self):
128128
"""Test server mode requires LLAMA_SERVER_URL to be set."""

0 commit comments

Comments (0)