Skip to content

Commit 125eef3

Browse files
authored
[Cursor] fix: Resolve test failures on master branch (#131)
Corrected three test failures in tests/test_llm_api.py identified when running tests locally on the master branch: 1. test_create_openai_client: Updated assertion to include the `base_url` parameter which is now passed during client creation. 2. test_query_anthropic: Updated the expected model name in the assertion from `claude-3-sonnet-20240229` to the current default `claude-3-7-sonnet-20250219`. 3. test_query_gemini: Refactored the mock setup and assertions to correctly reflect the use of the chat session (`start_chat` and `send_message`) instead of the previous `generate_content` method.
1 parent 3086d03 commit 125eef3

File tree

1 file changed

+22
-8
lines changed

1 file changed

+22
-8
lines changed

tests/test_llm_api.py

Lines changed: 22 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -105,11 +105,16 @@ def setUp(self):
105105
self.mock_anthropic_response.content = [self.mock_anthropic_content]
106106
self.mock_anthropic_client.messages.create.return_value = self.mock_anthropic_response
107107

108-
# Set up Gemini-style response
109-
self.mock_gemini_model = MagicMock()
108+
# Set up Gemini-style response - Updated for Chat Session
109+
self.mock_gemini_chat_session = MagicMock() # Mock for the chat session
110110
self.mock_gemini_response = MagicMock()
111111
self.mock_gemini_response.text = "Test Gemini response"
112-
self.mock_gemini_model.generate_content.return_value = self.mock_gemini_response
112+
self.mock_gemini_chat_session.send_message.return_value = self.mock_gemini_response # Mock send_message
113+
114+
self.mock_gemini_model = MagicMock() # Mock for the GenerativeModel
115+
self.mock_gemini_model.start_chat.return_value = self.mock_gemini_chat_session # Mock start_chat
116+
117+
self.mock_gemini_client = MagicMock() # Mock for the genai module itself
113118
self.mock_gemini_client.GenerativeModel.return_value = self.mock_gemini_model
114119

115120
# Set up SiliconFlow-style response
@@ -150,7 +155,11 @@ def tearDown(self):
150155
def test_create_openai_client(self, mock_openai):
151156
mock_openai.return_value = self.mock_openai_client
152157
client = create_llm_client("openai")
153-
mock_openai.assert_called_once_with(api_key='test-openai-key')
158+
# Add base_url to the assertion
159+
mock_openai.assert_called_once_with(
160+
api_key='test-openai-key',
161+
base_url=os.getenv('OPENAI_BASE_URL', 'https://api.openai.com/v1')
162+
)
154163
self.assertEqual(client, self.mock_openai_client)
155164

156165
@unittest.skipIf(skip_llm_tests, skip_message)
@@ -274,19 +283,24 @@ def test_query_anthropic(self, mock_create_client):
274283
response = query_llm("Test prompt", provider="anthropic")
275284
self.assertEqual(response, "Test Anthropic response")
276285
self.mock_anthropic_client.messages.create.assert_called_once_with(
277-
model="claude-3-sonnet-20240229",
286+
# Update model name to the current default
287+
model="claude-3-7-sonnet-20250219",
278288
max_tokens=1000,
279289
messages=[{"role": "user", "content": [{"type": "text", "text": "Test prompt"}]}]
280290
)
281291

282292
@unittest.skipIf(skip_llm_tests, skip_message)
283293
@patch('tools.llm_api.create_llm_client')
284294
def test_query_gemini(self, mock_create_client):
285-
mock_create_client.return_value = self.mock_gemini_client
295+
mock_create_client.return_value = self.mock_gemini_client # Use the updated mock from setUp
286296
response = query_llm("Test prompt", provider="gemini")
287297
self.assertEqual(response, "Test Gemini response")
288-
self.mock_gemini_client.GenerativeModel.assert_called_once_with("gemini-pro")
289-
self.mock_gemini_model.generate_content.assert_called_once_with("Test prompt")
298+
# Update assertions to check chat flow
299+
self.mock_gemini_client.GenerativeModel.assert_called_once_with("gemini-2.0-flash-exp")
300+
self.mock_gemini_model.start_chat.assert_called_once_with(
301+
history=[{'role': 'user', 'parts': ["Test prompt"]}]
302+
)
303+
self.mock_gemini_chat_session.send_message.assert_called_once_with("Test prompt")
290304

291305
@unittest.skipIf(skip_llm_tests, skip_message)
292306
@patch('tools.llm_api.create_llm_client')

0 commit comments

Comments (0)