Commit 1331aa8

fix: clean_tool_calls() produces an invalid LLM API request (#3360)
Parent commit: 8130350

7 files changed: +223 -15 lines


.github/ISSUE_TEMPLATE/bug_report.yml

Lines changed: 1 addition & 1 deletion
@@ -26,7 +26,7 @@ body:
     attributes:
       label: What version of camel are you using?
       description: Run command `python3 -c 'print(__import__("camel").__version__)'` in your shell and paste the output here.
-      placeholder: E.g., 0.2.79a1
+      placeholder: E.g., 0.2.79a2
     validations:
       required: true

camel/__init__.py

Lines changed: 1 addition & 1 deletion
@@ -14,7 +14,7 @@
 
 from camel.logger import disable_logging, enable_logging, set_log_level
 
-__version__ = '0.2.79a1'
+__version__ = '0.2.79a2'
 
 __all__ = [
     '__version__',

camel/memories/agent_memories.py

Lines changed: 11 additions & 3 deletions
@@ -118,15 +118,23 @@ def clean_tool_calls(self) -> None:
                 indices_to_remove.append(i)
             # Mark ASSISTANT messages with tool_calls for removal
             elif role == OpenAIBackendRole.ASSISTANT.value:
-                meta_dict = record.get('meta_dict', {})
-                if meta_dict and 'tool_calls' in meta_dict:
+                message_dict = record.get('message', {})
+                # Check for tool_calls in message
+                has_tool_calls = 'tool_calls' in message_dict
+                is_func_calling = (
+                    message_dict.get('__class__') == 'FunctionCallingMessage'
+                    and 'args' in message_dict
+                )
+
+                if has_tool_calls or is_func_calling:
                     indices_to_remove.append(i)
 
         # Remove records in-place
         for i in reversed(indices_to_remove):
             del record_dicts[i]
 
-        # Save the modified records back to storage
+        # Clear storage and save the modified records back
+        self._chat_history_block.storage.clear()
         self._chat_history_block.storage.save(record_dicts)
 
     def pop_records(self, count: int) -> List[MemoryRecord]:

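In plain terms, the patched check now inspects the serialized message itself rather than the record's meta_dict: an ASSISTANT record is dropped when its message dict carries a 'tool_calls' key or is a serialized FunctionCallingMessage that still holds the request 'args', and the storage is cleared before the surviving records are written back. The standalone Python sketch below mirrors that filtering rule only as an illustration; the dict shapes, key names, and plain role strings are simplified stand-ins, not CAMEL's exact serialization format.

from typing import Any, Dict, List

def filter_tool_call_records(
    record_dicts: List[Dict[str, Any]],
) -> List[Dict[str, Any]]:
    """Return only the records that are safe to resend to a chat API."""
    kept = []
    for record in record_dicts:
        role = record.get('role_at_backend')  # simplified: plain role strings
        message = record.get('message', {})
        # Tool/function results are always dropped.
        if role in ('function', 'tool'):
            continue
        # Assistant messages are dropped when they carry a tool call, either
        # via an explicit 'tool_calls' key or as a serialized
        # FunctionCallingMessage that still holds the request 'args'.
        if role == 'assistant':
            has_tool_calls = 'tool_calls' in message
            is_func_calling = (
                message.get('__class__') == 'FunctionCallingMessage'
                and 'args' in message
            )
            if has_tool_calls or is_func_calling:
                continue
        kept.append(record)
    return kept

With this rule, a history of [user, assistant tool call, tool result, final assistant answer] collapses to [user, final assistant answer], which is the behaviour the new tests below assert.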
docs/conf.py

Lines changed: 1 addition & 1 deletion
@@ -27,7 +27,7 @@
 project = 'CAMEL'
 copyright = '2024, CAMEL-AI.org'
 author = 'CAMEL-AI.org'
-release = '0.2.79a1'
+release = '0.2.79a2'
 
 html_favicon = (
     'https://raw.githubusercontent.com/camel-ai/camel/master/misc/favicon.png'

pyproject.toml

Lines changed: 1 addition & 1 deletion
@@ -4,7 +4,7 @@ build-backend = "hatchling.build"
 
 [project]
 name = "camel-ai"
-version = "0.2.79a1"
+version = "0.2.79a2"
 description = "Communicative Agents for AI Society Study"
 authors = [{ name = "CAMEL-AI.org" }]
 requires-python = ">=3.10,<3.15"

test/memories/test_agent_memories.py

Lines changed: 201 additions & 1 deletion
@@ -19,11 +19,13 @@
 from camel.memories import (
     BaseContextCreator,
     ChatHistoryBlock,
+    ChatHistoryMemory,
     LongtermAgentMemory,
     MemoryRecord,
     VectorDBBlock,
 )
-from camel.messages import BaseMessage
+from camel.messages import BaseMessage, FunctionCallingMessage
+from camel.storages.key_value_storages import InMemoryKeyValueStorage
 from camel.types import OpenAIBackendRole, RoleType
 
 
@@ -121,3 +123,201 @@ def test_clear(
         memory.clear()
         mock_chat_history_block.clear.assert_called_once()
         mock_vector_db_block.clear.assert_called_once()
+
+
+class TestChatHistoryMemoryCleanToolCalls:
+    @pytest.fixture
+    def mock_context_creator(self):
+        creator = MagicMock(spec=BaseContextCreator)
+        creator.create_context.return_value = ([], 0)
+        return creator
+
+    def test_clean_tool_calls_removes_function_messages(
+        self, mock_context_creator
+    ):
+        r"""Test that clean_tool_calls removes FUNCTION role messages."""
+        storage = InMemoryKeyValueStorage()
+        memory = ChatHistoryMemory(mock_context_creator, storage=storage)
+
+        # Create records with FUNCTION messages
+        records = [
+            MemoryRecord(
+                message=BaseMessage("user", RoleType.USER, None, "Question"),
+                role_at_backend=OpenAIBackendRole.USER,
+            ),
+            MemoryRecord(
+                message=FunctionCallingMessage(
+                    "assistant",
+                    RoleType.ASSISTANT,
+                    None,
+                    "",
+                    func_name="tool",
+                    result="result",
+                ),
+                role_at_backend=OpenAIBackendRole.FUNCTION,
+            ),
+        ]
+        memory.write_records(records)
+
+        memory.clean_tool_calls()
+        remaining = memory.retrieve()
+
+        assert len(remaining) == 1
+        assert (
+            remaining[0].memory_record.role_at_backend
+            == OpenAIBackendRole.USER
+        )
+
+    def test_clean_tool_calls_removes_assistant_with_tool_calls(
+        self, mock_context_creator
+    ):
+        r"""Test that clean_tool_calls removes ASSISTANT messages with tool
+        calls.
+        """
+        storage = InMemoryKeyValueStorage()
+        memory = ChatHistoryMemory(mock_context_creator, storage=storage)
+
+        records = [
+            MemoryRecord(
+                message=BaseMessage("user", RoleType.USER, None, "Question"),
+                role_at_backend=OpenAIBackendRole.USER,
+            ),
+            # Assistant with tool call (has args)
+            MemoryRecord(
+                message=FunctionCallingMessage(
+                    "assistant",
+                    RoleType.ASSISTANT,
+                    None,
+                    "",
+                    func_name="add",
+                    args={"a": 1, "b": 2},
+                    tool_call_id="call_123",
+                ),
+                role_at_backend=OpenAIBackendRole.ASSISTANT,
+            ),
+            # Function result
+            MemoryRecord(
+                message=FunctionCallingMessage(
+                    "function",
+                    RoleType.ASSISTANT,
+                    None,
+                    "",
+                    func_name="add",
+                    result="3",
+                    tool_call_id="call_123",
+                ),
+                role_at_backend=OpenAIBackendRole.FUNCTION,
+            ),
+            # Final assistant response (should be kept)
+            MemoryRecord(
+                message=BaseMessage(
+                    "assistant", RoleType.ASSISTANT, None, "Answer"
+                ),
+                role_at_backend=OpenAIBackendRole.ASSISTANT,
+            ),
+        ]
+        memory.write_records(records)
+
+        memory.clean_tool_calls()
+        remaining = memory.retrieve()
+
+        # Should only have USER and final ASSISTANT
+        assert len(remaining) == 2
+        assert (
+            remaining[0].memory_record.role_at_backend
+            == OpenAIBackendRole.USER
+        )
+        assert (
+            remaining[1].memory_record.role_at_backend
+            == OpenAIBackendRole.ASSISTANT
+        )
+        assert remaining[1].memory_record.message.content == "Answer"
+
+    def test_clean_tool_calls_handles_multiple_tool_calls(
+        self, mock_context_creator
+    ):
+        r"""Test clean_tool_calls with multiple tool call sequences."""
+        storage = InMemoryKeyValueStorage()
+        memory = ChatHistoryMemory(mock_context_creator, storage=storage)
+
+        records = [
+            MemoryRecord(
+                message=BaseMessage("user", RoleType.USER, None, "Q1"),
+                role_at_backend=OpenAIBackendRole.USER,
+            ),
+            MemoryRecord(
+                message=FunctionCallingMessage(
+                    "assistant",
+                    RoleType.ASSISTANT,
+                    None,
+                    "",
+                    func_name="tool1",
+                    args={"x": 1},
+                ),
+                role_at_backend=OpenAIBackendRole.ASSISTANT,
+            ),
+            MemoryRecord(
+                message=FunctionCallingMessage(
+                    "function",
+                    RoleType.ASSISTANT,
+                    None,
+                    "",
+                    func_name="tool1",
+                    result="R1",
+                ),
+                role_at_backend=OpenAIBackendRole.FUNCTION,
+            ),
+            MemoryRecord(
+                message=BaseMessage(
+                    "assistant", RoleType.ASSISTANT, None, "A1"
+                ),
+                role_at_backend=OpenAIBackendRole.ASSISTANT,
+            ),
+            MemoryRecord(
+                message=BaseMessage("user", RoleType.USER, None, "Q2"),
+                role_at_backend=OpenAIBackendRole.USER,
+            ),
+            MemoryRecord(
+                message=FunctionCallingMessage(
+                    "assistant",
+                    RoleType.ASSISTANT,
+                    None,
+                    "",
+                    func_name="tool2",
+                    args={"y": 2},
+                ),
+                role_at_backend=OpenAIBackendRole.ASSISTANT,
+            ),
+            MemoryRecord(
+                message=FunctionCallingMessage(
+                    "function",
+                    RoleType.ASSISTANT,
+                    None,
+                    "",
+                    func_name="tool2",
+                    result="R2",
+                ),
+                role_at_backend=OpenAIBackendRole.FUNCTION,
+            ),
+            MemoryRecord(
+                message=BaseMessage(
+                    "assistant", RoleType.ASSISTANT, None, "A2"
+                ),
+                role_at_backend=OpenAIBackendRole.ASSISTANT,
+            ),
+        ]
+        memory.write_records(records)
+
+        memory.clean_tool_calls()
+        remaining = memory.retrieve()
+
+        # Should only have: USER, ASSISTANT, USER, ASSISTANT
+        assert len(remaining) == 4
+        expected_roles = [
+            OpenAIBackendRole.USER,
+            OpenAIBackendRole.ASSISTANT,
+            OpenAIBackendRole.USER,
+            OpenAIBackendRole.ASSISTANT,
+        ]
+        actual_roles = [r.memory_record.role_at_backend for r in remaining]
+        assert actual_roles == expected_roles

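The tests above exercise the end-to-end behaviour. For context on the "invalid LLM API request" in the commit title: chat-completion backends generally reject a message list in which an assistant tool call and its tool reply do not appear as a matched pair. The snippet below is illustrative only (OpenAI-style message dicts, not repository code) and contrasts a half-cleaned history, a failure mode consistent with the change above, with the fully cleaned history that the fixed clean_tool_calls() leaves behind.

# Illustrative only: OpenAI-style chat messages, not CAMEL objects.

# Half-cleaned history: the tool result was removed, but the assistant
# tool call slipped past the old meta_dict-based check. A request built
# from this list is typically rejected, since the tool call has no reply.
dangling = [
    {"role": "user", "content": "What is 1 + 2?"},
    {
        "role": "assistant",
        "content": "",
        "tool_calls": [
            {
                "id": "call_123",
                "type": "function",
                "function": {"name": "add", "arguments": '{"a": 1, "b": 2}'},
            }
        ],
    },
    # The matching {"role": "tool", "tool_call_id": "call_123", ...} is gone.
]

# Fully cleaned history after this fix: both sides of the tool-call
# exchange are removed, leaving only plain user/assistant turns.
cleaned = [
    {"role": "user", "content": "What is 1 + 2?"},
    {"role": "assistant", "content": "The result is 3."},
]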
uv.lock

Lines changed: 7 additions & 7 deletions
Some generated files are not rendered by default.
