
Commit dc45059

Merge pull request #618 from MervinPraison/revert-617-claude/issue-612-20250606_060924
Revert "fix: prevent duplicate Task and Response display when using litellm with memory"
2 parents b924a78 + 9a66956 · commit dc45059

File tree

3 files changed: +7 additions, -51 deletions


src/praisonai-agents/praisonaiagents/agent/agent.py

Lines changed: 1 addition & 7 deletions

@@ -1108,18 +1108,12 @@ def chat(self, prompt, temperature=0.2, tools=None, output_json=None, output_pyd
                     agent_role=self.role,
                     agent_tools=[t.__name__ if hasattr(t, '__name__') else str(t) for t in (tools if tools is not None else self.tools)],
                     execute_tool_fn=self.execute_tool, # Pass tool execution function
-                    reasoning_steps=reasoning_steps,
-                    suppress_display=True # Prevent duplicate displays - Agent will handle display
+                    reasoning_steps=reasoning_steps
                 )

                 self.chat_history.append({"role": "user", "content": prompt})
                 self.chat_history.append({"role": "assistant", "content": response_text})

-                # Display interaction for custom LLM (since we suppressed LLM display)
-                if self.verbose:
-                    display_interaction(prompt, response_text, markdown=self.markdown,
-                                        generation_time=time.time() - start_time, console=self.console)
-
                 # Log completion time if in debug mode
                 if logging.getLogger().getEffectiveLevel() == logging.DEBUG:
                     total_time = time.time() - start_time
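
After this hunk, Agent.chat again leaves all user-facing display to the LLM wrapper instead of calling display_interaction itself. Below is a minimal runnable sketch of that restored call-site shape, using only the keywords visible in the diff above; LLMStub, chat_history, and the literal argument values are stand-ins for the real praisonaiagents objects, not the actual API.

import time

class LLMStub:
    # Stand-in for the custom LLM wrapper; after the revert it owns the display
    # of the interaction when verbose is enabled, so the agent side stays silent.
    def get_response(self, prompt, agent_role=None, agent_tools=None,
                     execute_tool_fn=None, reasoning_steps=False, **kwargs):
        return f"stub response to: {prompt}"

llm_instance = LLMStub()
chat_history = []

prompt = "Summarize the latest sprint notes."
start_time = time.time()
response_text = llm_instance.get_response(
    prompt,
    agent_role="assistant",
    agent_tools=[],
    execute_tool_fn=None,      # would be self.execute_tool inside Agent.chat
    reasoning_steps=False,     # note: no suppress_display keyword after the revert
)
chat_history.append({"role": "user", "content": prompt})
chat_history.append({"role": "assistant", "content": response_text})
# No display_interaction call here anymore; rendering happens inside get_response.

The point of the revert is visible in the last lines: the agent records chat history but no longer renders the exchange itself.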

src/praisonai-agents/praisonaiagents/llm/llm.py

Lines changed: 6 additions & 7 deletions

@@ -290,7 +290,6 @@ def get_response(
         agent_role: Optional[str] = None,
         agent_tools: Optional[List[str]] = None,
         execute_tool_fn: Optional[Callable] = None,
-        suppress_display: bool = False,
         **kwargs
     ) -> str:
         """Enhanced get_response with all OpenAI-like features"""
@@ -455,15 +454,15 @@ def get_response(
                         final_response = resp

                         # Optionally display reasoning if present
-                        if verbose and not suppress_display and reasoning_content:
+                        if verbose and reasoning_content:
                             display_interaction(
                                 original_prompt,
                                 f"Reasoning:\n{reasoning_content}\n\nAnswer:\n{response_text}",
                                 markdown=markdown,
                                 generation_time=time.time() - current_time,
                                 console=console
                             )
-                        elif verbose and not suppress_display:
+                        else:
                             display_interaction(
                                 original_prompt,
                                 response_text,
@@ -666,15 +665,15 @@ def get_response(
                     response_text = resp["choices"][0]["message"]["content"]

                     # Optionally display reasoning if present
-                    if verbose and not suppress_display and reasoning_content:
+                    if verbose and reasoning_content:
                         display_interaction(
                             original_prompt,
                             f"Reasoning:\n{reasoning_content}\n\nAnswer:\n{response_text}",
                             markdown=markdown,
                             generation_time=time.time() - start_time,
                             console=console
                         )
-                    elif verbose and not suppress_display:
+                    else:
                         display_interaction(
                             original_prompt,
                             response_text,
@@ -719,7 +718,7 @@ def get_response(
                     final_response_text = final_response_text.strip()

                     # Display final response
-                    if verbose and not suppress_display:
+                    if verbose:
                         display_interaction(
                             original_prompt,
                             final_response_text,
@@ -742,7 +741,7 @@ def get_response(
                 return final_response_text

             # No tool calls were made in this iteration, return the response
-            if verbose and not suppress_display:
+            if verbose:
                 display_interaction(
                     original_prompt,
                     response_text,
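
Taken together, these hunks restore the pre-#617 behavior in which get_response decides, from verbose alone, whether to render the exchange. Below is a self-contained sketch of that restored branch; display_interaction here is a simplified stand-in for the real praisonaiagents helper, and show_response is a hypothetical wrapper added only to make the snippet runnable.

import time
from typing import Optional

def display_interaction(prompt: str, response: str, markdown: bool = True,
                        generation_time: Optional[float] = None, console=None) -> None:
    # Simplified stand-in for the praisonaiagents display helper.
    suffix = f" ({generation_time:.2f}s)" if generation_time is not None else ""
    print(f">>> {prompt}{suffix}\n{response}")

def show_response(original_prompt: str, response_text: str,
                  reasoning_content: Optional[str], verbose: bool,
                  markdown: bool, start_time: float, console=None) -> None:
    # After the revert there is no suppress_display escape hatch:
    # verbose alone decides whether the interaction is rendered.
    if not verbose:
        return
    if reasoning_content:
        display_interaction(
            original_prompt,
            f"Reasoning:\n{reasoning_content}\n\nAnswer:\n{response_text}",
            markdown=markdown,
            generation_time=time.time() - start_time,
            console=console,
        )
    else:
        display_interaction(
            original_prompt,
            response_text,
            markdown=markdown,
            generation_time=time.time() - start_time,
            console=console,
        )

show_response("What is 2 + 2?", "4", reasoning_content=None,
              verbose=True, markdown=True, start_time=time.time())

This mirrors the reverted #617 change in reverse: the suppress_display escape hatch is removed, so callers can no longer opt out of the LLM-side rendering.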

test_duplicate_fix.py

Lines changed: 0 additions & 37 deletions
This file was deleted.
