Commit e138249

fix: ensure callback execution in all achat() paths
Adds missing callback execution to tool-based responses and JSON/Pydantic output paths in the achat() method to ensure consistent task metadata propagation across all execution flows.

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-authored-by: Mervin Praison <[email protected]>
1 parent 0dc8795 commit e138249

1 file changed: 6 additions (+), 2 deletions (−)

src/praisonai-agents/praisonaiagents/agent/agent.py

Lines changed: 6 additions & 2 deletions

@@ -1699,6 +1699,8 @@ async def achat(self, prompt: str, temperature=0.2, tools=None, output_json=None
             if logging.getLogger().getEffectiveLevel() == logging.DEBUG:
                 total_time = time.time() - start_time
                 logging.debug(f"Agent.achat completed in {total_time:.2f} seconds")
+            # Execute callback after tool completion
+            self._execute_callback_and_display(original_prompt, result, time.time() - start_time, task_name, task_description, task_id)
             return result
         elif output_json or output_pydantic:
             response = await self._openai_client.async_client.chat.completions.create(
@@ -1707,11 +1709,13 @@ async def achat(self, prompt: str, temperature=0.2, tools=None, output_json=None
                 temperature=temperature,
                 response_format={"type": "json_object"}
             )
-            # Return the raw response
+            response_text = response.choices[0].message.content
             if logging.getLogger().getEffectiveLevel() == logging.DEBUG:
                 total_time = time.time() - start_time
                 logging.debug(f"Agent.achat completed in {total_time:.2f} seconds")
-            return response.choices[0].message.content
+            # Execute callback after JSON/Pydantic completion
+            self._execute_callback_and_display(original_prompt, response_text, time.time() - start_time, task_name, task_description, task_id)
+            return response_text
         else:
             response = await self._openai_client.async_client.chat.completions.create(
                 model=self.llm,
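The pattern behind this fix is simple: every early-return path in achat() must invoke the callback helper before returning, otherwise task metadata is only propagated on the default path. Below is a minimal, self-contained sketch of that pattern. It is not the praisonaiagents implementation; the MiniAgent class, its _execute_callback_and_display stub, and the on_result callback are hypothetical stand-ins that only mirror the argument order visible in the diff (prompt, response, elapsed time, task_name, task_description, task_id).

```python
import asyncio
import time
from typing import Callable, Optional

# Hypothetical stand-in for the real Agent class; it only illustrates the
# "run the callback before every return path" pattern applied by this commit.
class MiniAgent:
    def __init__(self, on_result: Optional[Callable] = None):
        self.on_result = on_result  # assumed user-supplied callback

    def _execute_callback_and_display(self, prompt, response, elapsed,
                                      task_name, task_description, task_id):
        # Mirrors the argument order used in the diff; the real method also
        # handles display logic, which is omitted here.
        if self.on_result:
            self.on_result(prompt=prompt, response=response, elapsed=elapsed,
                           task_name=task_name,
                           task_description=task_description,
                           task_id=task_id)

    async def achat(self, prompt, tools=None, output_json=None,
                    task_name=None, task_description=None, task_id=None):
        start_time = time.time()
        if tools:
            result = f"[tool result for: {prompt}]"  # placeholder for tool execution
            self._execute_callback_and_display(prompt, result, time.time() - start_time,
                                               task_name, task_description, task_id)
            return result  # tool path: now fires the callback before returning
        elif output_json:
            response_text = '{"answer": "placeholder"}'  # placeholder for structured output
            self._execute_callback_and_display(prompt, response_text, time.time() - start_time,
                                               task_name, task_description, task_id)
            return response_text  # JSON path: now fires the callback before returning
        else:
            response_text = f"[plain response for: {prompt}]"
            self._execute_callback_and_display(prompt, response_text, time.time() - start_time,
                                               task_name, task_description, task_id)
            return response_text  # default path

async def main():
    agent = MiniAgent(on_result=lambda **kw: print("callback:", kw["task_id"], kw["response"]))
    await agent.achat("hello", output_json=True, task_id="t-1")

asyncio.run(main())
```

Placing the call immediately before each return, rather than wrapping the whole method, matches the minimal-change style of the commit: each branch stays responsible for reporting its own result.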
