diff --git a/01_ai_agents_first/16_advanced_handoffs/handsoff_masterclass/01_handoff_obj.py b/01_ai_agents_first/16_advanced_handoffs/handsoff_masterclass/01_handoff_obj.py
index dc248f9f..bbe63ef6 100644
--- a/01_ai_agents_first/16_advanced_handoffs/handsoff_masterclass/01_handoff_obj.py
+++ b/01_ai_agents_first/16_advanced_handoffs/handsoff_masterclass/01_handoff_obj.py
@@ -1,6 +1,6 @@
 import os
 from dotenv import load_dotenv, find_dotenv
-from agents import Agent, Runner, AsyncOpenAI, OpenAIChatCompletionsModel, function_tool, handoff
+from agents import Agent, Runner, AsyncOpenAI, OpenAIChatCompletionsModel, function_tool, handoff, RunContextWrapper
 
 _: bool = load_dotenv(find_dotenv())
 
@@ -26,6 +26,9 @@ def get_weather(city: str) -> str:
     """A simple function to get the weather for a user."""
     return f"The weather for {city} is sunny."
 
+# Function to call when the handoff is invoked
+def news_agent_handoff(ctx: RunContextWrapper[None]):
+    print(f"\nHanding off to NewsAgent...\n")
 
 news_agent: Agent = Agent(
     name="NewsAgent",
@@ -39,11 +42,12 @@ def get_weather(city: str) -> str:
     instructions="You are weather expert - share weather updates as I travel a lot. For all Tech and News let the NewsAgent handle that part by delegation.",
     model=llm_model,
     tools=[get_weather],
-    handoffs=[handoff(agent=news_agent, on_handoff=)]
+    handoffs=[handoff(agent=news_agent, on_handoff=news_agent_handoff)]  # Added callback function in the on_handoff parameter
 )
 
 res = Runner.run_sync(weather_agent, "Check if there's any news about OpenAI after GPT-5 launch?")
 print("\nAGENT NAME", res.last_agent.name)
 print("\n[RESPONSE:]", res.final_output)
 
-# Now check the trace in
\ No newline at end of file
+# Now check the trace in
+# https://platform.openai.com/traces
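
# --- Reference sketch --------------------------------------------------------
# A minimal, standalone illustration of the on_handoff pattern added in the diff
# above, extended with a typed handoff input via handoff(..., input_type=...),
# assuming the openai-agents SDK. NewsRequest, its topic field, and
# on_news_handoff are illustrative names introduced here, not part of
# 01_handoff_obj.py.
from pydantic import BaseModel
from agents import Agent, RunContextWrapper, handoff

class NewsRequest(BaseModel):
    topic: str  # what the user wants news about, e.g. a company or product launch

def on_news_handoff(ctx: RunContextWrapper[None], input_data: NewsRequest) -> None:
    # Runs at the moment the handoff tool is invoked, before NewsAgent answers.
    print(f"\nHanding off to NewsAgent for topic: {input_data.topic}\n")

news_agent: Agent = Agent(name="NewsAgent", instructions="You are a news expert.")

news_handoff = handoff(
    agent=news_agent,
    on_handoff=on_news_handoff,  # callback now also receives the parsed NewsRequest
    input_type=NewsRequest,      # the delegating model must supply arguments matching this schema
)
# The weather agent would then pass handoffs=[news_handoff], and the model fills
# in a NewsRequest whenever it delegates a news question.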