Skip to content

Commit f4486af

Browse files
jssmithtconley1428
andauthored
OpenAI Agents - Tools, Handoffs, and Hosted MCP (#225)
* update for plugins * formatting * reference main branch * cleanup * switch to plugins on the runners * move around samples * update README files * formatting update * formatting * timeout adjustments * Ported Tools, Handoffs, and Hosted MCP * Revert uv.lock --------- Co-authored-by: Tim Conley <[email protected]>
1 parent 582235f commit f4486af

21 files changed

+1106
-0
lines changed

openai_agents/handoffs/README.md

Lines changed: 44 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,44 @@
1+
# Handoffs Examples
2+
3+
Agent handoff patterns with message filtering in Temporal workflows.
4+
5+
*Adapted from [OpenAI Agents SDK handoffs examples](https://github.com/openai/openai-agents-python/tree/main/examples/handoffs)*
6+
7+
Before running these examples, be sure to review the [prerequisites and background on the integration](../README.md).
8+
9+
## Running the Examples
10+
11+
First, start the worker:
12+
```bash
13+
uv run openai_agents/handoffs/run_worker.py
14+
```
15+
16+
Then run the workflow:
17+
18+
### Message Filter Workflow
19+
Demonstrates agent handoffs with message history filtering:
20+
```bash
21+
uv run openai_agents/handoffs/run_message_filter_workflow.py
22+
```
23+
24+
## Workflow Pattern
25+
26+
The workflow demonstrates a 4-step conversation with message filtering:
27+
28+
1. **Introduction**: User greets first agent with name
29+
2. **Tool Usage**: First agent generates random number using function tool
30+
3. **Agent Switch**: Conversation moves to second agent for general questions
31+
4. **Spanish Handoff**: Second agent detects Spanish and hands off to Spanish specialist
32+
33+
During the Spanish handoff, message filtering occurs:
34+
- All tool-related messages are removed from history
35+
- First two messages are dropped (demonstration of selective context)
36+
- Filtered conversation continues with Spanish agent
37+
38+
The workflow returns both the final response and complete message history for inspection.
39+
40+
## Omitted Examples
41+
42+
The following patterns from the [reference repository](https://github.com/openai/openai-agents-python/tree/main/examples/handoffs) are not included in this Temporal adaptation:
43+
44+
- **Message Filter Streaming**: Streaming capabilities are not yet available in the Temporal integration
Lines changed: 38 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,38 @@
1+
import asyncio
2+
import json
3+
4+
from temporalio.client import Client
5+
from temporalio.contrib.openai_agents import OpenAIAgentsPlugin
6+
7+
from openai_agents.handoffs.workflows.message_filter_workflow import (
8+
MessageFilterWorkflow,
9+
)
10+
11+
12+
async def main():
13+
# Create client connected to server at the given address
14+
client = await Client.connect(
15+
"localhost:7233",
16+
plugins=[
17+
OpenAIAgentsPlugin(),
18+
],
19+
)
20+
21+
# Execute a workflow
22+
result = await client.execute_workflow(
23+
MessageFilterWorkflow.run,
24+
"Sora",
25+
id="message-filter-workflow",
26+
task_queue="openai-agents-handoffs-task-queue",
27+
)
28+
29+
print(f"Final output: {result.final_output}")
30+
print("\n===Final messages===\n")
31+
32+
# Print the final message history to see the effect of the message filter
33+
for message in result.final_messages:
34+
print(json.dumps(message, indent=2))
35+
36+
37+
if __name__ == "__main__":
38+
asyncio.run(main())
Lines changed: 42 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,42 @@
1+
from __future__ import annotations
2+
3+
import asyncio
4+
from datetime import timedelta
5+
6+
from temporalio.client import Client
7+
from temporalio.contrib.openai_agents import ModelActivityParameters, OpenAIAgentsPlugin
8+
from temporalio.worker import Worker
9+
10+
from openai_agents.handoffs.workflows.message_filter_workflow import (
11+
MessageFilterWorkflow,
12+
)
13+
14+
15+
async def main():
16+
# Create client connected to server at the given address
17+
client = await Client.connect(
18+
"localhost:7233",
19+
plugins=[
20+
OpenAIAgentsPlugin(
21+
model_params=ModelActivityParameters(
22+
start_to_close_timeout=timedelta(seconds=60)
23+
)
24+
),
25+
],
26+
)
27+
28+
worker = Worker(
29+
client,
30+
task_queue="openai-agents-handoffs-task-queue",
31+
workflows=[
32+
MessageFilterWorkflow,
33+
],
34+
activities=[
35+
# No custom activities needed for these workflows
36+
],
37+
)
38+
await worker.run()
39+
40+
41+
if __name__ == "__main__":
42+
asyncio.run(main())
Lines changed: 112 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,112 @@
1+
from __future__ import annotations
2+
3+
from dataclasses import dataclass
4+
from typing import List
5+
6+
from agents import Agent, HandoffInputData, Runner, function_tool, handoff
7+
from agents.extensions import handoff_filters
8+
from agents.items import TResponseInputItem
9+
from temporalio import workflow
10+
11+
12+
@dataclass
13+
class MessageFilterResult:
14+
final_output: str
15+
final_messages: List[TResponseInputItem]
16+
17+
18+
@function_tool
19+
def random_number_tool(max: int) -> int:
20+
"""Return a random integer between 0 and the given maximum."""
21+
return workflow.random().randint(0, max)
22+
23+
24+
def spanish_handoff_message_filter(
25+
handoff_message_data: HandoffInputData,
26+
) -> HandoffInputData:
27+
# First, we'll remove any tool-related messages from the message history
28+
handoff_message_data = handoff_filters.remove_all_tools(handoff_message_data)
29+
30+
# Second, we'll also remove the first two items from the history, just for demonstration
31+
history = (
32+
tuple(handoff_message_data.input_history[2:])
33+
if isinstance(handoff_message_data.input_history, tuple)
34+
else handoff_message_data.input_history
35+
)
36+
37+
return HandoffInputData(
38+
input_history=history,
39+
pre_handoff_items=tuple(handoff_message_data.pre_handoff_items),
40+
new_items=tuple(handoff_message_data.new_items),
41+
)
42+
43+
44+
@workflow.defn
45+
class MessageFilterWorkflow:
46+
@workflow.run
47+
async def run(self, user_name: str = "Sora") -> MessageFilterResult:
48+
first_agent = Agent(
49+
name="Assistant",
50+
instructions="Be extremely concise.",
51+
tools=[random_number_tool],
52+
)
53+
54+
spanish_agent = Agent(
55+
name="Spanish Assistant",
56+
instructions="You only speak Spanish and are extremely concise.",
57+
handoff_description="A Spanish-speaking assistant.",
58+
)
59+
60+
second_agent = Agent(
61+
name="Assistant",
62+
instructions=(
63+
"Be a helpful assistant. If the user speaks Spanish, handoff to the Spanish assistant."
64+
),
65+
handoffs=[
66+
handoff(spanish_agent, input_filter=spanish_handoff_message_filter)
67+
],
68+
)
69+
70+
# 1. Send a regular message to the first agent
71+
result = await Runner.run(first_agent, input=f"Hi, my name is {user_name}.")
72+
73+
# 2. Ask it to generate a number
74+
result = await Runner.run(
75+
first_agent,
76+
input=result.to_input_list()
77+
+ [
78+
{
79+
"content": "Can you generate a random number between 0 and 100?",
80+
"role": "user",
81+
}
82+
],
83+
)
84+
85+
# 3. Call the second agent
86+
result = await Runner.run(
87+
second_agent,
88+
input=result.to_input_list()
89+
+ [
90+
{
91+
"content": "I live in New York City. What's the population of the city?",
92+
"role": "user",
93+
}
94+
],
95+
)
96+
97+
# 4. Cause a handoff to occur
98+
result = await Runner.run(
99+
second_agent,
100+
input=result.to_input_list()
101+
+ [
102+
{
103+
"content": "Por favor habla en español. ¿Cuál es mi nombre y dónde vivo?",
104+
"role": "user",
105+
}
106+
],
107+
)
108+
109+
# Return the final result and message history
110+
return MessageFilterResult(
111+
final_output=result.final_output, final_messages=result.to_input_list()
112+
)

openai_agents/hosted_mcp/README.md

Lines changed: 39 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,39 @@
1+
# Hosted MCP Examples
2+
3+
Integration with hosted MCP (Model Context Protocol) servers using OpenAI agents in Temporal workflows.
4+
5+
*Adapted from [OpenAI Agents SDK hosted_mcp examples](https://github.com/openai/openai-agents-python/tree/main/examples/hosted_mcp)*
6+
7+
Before running these examples, be sure to review the [prerequisites and background on the integration](../README.md).
8+
9+
## Running the Examples
10+
11+
First, start the worker (supports all MCP workflows):
12+
```bash
13+
uv run openai_agents/hosted_mcp/run_worker.py
14+
```
15+
16+
Then run individual examples in separate terminals:
17+
18+
### Simple MCP Connection
19+
Connect to a hosted MCP server without approval requirements (trusted servers):
20+
```bash
21+
uv run openai_agents/hosted_mcp/run_simple_mcp_workflow.py
22+
```
23+
24+
### MCP with Approval Callbacks
25+
Connect to a hosted MCP server with approval workflow for tool execution:
26+
```bash
27+
uv run openai_agents/hosted_mcp/run_approval_mcp_workflow.py
28+
```
29+
30+
## MCP Server Configuration
31+
32+
Both examples default to using the GitMCP server (`https://gitmcp.io/openai/codex`) which provides repository analysis capabilities. The workflows can be easily modified to use different MCP servers by changing the `server_url` parameter.
33+
34+
### Approval Workflow Notes
35+
36+
The approval example demonstrates the callback structure for tool approvals in a Temporal context. In this implementation:
37+
38+
- The approval callback automatically approves requests for demonstration purposes
39+
- In production environments, approvals would typically be handled by communicating with a human user. Because the approval executes in the Temporal workflow, you can use signals or updates to communicate approval status.
Lines changed: 30 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,30 @@
1+
import asyncio
2+
3+
from temporalio.client import Client
4+
from temporalio.contrib.openai_agents import OpenAIAgentsPlugin
5+
6+
from openai_agents.hosted_mcp.workflows.approval_mcp_workflow import ApprovalMCPWorkflow
7+
8+
9+
async def main():
10+
# Create client connected to server at the given address
11+
client = await Client.connect(
12+
"localhost:7233",
13+
plugins=[
14+
OpenAIAgentsPlugin(),
15+
],
16+
)
17+
18+
# Execute a workflow
19+
result = await client.execute_workflow(
20+
ApprovalMCPWorkflow.run,
21+
"Which language is this repo written in?",
22+
id="approval-mcp-workflow",
23+
task_queue="openai-agents-hosted-mcp-task-queue",
24+
)
25+
26+
print(f"Result: {result}")
27+
28+
29+
if __name__ == "__main__":
30+
asyncio.run(main())
Lines changed: 30 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,30 @@
1+
import asyncio
2+
3+
from temporalio.client import Client
4+
from temporalio.contrib.openai_agents import OpenAIAgentsPlugin
5+
6+
from openai_agents.hosted_mcp.workflows.simple_mcp_workflow import SimpleMCPWorkflow
7+
8+
9+
async def main():
10+
# Create client connected to server at the given address
11+
client = await Client.connect(
12+
"localhost:7233",
13+
plugins=[
14+
OpenAIAgentsPlugin(),
15+
],
16+
)
17+
18+
# Execute a workflow
19+
result = await client.execute_workflow(
20+
SimpleMCPWorkflow.run,
21+
"Which language is this repo written in?",
22+
id="simple-mcp-workflow",
23+
task_queue="openai-agents-hosted-mcp-task-queue",
24+
)
25+
26+
print(f"Result: {result}")
27+
28+
29+
if __name__ == "__main__":
30+
asyncio.run(main())
Lines changed: 42 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,42 @@
1+
from __future__ import annotations
2+
3+
import asyncio
4+
from datetime import timedelta
5+
6+
from temporalio.client import Client
7+
from temporalio.contrib.openai_agents import ModelActivityParameters, OpenAIAgentsPlugin
8+
from temporalio.worker import Worker
9+
10+
from openai_agents.hosted_mcp.workflows.approval_mcp_workflow import ApprovalMCPWorkflow
11+
from openai_agents.hosted_mcp.workflows.simple_mcp_workflow import SimpleMCPWorkflow
12+
13+
14+
async def main():
15+
# Create client connected to server at the given address
16+
client = await Client.connect(
17+
"localhost:7233",
18+
plugins=[
19+
OpenAIAgentsPlugin(
20+
model_params=ModelActivityParameters(
21+
start_to_close_timeout=timedelta(seconds=60)
22+
)
23+
),
24+
],
25+
)
26+
27+
worker = Worker(
28+
client,
29+
task_queue="openai-agents-hosted-mcp-task-queue",
30+
workflows=[
31+
SimpleMCPWorkflow,
32+
ApprovalMCPWorkflow,
33+
],
34+
activities=[
35+
# No custom activities needed for these workflows
36+
],
37+
)
38+
await worker.run()
39+
40+
41+
if __name__ == "__main__":
42+
asyncio.run(main())

0 commit comments

Comments
 (0)