Skip to content

Commit 87ad146

Browse files
authored
docs: fix mcp agent example (#1308)
**Description** Fixes the MCP agent Python example. It was missing an import statement, and without it, the agent traces were not properly generated. **Related Issues/PRs (if applicable)** Related: #1295 **Special notes for reviewers (if applicable)** N/A Signed-off-by: Ignasi Barrera <[email protected]>
1 parent 252e18c commit 87ad146

File tree

1 file changed

+18
-21
lines changed

1 file changed

+18
-21
lines changed

examples/mcp/agent.py

Lines changed: 18 additions & 21 deletions
Original file line number | Diff line number | Diff line change
@@ -21,6 +21,7 @@
2121
# "openai-agents",
2222
# "httpx",
2323
# "mcp",
24+
# "elastic-opentelemetry",
2425
# "openinference-instrumentation-openai-agents",
2526
# "opentelemetry-instrumentation-httpx",
2627
# "openinference-instrumentation-mcp",
@@ -42,32 +43,28 @@
4243
OpenAIProvider,
4344
RunConfig,
4445
Runner,
45-
Tool,
4646
)
47-
from agents.mcp import MCPServerStreamableHttp, MCPUtil
47+
from agents.mcp import MCPServer, MCPServerStreamableHttp, MCPUtil
4848

49-
50-
async def run_agent(prompt: str, model_name: str, tools: list[Tool]):
51-
model = OpenAIProvider(use_responses=False).get_model(model_name)
52-
agent = Agent(name="Assistant", model=model, tools=tools)
53-
result = await Runner.run(
54-
starting_agent=agent,
55-
input=prompt,
56-
run_config=RunConfig(workflow_name="envoy-ai-gateway"),
57-
)
58-
print(result.final_output)
49+
# Uncomment the following lines to enable agent verbose logging
50+
# from agents import enable_verbose_stdout_logging
51+
# enable_verbose_stdout_logging()
5952

6053

6154
async def main(prompt: str, model_name: str, mcp_url: str):
62-
if not mcp_url:
63-
await run_agent(prompt, model_name, [])
64-
return
65-
66-
async with MCPServerStreamableHttp({"url": mcp_url,"timeout": 300.0},cache_tools_list=True) as server:
67-
tools = await server.list_tools()
68-
util = MCPUtil()
69-
tools = [util.to_function_tool(tool, server, False) for tool in tools]
70-
await run_agent(prompt, model_name, tools)
55+
async with MCPServerStreamableHttp(
56+
name="Envoy AI Gateway MCP",
57+
params={"url": mcp_url, "timeout": 300},
58+
cache_tools_list=True,
59+
) as server:
60+
model = OpenAIProvider(use_responses=False).get_model(model_name)
61+
agent = Agent(name="Assistant", model=model, mcp_servers=[server])
62+
result = await Runner.run(
63+
starting_agent=agent,
64+
input=prompt,
65+
run_config=RunConfig(workflow_name="Envoy AI Gateway Example"),
66+
)
67+
print(result.final_output)
7168

7269

7370
if __name__ == "__main__":

0 commit comments

Comments
 (0)