
Commit 9578600

Merge branch 'main' into fix/enable-unicode-json-output
2 parents 0542cfc + 003cbfe commit 9578600

File tree

12 files changed (+369 −18 lines)


AGENTS.md

Lines changed: 69 additions & 0 deletions
@@ -0,0 +1,69 @@
Welcome to the OpenAI Agents SDK repository. This file contains the main points for new contributors.

## Repository overview

- **Source code**: `src/agents/` contains the implementation.
- **Tests**: `tests/` with a short guide in `tests/README.md`.
- **Examples**: under `examples/`.
- **Documentation**: markdown pages live in `docs/` with `mkdocs.yml` controlling the site.
- **Utilities**: developer commands are defined in the `Makefile`.
- **PR template**: `.github/PULL_REQUEST_TEMPLATE/pull_request_template.md` describes the information every PR must include.

## Local workflow

1. Format, lint and type‑check your changes:

   ```bash
   make format
   make lint
   make mypy
   ```

2. Run the tests:

   ```bash
   make tests
   ```

   To run a single test, use `uv run pytest -s -k <test_name>`.

3. Build the documentation (optional but recommended for docs changes):

   ```bash
   make build-docs
   ```

   Coverage can be generated with `make coverage`.

## Snapshot tests

Some tests rely on inline snapshots. See `tests/README.md` for details on updating them:

```bash
make snapshots-fix      # update existing snapshots
make snapshots-create   # create new snapshots
```

Run `make tests` again after updating snapshots to ensure they pass.

## Style notes

- Write comments as full sentences and end them with a period.

## Pull request expectations

PRs should use the template located at `.github/PULL_REQUEST_TEMPLATE/pull_request_template.md`. Provide a summary, test plan and issue number if applicable, then check that:

- New tests are added when needed.
- Documentation is updated.
- `make lint` and `make format` have been run.
- The full test suite passes.

Commit messages should be concise and written in the imperative mood. Small, focused commits are preferred.

## What reviewers look for

- Tests covering new behaviour.
- Consistent style: code formatted with `ruff format`, imports sorted, and type hints passing `mypy`.
- Clear documentation for any public API changes.
- Clean history and a helpful PR description.
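
The snapshot workflow in AGENTS.md above refers to inline snapshots. As a rough illustration (not part of this commit), a test using the `inline-snapshot` pytest plugin typically looks like the sketch below; `render_greeting` is a hypothetical function standing in for real SDK code under test:

```python
from inline_snapshot import snapshot


def render_greeting(name: str) -> str:
    # Hypothetical helper standing in for real SDK code under test.
    return f"Hello, {name}!"


def test_render_greeting() -> None:
    # `make snapshots-create` fills in empty snapshot() values on first run;
    # `make snapshots-fix` rewrites them when the expected output changes.
    assert render_greeting("world") == snapshot("Hello, world!")
```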
Lines changed: 13 additions & 0 deletions
@@ -0,0 +1,13 @@
# MCP Streamable HTTP Example

This example uses a local Streamable HTTP server in [server.py](server.py).

Run the example via:

```
uv run python examples/mcp/streamablehttp_example/main.py
```

## Details

The example uses the `MCPServerStreamableHttp` class from `agents.mcp`. The server runs in a sub-process at `http://localhost:8000/mcp`.
examples/mcp/streamablehttp_example/main.py

Lines changed: 83 additions & 0 deletions
@@ -0,0 +1,83 @@
import asyncio
import os
import shutil
import subprocess
import time
from typing import Any

from agents import Agent, Runner, gen_trace_id, trace
from agents.mcp import MCPServer, MCPServerStreamableHttp
from agents.model_settings import ModelSettings


async def run(mcp_server: MCPServer):
    agent = Agent(
        name="Assistant",
        instructions="Use the tools to answer the questions.",
        mcp_servers=[mcp_server],
        model_settings=ModelSettings(tool_choice="required"),
    )

    # Use the `add` tool to add two numbers
    message = "Add these numbers: 7 and 22."
    print(f"Running: {message}")
    result = await Runner.run(starting_agent=agent, input=message)
    print(result.final_output)

    # Run the `get_weather` tool
    message = "What's the weather in Tokyo?"
    print(f"\n\nRunning: {message}")
    result = await Runner.run(starting_agent=agent, input=message)
    print(result.final_output)

    # Run the `get_secret_word` tool
    message = "What's the secret word?"
    print(f"\n\nRunning: {message}")
    result = await Runner.run(starting_agent=agent, input=message)
    print(result.final_output)


async def main():
    async with MCPServerStreamableHttp(
        name="Streamable HTTP Python Server",
        params={
            "url": "http://localhost:8000/mcp",
        },
    ) as server:
        trace_id = gen_trace_id()
        with trace(workflow_name="Streamable HTTP Example", trace_id=trace_id):
            print(f"View trace: https://platform.openai.com/traces/trace?trace_id={trace_id}\n")
            await run(server)


if __name__ == "__main__":
    # Let's make sure the user has uv installed
    if not shutil.which("uv"):
        raise RuntimeError(
            "uv is not installed. Please install it: https://docs.astral.sh/uv/getting-started/installation/"
        )

    # We'll run the Streamable HTTP server in a subprocess. Usually this would be a remote server, but for this
    # demo, we'll run it locally at http://localhost:8000/mcp
    process: subprocess.Popen[Any] | None = None
    try:
        this_dir = os.path.dirname(os.path.abspath(__file__))
        server_file = os.path.join(this_dir, "server.py")

        print("Starting Streamable HTTP server at http://localhost:8000/mcp ...")

        # Run `uv run server.py` to start the Streamable HTTP server
        process = subprocess.Popen(["uv", "run", server_file])
        # Give it 3 seconds to start
        time.sleep(3)

        print("Streamable HTTP server started. Running example...\n\n")
    except Exception as e:
        print(f"Error starting Streamable HTTP server: {e}")
        exit(1)

    try:
        asyncio.run(main())
    finally:
        if process:
            process.terminate()
examples/mcp/streamablehttp_example/server.py

Lines changed: 33 additions & 0 deletions
@@ -0,0 +1,33 @@
import random

import requests
from mcp.server.fastmcp import FastMCP

# Create server
mcp = FastMCP("Echo Server")


@mcp.tool()
def add(a: int, b: int) -> int:
    """Add two numbers"""
    print(f"[debug-server] add({a}, {b})")
    return a + b


@mcp.tool()
def get_secret_word() -> str:
    print("[debug-server] get_secret_word()")
    return random.choice(["apple", "banana", "cherry"])


@mcp.tool()
def get_current_weather(city: str) -> str:
    print(f"[debug-server] get_current_weather({city})")

    endpoint = "https://wttr.in"
    response = requests.get(f"{endpoint}/{city}")
    return response.text


if __name__ == "__main__":
    mcp.run(transport="streamable-http")

examples/research_bot/agents/search_agent.py

Lines changed: 1 addition & 1 deletion
@@ -3,7 +3,7 @@

 INSTRUCTIONS = (
     "You are a research assistant. Given a search term, you search the web for that term and "
-    "produce a concise summary of the results. The summary must 2-3 paragraphs and less than 300 "
+    "produce a concise summary of the results. The summary must be 2-3 paragraphs and less than 300 "
     "words. Capture the main points. Write succinctly, no need to have complete sentences or good "
     "grammar. This will be consumed by someone synthesizing a report, so its vital you capture the "
     "essence and ignore any fluff. Do not include any additional commentary other than the summary "

pyproject.toml

Lines changed: 2 additions & 2 deletions
@@ -1,6 +1,6 @@
 [project]
 name = "openai-agents"
-version = "0.0.14"
+version = "0.0.15"
 description = "OpenAI Agents SDK"
 readme = "README.md"
 requires-python = ">=3.9"
@@ -13,7 +13,7 @@ dependencies = [
     "typing-extensions>=4.12.2, <5",
     "requests>=2.0, <3",
     "types-requests>=2.0, <3",
-    "mcp>=1.6.0, <2; python_version >= '3.10'",
+    "mcp>=1.8.0, <2; python_version >= '3.10'",
 ]
 classifiers = [
     "Typing :: Typed",

src/agents/extensions/models/litellm_model.py

Lines changed: 2 additions & 0 deletions
@@ -273,6 +273,8 @@ async def _fetch_response(
             extra_kwargs["extra_query"] = model_settings.extra_query
         if model_settings.metadata:
             extra_kwargs["metadata"] = model_settings.metadata
+        if model_settings.extra_body and isinstance(model_settings.extra_body, dict):
+            extra_kwargs.update(model_settings.extra_body)

         ret = await litellm.acompletion(
             model=self.model,
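
As a rough sketch of what the `extra_body` change above enables (not taken from this commit; the model name and the extra-body key below are illustrative assumptions), a caller might pass provider-specific request fields through `ModelSettings` and expect them to be merged into the keyword arguments forwarded to `litellm.acompletion`:

```python
from agents import Agent, ModelSettings
from agents.extensions.models.litellm_model import LitellmModel

# Hypothetical usage: any dict supplied as `extra_body` is merged into the
# kwargs sent to litellm.acompletion; the key shown here is only an example.
agent = Agent(
    name="Assistant",
    instructions="Answer briefly.",
    model=LitellmModel(model="anthropic/claude-3-5-sonnet-20240620"),
    model_settings=ModelSettings(
        extra_body={"cache_control": {"type": "ephemeral"}},
    ),
)
```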

src/agents/mcp/__init__.py

Lines changed: 4 additions & 0 deletions
@@ -5,6 +5,8 @@
         MCPServerSseParams,
         MCPServerStdio,
         MCPServerStdioParams,
+        MCPServerStreamableHttp,
+        MCPServerStreamableHttpParams,
     )
 except ImportError:
     pass
@@ -17,5 +19,7 @@
     "MCPServerSseParams",
     "MCPServerStdio",
     "MCPServerStdioParams",
+    "MCPServerStreamableHttp",
+    "MCPServerStreamableHttpParams",
     "MCPUtil",
 ]
