Skip to content

Add first-class verbosity support for GPT-5 #1403

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 1 commit into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 3 additions & 3 deletions src/agents/handoffs.py
Original file line number Diff line number Diff line change
Expand Up @@ -119,9 +119,9 @@ class Handoff(Generic[TContext, TAgent]):
True, as it increases the likelihood of correct JSON input.
"""

is_enabled: bool | Callable[
[RunContextWrapper[Any], AgentBase[Any]], MaybeAwaitable[bool]
] = True
is_enabled: bool | Callable[[RunContextWrapper[Any], AgentBase[Any]], MaybeAwaitable[bool]] = (
True
)
"""Whether the handoff is enabled. Either a bool or a Callable that takes the run context and
agent and returns whether the handoff is enabled. You can use this to dynamically enable/disable
a handoff based on your context/state."""
Expand Down
7 changes: 7 additions & 0 deletions src/agents/model_settings.py
Original file line number Diff line number Diff line change
Expand Up @@ -116,6 +116,13 @@ class ModelSettings:
"""Additional output data to include in the model response.
[include parameter](https://platform.openai.com/docs/api-reference/responses/create#responses-create-include)"""

verbosity: Literal["low", "medium", "high"] | None = None
"""Controls response verbosity for supported models.
In Responses API this is sent as `text.verbosity`;
in Chat Completions it is top-level `verbosity`.
Values: "low", "medium", "high". Defaults to provider/model behavior if not set.
"""

extra_query: Query | None = None
"""Additional query fields to provide with the request.
Defaults to None if not provided."""
Expand Down
15 changes: 14 additions & 1 deletion src/agents/models/openai_chatcompletions.py
Original file line number Diff line number Diff line change
Expand Up @@ -271,6 +271,19 @@ async def _fetch_response(
self._get_client(), model_settings, stream=stream
)

# Carry verbosity for Chat Completions using extra_body until
# official client types include it as a top-level param.
from typing import Any, cast
base_extra_body = cast(dict[str, Any], model_settings.extra_body or {})
extra_body = {
**base_extra_body,
**(
{"verbosity": model_settings.verbosity}
if model_settings.verbosity is not None
else {}
),
}

ret = await self._get_client().chat.completions.create(
model=self.model,
messages=converted_messages,
Expand All @@ -289,7 +302,7 @@ async def _fetch_response(
reasoning_effort=self._non_null_or_not_given(reasoning_effort),
extra_headers={**HEADERS, **(model_settings.extra_headers or {})},
extra_query=model_settings.extra_query,
extra_body=model_settings.extra_body,
extra_body=extra_body,
metadata=self._non_null_or_not_given(model_settings.metadata),
**(model_settings.extra_args or {}),
)
Expand Down
16 changes: 15 additions & 1 deletion src/agents/models/openai_responses.py
Original file line number Diff line number Diff line change
Expand Up @@ -247,6 +247,20 @@ async def _fetch_response(
converted_tools = Converter.convert_tools(tools, handoffs)
response_format = Converter.get_response_format(output_schema)

# Merge verbosity into the `text` param alongside any response format.
# Responses API expects verbosity under the `text` object.
if model_settings.verbosity is not None:
if response_format is NOT_GIVEN:
text_param: ResponseTextConfigParam | Any = {"verbosity": model_settings.verbosity}
else:
# response_format is a dict; augment it without mutating the original
from typing import cast

rf = cast(ResponseTextConfigParam, response_format)
text_param = {**rf, "verbosity": model_settings.verbosity}
else:
text_param = response_format

include: list[ResponseIncludable] = converted_tools.includes
if model_settings.response_include is not None:
include = list({*include, *model_settings.response_include})
Expand Down Expand Up @@ -282,7 +296,7 @@ async def _fetch_response(
extra_headers={**_HEADERS, **(model_settings.extra_headers or {})},
extra_query=model_settings.extra_query,
extra_body=model_settings.extra_body,
text=response_format,
text=text_param,
store=self._non_null_or_not_given(model_settings.store),
reasoning=self._non_null_or_not_given(model_settings.reasoning),
metadata=self._non_null_or_not_given(model_settings.metadata),
Expand Down
1 change: 0 additions & 1 deletion src/agents/run.py
Original file line number Diff line number Diff line change
Expand Up @@ -1034,7 +1034,6 @@ async def _get_single_step_result_from_streamed_response(
run_config: RunConfig,
tool_use_tracker: AgentToolUseTracker,
) -> SingleStepResult:

original_input = streamed_result.input
pre_step_items = streamed_result.new_items
event_queue = streamed_result._event_queue
Expand Down
4 changes: 2 additions & 2 deletions src/agents/tracing/processors.py
Original file line number Diff line number Diff line change
Expand Up @@ -70,8 +70,8 @@ def set_api_key(self, api_key: str):
client.
"""
# Clear the cached property if it exists
if 'api_key' in self.__dict__:
del self.__dict__['api_key']
if "api_key" in self.__dict__:
del self.__dict__["api_key"]

# Update the private attribute
self._api_key = api_key
Expand Down
5 changes: 2 additions & 3 deletions tests/test_agent_clone_shallow_copy.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
def greet(name: str) -> str:
    """Return a friendly greeting addressed to *name*."""
    message = "Hello, {}!".format(name)
    return message


def test_agent_clone_shallow_copy():
"""Test that clone creates shallow copy with tools.copy() workaround"""
target_agent = Agent(name="Target")
Expand All @@ -16,9 +17,7 @@ def test_agent_clone_shallow_copy():
)

cloned = original.clone(
name="Cloned",
tools=original.tools.copy(),
handoffs=original.handoffs.copy()
name="Cloned", tools=original.tools.copy(), handoffs=original.handoffs.copy()
)

# Basic assertions
Expand Down
1 change: 1 addition & 0 deletions tests/test_stream_events.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ async def foo() -> str:
await asyncio.sleep(3)
return "success!"


@pytest.mark.asyncio
async def test_stream_events_main():
model = FakeModel()
Expand Down
Loading