8 changes: 2 additions & 6 deletions src/agents/extensions/models/litellm_model.py
@@ -39,7 +39,7 @@
 from ...logger import logger
 from ...model_settings import ModelSettings
 from ...models.chatcmpl_converter import Converter
-from ...models.chatcmpl_helpers import HEADERS, USER_AGENT_OVERRIDE
+from ...models.chatcmpl_helpers import HEADERS, HEADERS_OVERRIDE
 from ...models.chatcmpl_stream_handler import ChatCmplStreamHandler
 from ...models.fake_id import FAKE_RESPONSES_ID
 from ...models.interface import Model, ModelTracing
@@ -385,11 +385,7 @@ def _remove_not_given(self, value: Any) -> Any:
         return value

     def _merge_headers(self, model_settings: ModelSettings):
-        merged = {**HEADERS, **(model_settings.extra_headers or {})}
-        ua_ctx = USER_AGENT_OVERRIDE.get()
-        if ua_ctx is not None:
-            merged["User-Agent"] = ua_ctx
-        return merged
+        return {**HEADERS, **(model_settings.extra_headers or {}), **(HEADERS_OVERRIDE.get() or {})}


 class LitellmConverter:
4 changes: 2 additions & 2 deletions src/agents/models/chatcmpl_helpers.py
@@ -10,8 +10,8 @@
 _USER_AGENT = f"Agents/Python {__version__}"
 HEADERS = {"User-Agent": _USER_AGENT}

-USER_AGENT_OVERRIDE: ContextVar[str | None] = ContextVar(
-    "openai_chatcompletions_user_agent_override", default=None
+HEADERS_OVERRIDE: ContextVar[dict[str, str] | None] = ContextVar(
+    "openai_chatcompletions_headers_override", default=None
 )
Comment on lines 10 to 15


[P1] Preserve USER_AGENT_OVERRIDE for backward compatibility

The context variable used by external callers to override the chat-completions User-Agent was renamed from USER_AGENT_OVERRIDE to HEADERS_OVERRIDE with no alias left behind. Because the original name was public (no leading underscore) and tests imported it directly, any existing client code that still does from agents.models.chatcmpl_helpers import USER_AGENT_OVERRIDE will now raise ImportError/AttributeError and lose the ability to override headers. Consider keeping the old symbol as an alias (USER_AGENT_OVERRIDE = HEADERS_OVERRIDE) or deprecating it gradually to avoid an unnecessary breaking change.

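A minimal sketch of the compatibility shim the comment suggests (not part of this PR), assuming the old symbol only needs to keep importing and setting an override; note that even with a bare alias, the value type callers must set changes from a string to a header dict:

```python
# Hypothetical addition to src/agents/models/chatcmpl_helpers.py (not in this PR).
from __future__ import annotations

from contextvars import ContextVar

HEADERS_OVERRIDE: ContextVar[dict[str, str] | None] = ContextVar(
    "openai_chatcompletions_headers_override", default=None
)

# Deprecated alias so `from agents.models.chatcmpl_helpers import USER_AGENT_OVERRIDE`
# keeps working. Callers previously did USER_AGENT_OVERRIDE.set("my-ua"); with the
# alias they would now set a dict: USER_AGENT_OVERRIDE.set({"User-Agent": "my-ua"}).
USER_AGENT_OVERRIDE = HEADERS_OVERRIDE
```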



12 changes: 6 additions & 6 deletions src/agents/models/openai_chatcompletions.py
@@ -25,7 +25,7 @@
 from ..usage import Usage
 from ..util._json import _to_dump_compatible
 from .chatcmpl_converter import Converter
-from .chatcmpl_helpers import HEADERS, USER_AGENT_OVERRIDE, ChatCmplHelpers
+from .chatcmpl_helpers import HEADERS, HEADERS_OVERRIDE, ChatCmplHelpers
 from .chatcmpl_stream_handler import ChatCmplStreamHandler
 from .fake_id import FAKE_RESPONSES_ID
 from .interface import Model, ModelTracing
@@ -351,8 +351,8 @@ def _get_client(self) -> AsyncOpenAI:
         return self._client

     def _merge_headers(self, model_settings: ModelSettings):
-        merged = {**HEADERS, **(model_settings.extra_headers or {})}
-        ua_ctx = USER_AGENT_OVERRIDE.get()
-        if ua_ctx is not None:
-            merged["User-Agent"] = ua_ctx
-        return merged
+        return {
+            **HEADERS,
+            **(model_settings.extra_headers or {}),
+            **(HEADERS_OVERRIDE.get() or {}),
+        }
16 changes: 8 additions & 8 deletions src/agents/models/openai_responses.py
@@ -50,9 +50,9 @@
 _USER_AGENT = f"Agents/Python {__version__}"
 _HEADERS = {"User-Agent": _USER_AGENT}

-# Override for the User-Agent header used by the Responses API.
-_USER_AGENT_OVERRIDE: ContextVar[str | None] = ContextVar(
-    "openai_responses_user_agent_override", default=None
+# Override headers used by the Responses API.
+_HEADERS_OVERRIDE: ContextVar[dict[str, str] | None] = ContextVar(
+    "openai_responses_headers_override", default=None
 )


@@ -334,11 +334,11 @@ def _get_client(self) -> AsyncOpenAI:
         return self._client

     def _merge_headers(self, model_settings: ModelSettings):
-        merged = {**_HEADERS, **(model_settings.extra_headers or {})}
-        ua_ctx = _USER_AGENT_OVERRIDE.get()
-        if ua_ctx is not None:
-            merged["User-Agent"] = ua_ctx
-        return merged
+        return {
+            **_HEADERS,
+            **(model_settings.extra_headers or {}),
+            **(_HEADERS_OVERRIDE.get() or {}),
+        }


 @dataclass
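In both rewritten _merge_headers implementations the context-var override is merged last, so it takes precedence over the default User-Agent header and over model_settings.extra_headers. A minimal usage sketch under that assumption, mirroring the set/reset pattern the tests below use (the header value here is made up for illustration):

```python
from agents.models.chatcmpl_helpers import HEADERS_OVERRIDE

# Apply a per-call header override; because it is merged last, it wins over
# both the default {"User-Agent": ...} header and ModelSettings.extra_headers.
token = HEADERS_OVERRIDE.set({"User-Agent": "my-app/1.2.3"})
try:
    ...  # run the agent / issue the chat-completions request here
finally:
    # Reset so the override does not leak into unrelated calls on this context.
    HEADERS_OVERRIDE.reset(token)
```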
6 changes: 3 additions & 3 deletions tests/models/test_litellm_user_agent.py
@@ -5,7 +5,7 @@
 import pytest

 from agents import ModelSettings, ModelTracing, __version__
-from agents.models.chatcmpl_helpers import USER_AGENT_OVERRIDE
+from agents.models.chatcmpl_helpers import HEADERS_OVERRIDE


 @pytest.mark.allow_call_model_methods
@@ -65,7 +65,7 @@ async def acompletion(**kwargs):
     model = LitellmModel(model="gpt-4")

     if override_ua is not None:
-        token = USER_AGENT_OVERRIDE.set(override_ua)
+        token = HEADERS_OVERRIDE.set({"User-Agent": override_ua})
     else:
         token = None
     try:
@@ -83,7 +83,7 @@
         )
     finally:
         if token is not None:
-            USER_AGENT_OVERRIDE.reset(token)
+            HEADERS_OVERRIDE.reset(token)

     assert "extra_headers" in called_kwargs
     assert called_kwargs["extra_headers"]["User-Agent"] == expected_ua
6 changes: 3 additions & 3 deletions tests/test_openai_chatcompletions.py
@@ -34,7 +34,7 @@
     __version__,
     generation_span,
 )
-from agents.models.chatcmpl_helpers import USER_AGENT_OVERRIDE, ChatCmplHelpers
+from agents.models.chatcmpl_helpers import HEADERS_OVERRIDE, ChatCmplHelpers
 from agents.models.fake_id import FAKE_RESPONSES_ID


@@ -402,7 +402,7 @@ def __init__(self):
     model = OpenAIChatCompletionsModel(model="gpt-4", openai_client=DummyChatClient())  # type: ignore

     if override_ua is not None:
-        token = USER_AGENT_OVERRIDE.set(override_ua)
+        token = HEADERS_OVERRIDE.set({"User-Agent": override_ua})
     else:
         token = None

@@ -420,7 +420,7 @@
         )
     finally:
         if token is not None:
-            USER_AGENT_OVERRIDE.reset(token)
+            HEADERS_OVERRIDE.reset(token)

     assert "extra_headers" in called_kwargs
     assert called_kwargs["extra_headers"]["User-Agent"] == expected_ua
6 changes: 3 additions & 3 deletions tests/test_openai_responses.py
@@ -6,7 +6,7 @@
 from openai.types.responses import ResponseCompletedEvent

 from agents import ModelSettings, ModelTracing, __version__
-from agents.models.openai_responses import _USER_AGENT_OVERRIDE as RESP_UA, OpenAIResponsesModel
+from agents.models.openai_responses import _HEADERS_OVERRIDE as RESP_HEADERS, OpenAIResponsesModel
 from tests.fake_model import get_response_obj


@@ -41,7 +41,7 @@ def __init__(self):
     model = OpenAIResponsesModel(model="gpt-4", openai_client=DummyResponsesClient())  # type: ignore

     if override_ua is not None:
-        token = RESP_UA.set(override_ua)
+        token = RESP_HEADERS.set({"User-Agent": override_ua})
     else:
         token = None

@@ -59,7 +59,7 @@
         pass
     finally:
         if token is not None:
-            RESP_UA.reset(token)
+            RESP_HEADERS.reset(token)

     assert "extra_headers" in called_kwargs
     assert called_kwargs["extra_headers"]["User-Agent"] == expected_ua