diff --git a/pydantic_ai_slim/pydantic_ai/models/__init__.py b/pydantic_ai_slim/pydantic_ai/models/__init__.py
index f24f836d0..839df22af 100644
--- a/pydantic_ai_slim/pydantic_ai/models/__init__.py
+++ b/pydantic_ai_slim/pydantic_ai/models/__init__.py
@@ -194,6 +194,13 @@
     'gpt-4o-mini-search-preview-2025-03-11',
     'gpt-4o-search-preview',
     'gpt-4o-search-preview-2025-03-11',
+    'gpt-5',
+    'gpt-5-2025-08-07',
+    'gpt-5-chat-latest',
+    'gpt-5-mini',
+    'gpt-5-mini-2025-08-07',
+    'gpt-5-nano',
+    'gpt-5-nano-2025-08-07',
     'grok:grok-4',
     'grok:grok-4-0709',
     'grok:grok-3',
@@ -313,11 +320,18 @@
     'openai:gpt-4o-mini-search-preview-2025-03-11',
     'openai:gpt-4o-search-preview',
     'openai:gpt-4o-search-preview-2025-03-11',
+    'openai:gpt-5',
+    'openai:gpt-5-2025-08-07',
     'openai:o1',
+    'openai:gpt-5-chat-latest',
     'openai:o1-2024-12-17',
+    'openai:gpt-5-mini',
     'openai:o1-mini',
+    'openai:gpt-5-mini-2025-08-07',
     'openai:o1-mini-2024-09-12',
+    'openai:gpt-5-nano',
     'openai:o1-preview',
+    'openai:gpt-5-nano-2025-08-07',
     'openai:o1-preview-2024-09-12',
     'openai:o1-pro',
     'openai:o1-pro-2025-03-19',
diff --git a/pydantic_ai_slim/pydantic_ai/models/openai.py b/pydantic_ai_slim/pydantic_ai/models/openai.py
index f15e98b2d..37e400962 100644
--- a/pydantic_ai_slim/pydantic_ai/models/openai.py
+++ b/pydantic_ai_slim/pydantic_ai/models/openai.py
@@ -59,6 +59,10 @@
     from openai.types.chat.chat_completion_content_part_image_param import ImageURL
     from openai.types.chat.chat_completion_content_part_input_audio_param import InputAudio
     from openai.types.chat.chat_completion_content_part_param import File, FileFile
+    from openai.types.chat.chat_completion_message_function_tool_call import ChatCompletionMessageFunctionToolCall
+    from openai.types.chat.chat_completion_message_function_tool_call_param import (
+        ChatCompletionMessageFunctionToolCallParam,
+    )
     from openai.types.chat.chat_completion_prediction_content_param import ChatCompletionPredictionContentParam
     from openai.types.chat.completion_create_params import (
         WebSearchOptions,
@@ -416,6 +420,7 @@ def _process_response(self, response: chat.ChatCompletion | str) -> ModelRespons
             items.extend(split_content_into_text_and_thinking(choice.message.content, self.profile.thinking_tags))
         if choice.message.tool_calls is not None:
             for c in choice.message.tool_calls:
+                c = cast(ChatCompletionMessageFunctionToolCall, c)
                 part = ToolCallPart(c.function.name, c.function.arguments, tool_call_id=c.id)
                 part.tool_call_id = _guard_tool_call_id(part)
                 items.append(part)
@@ -474,7 +479,7 @@ async def _map_messages(self, messages: list[ModelMessage]) -> list[chat.ChatCom
                     openai_messages.append(item)
             elif isinstance(message, ModelResponse):
                 texts: list[str] = []
-                tool_calls: list[chat.ChatCompletionMessageToolCallParam] = []
+                tool_calls: list[ChatCompletionMessageFunctionToolCallParam] = []
                 for item in message.parts:
                     if isinstance(item, TextPart):
                         texts.append(item.content)
@@ -505,8 +510,8 @@ async def _map_messages(self, messages: list[ModelMessage]) -> list[chat.ChatCom
         return openai_messages
 
     @staticmethod
-    def _map_tool_call(t: ToolCallPart) -> chat.ChatCompletionMessageToolCallParam:
-        return chat.ChatCompletionMessageToolCallParam(
+    def _map_tool_call(t: ToolCallPart) -> ChatCompletionMessageFunctionToolCallParam:
+        return ChatCompletionMessageFunctionToolCallParam(
             id=_guard_tool_call_id(t=t),
             type='function',
             function={'name': t.tool_name, 'arguments': t.args_as_json_str()},
diff --git a/pydantic_ai_slim/pyproject.toml b/pydantic_ai_slim/pyproject.toml
index 47c758af5..d5e23bf8e 100644
--- a/pydantic_ai_slim/pyproject.toml
+++ b/pydantic_ai_slim/pyproject.toml
@@ -62,7 +62,7 @@ dependencies = [
 # WARNING if you add optional groups, please update docs/install.md
 logfire = ["logfire>=3.11.0"]
 # Models
-openai = ["openai>=1.92.0"]
+openai = ["openai>=1.99.3"]
 cohere = ["cohere>=5.16.0; platform_system != 'Emscripten'"]
 vertexai = ["google-auth>=2.36.0", "requests>=2.32.2"]
 google = ["google-genai>=1.28.0"]
diff --git a/tests/models/cassettes/test_model_names/test_known_model_names.yaml b/tests/models/cassettes/test_model_names/test_known_model_names.yaml
index e66c3747b..0f9062d7d 100644
--- a/tests/models/cassettes/test_model_names/test_known_model_names.yaml
+++ b/tests/models/cassettes/test_model_names/test_known_model_names.yaml
@@ -14,12 +14,18 @@ interactions:
     uri: https://us.inference.heroku.com/available-models
   response:
     headers:
+      cache-control:
+      - no-cache, no-store, must-revalidate
       content-length:
       - '760'
       content-security-policy:
       - default-src 'none'; frame-ancestors 'none'
       content-type:
      - application/json
+      expires:
+      - '0'
+      pragma:
+      - no-cache
       strict-transport-security:
       - max-age=63072000
     parsed_body:
diff --git a/tests/models/test_openai.py b/tests/models/test_openai.py
index d59f2063a..0f1d26cca 100644
--- a/tests/models/test_openai.py
+++ b/tests/models/test_openai.py
@@ -60,6 +60,7 @@
         ChoiceDeltaToolCallFunction,
     )
     from openai.types.chat.chat_completion_message import ChatCompletionMessage
+    from openai.types.chat.chat_completion_message_function_tool_call import ChatCompletionMessageFunctionToolCall
     from openai.types.chat.chat_completion_message_tool_call import Function
     from openai.types.chat.chat_completion_token_logprob import ChatCompletionTokenLogprob
     from openai.types.completion_usage import CompletionUsage, PromptTokensDetails
@@ -241,7 +242,7 @@ async def test_request_structured_response(allow_model_requests: None):
             content=None,
             role='assistant',
             tool_calls=[
-                chat.ChatCompletionMessageToolCall(
+                ChatCompletionMessageFunctionToolCall(
                     id='123',
                     function=Function(arguments='{"response": [1, 2, 123]}', name='final_result'),
                     type='function',
@@ -292,7 +293,7 @@ async def test_request_tool_call(allow_model_requests: None):
             content=None,
             role='assistant',
             tool_calls=[
-                chat.ChatCompletionMessageToolCall(
+                ChatCompletionMessageFunctionToolCall(
                     id='1',
                     function=Function(arguments='{"loc_name": "San Fransisco"}', name='get_location'),
                     type='function',
@@ -311,7 +312,7 @@ async def test_request_tool_call(allow_model_requests: None):
             content=None,
             role='assistant',
             tool_calls=[
-                chat.ChatCompletionMessageToolCall(
+                ChatCompletionMessageFunctionToolCall(
                     id='2',
                     function=Function(arguments='{"loc_name": "London"}', name='get_location'),
                     type='function',
@@ -722,7 +723,7 @@ async def test_parallel_tool_calls(allow_model_requests: None, parallel_tool_cal
             content=None,
             role='assistant',
             tool_calls=[
-                chat.ChatCompletionMessageToolCall(
+                ChatCompletionMessageFunctionToolCall(
                     id='123',
                     function=Function(arguments='{"response": [1, 2, 3]}', name='final_result'),
                     type='function',
diff --git a/uv.lock b/uv.lock
index 8bc9238ef..a94a6fc87 100644
--- a/uv.lock
+++ b/uv.lock
@@ -1,5 +1,5 @@
 version = 1
-revision = 2
+revision = 3
 requires-python = ">=3.9"
 resolution-markers = [
     "python_full_version >= '3.13' and platform_python_implementation == 'PyPy'",
@@ -2578,7 +2578,7 @@ wheels = [
 
 [[package]]
 name = "openai"
-version = "1.97.0"
+version = "1.99.6"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "anyio" },
@@ -2590,9 +2590,9 @@ dependencies = [
     { name = "tqdm" },
     { name = "typing-extensions" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/e0/c6/b8d66e4f3b95493a8957065b24533333c927dc23817abe397f13fe589c6e/openai-1.97.0.tar.gz", hash = "sha256:0be349569ccaa4fb54f97bb808423fd29ccaeb1246ee1be762e0c81a47bae0aa", size = 493850, upload-time = "2025-07-16T16:37:35.196Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/11/45/38a87bd6949236db5ae3132f41d5861824702b149f86d2627d6900919103/openai-1.99.6.tar.gz", hash = "sha256:f48f4239b938ef187062f3d5199a05b69711d8b600b9a9b6a3853cd271799183", size = 505364, upload-time = "2025-08-09T15:20:54.438Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/8a/91/1f1cf577f745e956b276a8b1d3d76fa7a6ee0c2b05db3b001b900f2c71db/openai-1.97.0-py3-none-any.whl", hash = "sha256:a1c24d96f4609f3f7f51c9e1c2606d97cc6e334833438659cfd687e9c972c610", size = 764953, upload-time = "2025-07-16T16:37:33.135Z" },
+    { url = "https://files.pythonhosted.org/packages/d6/dd/9aa956485c2856346b3181542fbb0aea4e5b457fa7a523944726746da8da/openai-1.99.6-py3-none-any.whl", hash = "sha256:e40d44b2989588c45ce13819598788b77b8fb80ba2f7ae95ce90d14e46f1bd26", size = 786296, upload-time = "2025-08-09T15:20:51.95Z" },
 ]
 
 [[package]]
@@ -3512,7 +3512,7 @@ requires-dist = [
     { name = "logfire", marker = "extra == 'logfire'", specifier = ">=3.11.0" },
     { name = "mcp", marker = "python_full_version >= '3.10' and extra == 'mcp'", specifier = ">=1.10.0" },
     { name = "mistralai", marker = "extra == 'mistral'", specifier = ">=1.9.2" },
-    { name = "openai", marker = "extra == 'openai'", specifier = ">=1.92.0" },
+    { name = "openai", marker = "extra == 'openai'", specifier = ">=1.99.3" },
     { name = "opentelemetry-api", specifier = ">=1.28.0" },
     { name = "prompt-toolkit", marker = "extra == 'cli'", specifier = ">=3" },
     { name = "pydantic", specifier = ">=2.10" },