diff --git a/libs/partners/openai/langchain_openai/chat_models/base.py b/libs/partners/openai/langchain_openai/chat_models/base.py
index 0dae21a19c3d4..d42eb2540cd49 100644
--- a/libs/partners/openai/langchain_openai/chat_models/base.py
+++ b/libs/partners/openai/langchain_openai/chat_models/base.py
@@ -811,14 +811,15 @@ def validate_temperature(cls, values: dict[str, Any]) -> Any:
             (Defaults to 1)
         """
         model = values.get("model_name") or values.get("model") or ""
+        model_lower = model.lower()
 
         # For o1 models, set temperature=1 if not provided
-        if model.startswith("o1") and "temperature" not in values:
+        if model_lower.startswith("o1") and "temperature" not in values:
             values["temperature"] = 1
 
         # For gpt-5 models, handle temperature restrictions
         # Note that gpt-5-chat models do support temperature
-        if model.startswith("gpt-5") and "chat" not in model:
+        if model_lower.startswith("gpt-5") and "chat" not in model_lower:
             temperature = values.get("temperature")
             if temperature is not None and temperature != 1:
                 # For gpt-5 (non-chat), only temperature=1 is supported
@@ -1649,12 +1650,9 @@ def _get_encoding_model(self) -> tuple[str, tiktoken.Encoding]:
         try:
             encoding = tiktoken.encoding_for_model(model)
         except KeyError:
+            model_lower = self.model_name.lower()
             encoder = "cl100k_base"
-            if (
-                self.model_name.startswith("gpt-4o")
-                or self.model_name.startswith("gpt-4.1")
-                or self.model_name.startswith("gpt-5")
-            ):
+            if model_lower.startswith(("gpt-4o", "gpt-4.1", "gpt-5")):
                 encoder = "o200k_base"
             encoding = tiktoken.get_encoding(encoder)
         return model, encoding
diff --git a/libs/partners/openai/tests/unit_tests/chat_models/test_base.py b/libs/partners/openai/tests/unit_tests/chat_models/test_base.py
index 9dd15931c4d72..382c902cf0899 100644
--- a/libs/partners/openai/tests/unit_tests/chat_models/test_base.py
+++ b/libs/partners/openai/tests/unit_tests/chat_models/test_base.py
@@ -3002,3 +3002,33 @@ def test_gpt_5_temperature(use_responses_api: bool) -> None:
     messages = [HumanMessage(content="Hello")]
     payload = llm._get_request_payload(messages)
     assert payload["temperature"] == 0.5  # gpt-5-chat is exception
+
+
+@pytest.mark.parametrize("use_responses_api", [False, True])
+@pytest.mark.parametrize(
+    "model_name",
+    [
+        "GPT-5-NANO",
+        "GPT-5-2025-01-01",
+        "Gpt-5-Turbo",
+        "gPt-5-mini",
+    ],
+)
+def test_gpt_5_temperature_case_insensitive(
+    use_responses_api: bool, model_name: str
+) -> None:
+    llm = ChatOpenAI(
+        model=model_name, temperature=0.5, use_responses_api=use_responses_api
+    )
+
+    messages = [HumanMessage(content="Hello")]
+    payload = llm._get_request_payload(messages)
+    assert "temperature" not in payload
+
+    for chat_model in ["GPT-5-CHAT", "Gpt-5-Chat", "gpt-5-chat"]:
+        llm = ChatOpenAI(
+            model=chat_model, temperature=0.7, use_responses_api=use_responses_api
+        )
+        messages = [HumanMessage(content="Hello")]
+        payload = llm._get_request_payload(messages)
+        assert payload["temperature"] == 0.7