diff --git a/libs/partners/openai/langchain_openai/chat_models/base.py b/libs/partners/openai/langchain_openai/chat_models/base.py index 082a167c97d..fefa81ea420 100644 --- a/libs/partners/openai/langchain_openai/chat_models/base.py +++ b/libs/partners/openai/langchain_openai/chat_models/base.py @@ -716,7 +716,8 @@ class BaseChatOpenAI(BaseChatModel): """Validate temperature parameter for different models. - o1 models only allow temperature=1 - - gpt-5 models only allow temperature=1 or unset (defaults to 1) + - gpt-5 models (excluding gpt-5-chat) only allow temperature=1 or unset + (defaults to 1) """ model = values.get("model_name") or values.get("model") or "" @@ -725,10 +726,12 @@ class BaseChatOpenAI(BaseChatModel): values["temperature"] = 1 # For gpt-5 models, handle temperature restrictions - if model.startswith("gpt-5"): + # Note that gpt-5-chat models do support temperature + if model.startswith("gpt-5") and "chat" not in model: temperature = values.get("temperature") if temperature is not None and temperature != 1: - # For gpt-5, only temperature=1 is supported, so remove non-defaults + # For gpt-5 (non-chat), only temperature=1 is supported + # So we remove any non-defaults values.pop("temperature", None) return values @@ -3520,7 +3523,7 @@ def _construct_responses_api_payload( # Remove temperature parameter for models that don't support it in responses API model = payload.get("model", "") - if model.startswith("gpt-5"): + if model.startswith("gpt-5") and "chat" not in model: # gpt-5-chat supports temperature payload.pop("temperature", None) payload["input"] = _construct_responses_api_input(messages) diff --git a/libs/partners/openai/tests/unit_tests/chat_models/test_base.py b/libs/partners/openai/tests/unit_tests/chat_models/test_base.py index 761f2c6f101..68caee5d63f 100644 --- a/libs/partners/openai/tests/unit_tests/chat_models/test_base.py +++ b/libs/partners/openai/tests/unit_tests/chat_models/test_base.py @@ -2678,3 +2678,21 @@ def 
test_extra_body_with_model_kwargs() -> None: assert payload["extra_body"]["ttl"] == 600 assert payload["custom_non_openai_param"] == "test_value" assert payload["temperature"] == 0.5 + + +@pytest.mark.parametrize("use_responses_api", [False, True]) +def test_gpt_5_temperature(use_responses_api: bool) -> None: + llm = ChatOpenAI( + model="gpt-5-nano", temperature=0.5, use_responses_api=use_responses_api + ) + + messages = [HumanMessage(content="Hello")] + payload = llm._get_request_payload(messages) + assert "temperature" not in payload # not supported for gpt-5 family models + + llm = ChatOpenAI( + model="gpt-5-chat", temperature=0.5, use_responses_api=use_responses_api + ) + messages = [HumanMessage(content="Hello")] + payload = llm._get_request_payload(messages) + assert payload["temperature"] == 0.5 # gpt-5-chat is an exception