fix(openai): allow temperature parameter for gpt-5-chat models (#32624)

This commit is contained in:
Alex Naidis 2025-08-21 22:40:10 +02:00 committed by GitHub
parent 61bc1bf9cc
commit 21f7a9a9e5
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 25 additions and 4 deletions

View File

@@ -716,7 +716,8 @@ class BaseChatOpenAI(BaseChatModel):
"""Validate temperature parameter for different models.
- o1 models only allow temperature=1
- gpt-5 models only allow temperature=1 or unset (defaults to 1)
- gpt-5 models (excluding gpt-5-chat) only allow temperature=1 or unset
(defaults to 1)
"""
model = values.get("model_name") or values.get("model") or ""
@@ -725,10 +726,12 @@ class BaseChatOpenAI(BaseChatModel):
values["temperature"] = 1
# For gpt-5 models, handle temperature restrictions
if model.startswith("gpt-5"):
# Note that gpt-5-chat models do support temperature
if model.startswith("gpt-5") and "chat" not in model:
temperature = values.get("temperature")
if temperature is not None and temperature != 1:
# For gpt-5, only temperature=1 is supported, so remove non-defaults
# For gpt-5 (non-chat), only temperature=1 is supported
# So we remove any non-defaults
values.pop("temperature", None)
return values
@@ -3520,7 +3523,7 @@ def _construct_responses_api_payload(
# Remove temperature parameter for models that don't support it in responses API
model = payload.get("model", "")
if model.startswith("gpt-5"):
if model.startswith("gpt-5") and "chat" not in model: # gpt-5-chat supports
payload.pop("temperature", None)
payload["input"] = _construct_responses_api_input(messages)

View File

@@ -2678,3 +2678,21 @@ def test_extra_body_with_model_kwargs() -> None:
assert payload["extra_body"]["ttl"] == 600
assert payload["custom_non_openai_param"] == "test_value"
assert payload["temperature"] == 0.5
@pytest.mark.parametrize("use_responses_api", [False, True])
def test_gpt_5_temperature(use_responses_api: bool) -> None:
llm = ChatOpenAI(
model="gpt-5-nano", temperature=0.5, use_responses_api=use_responses_api
)
messages = [HumanMessage(content="Hello")]
payload = llm._get_request_payload(messages)
assert "temperature" not in payload # not supported for gpt-5 family models
llm = ChatOpenAI(
model="gpt-5-chat", temperature=0.5, use_responses_api=use_responses_api
)
messages = [HumanMessage(content="Hello")]
payload = llm._get_request_payload(messages)
assert payload["temperature"] == 0.5 # gpt-5-chat is exception