mirror of
https://github.com/hwchase17/langchain.git
synced 2025-08-24 12:00:52 +00:00
fix(openai): allow temperature parameter for gpt-5-chat models (#32624)
This commit is contained in:
parent
61bc1bf9cc
commit
21f7a9a9e5
@ -716,7 +716,8 @@ class BaseChatOpenAI(BaseChatModel):
|
|||||||
"""Validate temperature parameter for different models.
|
"""Validate temperature parameter for different models.
|
||||||
|
|
||||||
- o1 models only allow temperature=1
|
- o1 models only allow temperature=1
|
||||||
- gpt-5 models only allow temperature=1 or unset (defaults to 1)
|
- gpt-5 models (excluding gpt-5-chat) only allow temperature=1 or unset
|
||||||
|
(defaults to 1)
|
||||||
"""
|
"""
|
||||||
model = values.get("model_name") or values.get("model") or ""
|
model = values.get("model_name") or values.get("model") or ""
|
||||||
|
|
||||||
@ -725,10 +726,12 @@ class BaseChatOpenAI(BaseChatModel):
|
|||||||
values["temperature"] = 1
|
values["temperature"] = 1
|
||||||
|
|
||||||
# For gpt-5 models, handle temperature restrictions
|
# For gpt-5 models, handle temperature restrictions
|
||||||
if model.startswith("gpt-5"):
|
# Note that gpt-5-chat models do support temperature
|
||||||
|
if model.startswith("gpt-5") and "chat" not in model:
|
||||||
temperature = values.get("temperature")
|
temperature = values.get("temperature")
|
||||||
if temperature is not None and temperature != 1:
|
if temperature is not None and temperature != 1:
|
||||||
# For gpt-5, only temperature=1 is supported, so remove non-defaults
|
# For gpt-5 (non-chat), only temperature=1 is supported
|
||||||
|
# So we remove any non-defaults
|
||||||
values.pop("temperature", None)
|
values.pop("temperature", None)
|
||||||
|
|
||||||
return values
|
return values
|
||||||
@ -3520,7 +3523,7 @@ def _construct_responses_api_payload(
|
|||||||
|
|
||||||
# Remove temperature parameter for models that don't support it in responses API
|
# Remove temperature parameter for models that don't support it in responses API
|
||||||
model = payload.get("model", "")
|
model = payload.get("model", "")
|
||||||
if model.startswith("gpt-5"):
|
if model.startswith("gpt-5") and "chat" not in model: # gpt-5-chat supports
|
||||||
payload.pop("temperature", None)
|
payload.pop("temperature", None)
|
||||||
|
|
||||||
payload["input"] = _construct_responses_api_input(messages)
|
payload["input"] = _construct_responses_api_input(messages)
|
||||||
|
@ -2678,3 +2678,21 @@ def test_extra_body_with_model_kwargs() -> None:
|
|||||||
assert payload["extra_body"]["ttl"] == 600
|
assert payload["extra_body"]["ttl"] == 600
|
||||||
assert payload["custom_non_openai_param"] == "test_value"
|
assert payload["custom_non_openai_param"] == "test_value"
|
||||||
assert payload["temperature"] == 0.5
|
assert payload["temperature"] == 0.5
|
||||||
|
|
||||||
@pytest.mark.parametrize("use_responses_api", [False, True])
def test_gpt_5_temperature(use_responses_api: bool) -> None:
    """Non-chat gpt-5 models drop ``temperature``; gpt-5-chat keeps it.

    Runs against both the Chat Completions and Responses API payload paths
    (parametrized via ``use_responses_api``).
    """
    messages = [HumanMessage(content="Hello")]

    # gpt-5 family (non-chat): temperature must be stripped from the payload,
    # since the API only accepts the default (1) for these models.
    llm = ChatOpenAI(
        model="gpt-5-nano", temperature=0.5, use_responses_api=use_responses_api
    )
    payload = llm._get_request_payload(messages)
    assert "temperature" not in payload  # not supported for gpt-5 family models

    # gpt-5-chat is the exception: it supports arbitrary temperatures, so the
    # user-provided value must pass through unchanged.
    llm = ChatOpenAI(
        model="gpt-5-chat", temperature=0.5, use_responses_api=use_responses_api
    )
    payload = llm._get_request_payload(messages)
    assert payload["temperature"] == 0.5  # gpt-5-chat is exception
|
Loading…
Reference in New Issue
Block a user