fix(openai): Fix Azure OpenAI Responses API model field issue (#32649)

This commit is contained in:
JunHyungKang
2025-09-08 23:08:35 +09:00
committed by GitHub
parent 5b0a55ad35
commit 6ea06ca972
4 changed files with 61 additions and 3 deletions

View File

@@ -754,6 +754,26 @@ class AzureChatOpenAI(BaseChatOpenAI):
return chat_result
def _get_request_payload(
    self,
    input_: LanguageModelInput,
    *,
    stop: Optional[list[str]] = None,
    **kwargs: Any,
) -> dict:
    """Build the request payload, filling in the Azure deployment name.

    Azure's Responses API expects the deployment name where the plain
    OpenAI client would send a model name, so when the parent class
    produces a Responses-API payload with no ``model`` set, substitute
    ``self.deployment_name``.
    """
    request = super()._get_request_payload(input_, stop=stop, **kwargs)
    needs_deployment_as_model = (
        self._use_responses_api(request)
        and not request.get("model")
        and self.deployment_name
    )
    if needs_deployment_as_model:
        request["model"] = self.deployment_name
    return request
def _stream(self, *args: Any, **kwargs: Any) -> Iterator[ChatGenerationChunk]:
"""Route to Chat Completions or Responses API."""
if self._use_responses_api({**kwargs, **self.model_kwargs}):

View File

@@ -3541,7 +3541,7 @@ def _construct_responses_api_payload(
payload["reasoning"] = {"effort": payload.pop("reasoning_effort")}
# Remove temperature parameter for models that don't support it in responses API
model = payload.get("model", "")
model = payload.get("model") or ""
if model.startswith("gpt-5") and "chat" not in model: # gpt-5-chat supports
payload.pop("temperature", None)