From eec4df0d0a4a9387aa9bbdb456d951b3004b2b3c Mon Sep 17 00:00:00 2001
From: Bagatur
Date: Tue, 26 Nov 2024 15:55:47 -0800
Subject: [PATCH] openai[patch]: azure max completion tokens fix

---
 .../openai/langchain_openai/chat_models/azure.py  | 12 ++++++++++++
 .../integration_tests/chat_models/test_azure.py   | 10 ++++++++++
 2 files changed, 22 insertions(+)

diff --git a/libs/partners/openai/langchain_openai/chat_models/azure.py b/libs/partners/openai/langchain_openai/chat_models/azure.py
index 2e1e5f8abfe..0805dfad91e 100644
--- a/libs/partners/openai/langchain_openai/chat_models/azure.py
+++ b/libs/partners/openai/langchain_openai/chat_models/azure.py
@@ -737,3 +737,15 @@ class AzureChatOpenAI(BaseChatOpenAI):
             )
 
         return chat_result
+
+    @property
+    def _default_params(self) -> Dict[str, Any]:
+        """Get the default parameters for calling OpenAI API."""
+        params = super()._default_params
+        if (
+            "o1" in params["model"]
+            and "max_tokens" in params
+            and "max_completion_tokens" not in params
+        ):
+            params["max_completion_tokens"] = params.pop("max_tokens")
+        return params
diff --git a/libs/partners/openai/tests/integration_tests/chat_models/test_azure.py b/libs/partners/openai/tests/integration_tests/chat_models/test_azure.py
index 4ed531ad119..153152b144f 100644
--- a/libs/partners/openai/tests/integration_tests/chat_models/test_azure.py
+++ b/libs/partners/openai/tests/integration_tests/chat_models/test_azure.py
@@ -262,3 +262,13 @@ async def test_json_mode_async(llm: AzureChatOpenAI) -> None:
     assert isinstance(full, AIMessageChunk)
     assert isinstance(full.content, str)
     assert json.loads(full.content) == {"a": 1}
+
+
+def test_o1_max_tokens() -> None:
+    response = ChatOpenAI(model="o1-mini", max_tokens=10).invoke("how are you")  # type: ignore[call-arg]
+    assert isinstance(response, AIMessage)
+
+    response = ChatOpenAI(model="gpt-4o", max_completion_tokens=10).invoke(
+        "how are you"
+    )
+    assert isinstance(response, AIMessage)
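
Note (illustrative, not part of the patch): with the property added above, an
AzureChatOpenAI instance whose model name contains "o1" and that was configured
with the legacy max_tokens parameter should expose max_completion_tokens in its
default request parameters instead. The sketch below assumes placeholder Azure
endpoint, key, and API-version values (they are not real) and only inspects the
private _default_params property; no request is sent.

    # Illustration only -- placeholder credentials, nothing goes over the network.
    from langchain_openai import AzureChatOpenAI

    llm = AzureChatOpenAI(
        model="o1-mini",    # model name containing "o1"
        max_tokens=10,      # legacy token-limit parameter
        azure_endpoint="https://example.openai.azure.com",  # placeholder
        api_key="placeholder",                              # placeholder
        api_version="2024-08-01-preview",                   # placeholder
    )

    # The o1 code path re-keys max_tokens as max_completion_tokens.
    params = llm._default_params
    assert params["max_completion_tokens"] == 10
    assert "max_tokens" not in params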