From 85e36b0f50d8e01adf8801afa8bcb89d5013bca4 Mon Sep 17 00:00:00 2001 From: Krista Pratico Date: Thu, 27 Jun 2024 09:23:05 -0700 Subject: [PATCH] partners[openai]: only add stream_options to kwargs if requested (#23552) - **Description:** This PR https://github.com/langchain-ai/langchain/pull/22854 added the ability to pass `stream_options` through to the openai service to get token usage information in the response. Currently OpenAI supports this parameter, but Azure OpenAI does not yet. For users who proxy their calls to both services through ChatOpenAI, requests that include `stream_options` fail when targeting Azure OpenAI (see related discussion opened in openai-python: https://github.com/openai/openai-python/issues/1469#issuecomment-2192658630). > Error code: 400 - {'error': {'code': None, 'message': 'Unrecognized request argument supplied: stream_options', 'param': None, 'type': 'invalid_request_error'}} This PR fixes the issue by only adding `stream_options` to the request if token usage is actually requested by the user (i.e. `stream_usage` is set to True). 
If I'm not mistaken, we have a test case that already covers this scenario: https://github.com/langchain-ai/langchain/blob/master/libs/partners/openai/tests/integration_tests/chat_models/test_base.py#L398-L399 - **Issue:** Issue opened in openai-python: https://github.com/openai/openai-python/issues/1469 - **Dependencies:** N/A - **Twitter handle:** N/A --------- Co-authored-by: Chester Curme --- .../openai/langchain_openai/chat_models/base.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/libs/partners/openai/langchain_openai/chat_models/base.py b/libs/partners/openai/langchain_openai/chat_models/base.py index 1c50e80b902..b9c758024ab 100644 --- a/libs/partners/openai/langchain_openai/chat_models/base.py +++ b/libs/partners/openai/langchain_openai/chat_models/base.py @@ -1672,7 +1672,13 @@ class ChatOpenAI(BaseChatOpenAI): ) -> Iterator[ChatGenerationChunk]: """Set default stream_options.""" stream_usage = self._should_stream_usage(stream_usage, **kwargs) - kwargs["stream_options"] = {"include_usage": stream_usage} + # Note: stream_options is not a valid parameter for Azure OpenAI. + # To support users proxying Azure through ChatOpenAI, here we only specify + # stream_options if include_usage is set to True. + # See https://learn.microsoft.com/en-us/azure/ai-services/openai/whats-new + # for release notes. + if stream_usage: + kwargs["stream_options"] = {"include_usage": stream_usage} return super()._stream(*args, **kwargs) @@ -1681,7 +1687,8 @@ class ChatOpenAI(BaseChatOpenAI): ) -> AsyncIterator[ChatGenerationChunk]: """Set default stream_options.""" stream_usage = self._should_stream_usage(stream_usage, **kwargs) - kwargs["stream_options"] = {"include_usage": stream_usage} + if stream_usage: + kwargs["stream_options"] = {"include_usage": stream_usage} async for chunk in super()._astream(*args, **kwargs): yield chunk