From 659817834369799ff7dfba62d79ae6b9e7e8ec7e Mon Sep 17 00:00:00 2001
From: Bagatur <22008038+baskaryan@users.noreply.github.com>
Date: Mon, 11 Sep 2023 18:05:24 -0700
Subject: [PATCH] Chat model stream readability nit (#10469)

---
 libs/langchain/langchain/chat_models/konko.py   | 3 ++-
 libs/langchain/langchain/chat_models/litellm.py | 6 ++++--
 libs/langchain/langchain/chat_models/openai.py  | 6 ++++--
 3 files changed, 10 insertions(+), 5 deletions(-)

diff --git a/libs/langchain/langchain/chat_models/konko.py b/libs/langchain/langchain/chat_models/konko.py
index b7b9bc65810..e27ee42057d 100644
--- a/libs/langchain/langchain/chat_models/konko.py
+++ b/libs/langchain/langchain/chat_models/konko.py
@@ -222,7 +222,8 @@ class ChatKonko(ChatOpenAI):
         stream: Optional[bool] = None,
         **kwargs: Any,
     ) -> ChatResult:
-        if stream if stream is not None else self.streaming:
+        should_stream = stream if stream is not None else self.streaming
+        if should_stream:
             generation: Optional[ChatGenerationChunk] = None
             for chunk in self._stream(
                 messages=messages, stop=stop, run_manager=run_manager, **kwargs
diff --git a/libs/langchain/langchain/chat_models/litellm.py b/libs/langchain/langchain/chat_models/litellm.py
index 9d263872fff..f9ecf67073a 100644
--- a/libs/langchain/langchain/chat_models/litellm.py
+++ b/libs/langchain/langchain/chat_models/litellm.py
@@ -318,7 +318,8 @@ class ChatLiteLLM(BaseChatModel):
         stream: Optional[bool] = None,
         **kwargs: Any,
     ) -> ChatResult:
-        if stream if stream is not None else self.streaming:
+        should_stream = stream if stream is not None else self.streaming
+        if should_stream:
             generation: Optional[ChatGenerationChunk] = None
             for chunk in self._stream(
                 messages=messages, stop=stop, run_manager=run_manager, **kwargs
@@ -418,7 +419,8 @@ class ChatLiteLLM(BaseChatModel):
         stream: Optional[bool] = None,
         **kwargs: Any,
     ) -> ChatResult:
-        if stream if stream is not None else self.streaming:
+        should_stream = stream if stream is not None else self.streaming
+        if should_stream:
             generation: Optional[ChatGenerationChunk] = None
             async for chunk in self._astream(
                 messages=messages, stop=stop, run_manager=run_manager, **kwargs
diff --git a/libs/langchain/langchain/chat_models/openai.py b/libs/langchain/langchain/chat_models/openai.py
index 5d944852d66..47f29eaf2ae 100644
--- a/libs/langchain/langchain/chat_models/openai.py
+++ b/libs/langchain/langchain/chat_models/openai.py
@@ -328,7 +328,8 @@ class ChatOpenAI(BaseChatModel):
         stream: Optional[bool] = None,
         **kwargs: Any,
     ) -> ChatResult:
-        if stream if stream is not None else self.streaming:
+        should_stream = stream if stream is not None else self.streaming
+        if should_stream:
             generation: Optional[ChatGenerationChunk] = None
             for chunk in self._stream(
                 messages=messages, stop=stop, run_manager=run_manager, **kwargs
@@ -408,7 +409,8 @@ class ChatOpenAI(BaseChatModel):
         stream: Optional[bool] = None,
         **kwargs: Any,
     ) -> ChatResult:
-        if stream if stream is not None else self.streaming:
+        should_stream = stream if stream is not None else self.streaming
+        if should_stream:
             generation: Optional[ChatGenerationChunk] = None
             async for chunk in self._astream(
                 messages=messages, stop=stop, run_manager=run_manager, **kwargs