Chat model stream readability nit (#10469)

Author: Bagatur
Date: 2023-09-11 18:05:24 -07:00
Committed by: GitHub
Parent: d45b042d3e
Commit: 6598178343
3 changed files with 10 additions and 5 deletions


@@ -222,7 +222,8 @@ class ChatKonko(ChatOpenAI):
         stream: Optional[bool] = None,
         **kwargs: Any,
     ) -> ChatResult:
-        if stream if stream is not None else self.streaming:
+        should_stream = stream if stream is not None else self.streaming
+        if should_stream:
             generation: Optional[ChatGenerationChunk] = None
             for chunk in self._stream(
                 messages=messages, stop=stop, run_manager=run_manager, **kwargs


@@ -318,7 +318,8 @@ class ChatLiteLLM(BaseChatModel):
         stream: Optional[bool] = None,
         **kwargs: Any,
     ) -> ChatResult:
-        if stream if stream is not None else self.streaming:
+        should_stream = stream if stream is not None else self.streaming
+        if should_stream:
             generation: Optional[ChatGenerationChunk] = None
             for chunk in self._stream(
                 messages=messages, stop=stop, run_manager=run_manager, **kwargs
@@ -418,7 +419,8 @@ class ChatLiteLLM(BaseChatModel):
         stream: Optional[bool] = None,
         **kwargs: Any,
     ) -> ChatResult:
-        if stream if stream is not None else self.streaming:
+        should_stream = stream if stream is not None else self.streaming
+        if should_stream:
             generation: Optional[ChatGenerationChunk] = None
             async for chunk in self._astream(
                 messages=messages, stop=stop, run_manager=run_manager, **kwargs


@@ -328,7 +328,8 @@ class ChatOpenAI(BaseChatModel):
         stream: Optional[bool] = None,
         **kwargs: Any,
     ) -> ChatResult:
-        if stream if stream is not None else self.streaming:
+        should_stream = stream if stream is not None else self.streaming
+        if should_stream:
             generation: Optional[ChatGenerationChunk] = None
             for chunk in self._stream(
                 messages=messages, stop=stop, run_manager=run_manager, **kwargs
@@ -408,7 +409,8 @@ class ChatOpenAI(BaseChatModel):
         stream: Optional[bool] = None,
         **kwargs: Any,
     ) -> ChatResult:
-        if stream if stream is not None else self.streaming:
+        should_stream = stream if stream is not None else self.streaming
+        if should_stream:
             generation: Optional[ChatGenerationChunk] = None
             async for chunk in self._astream(
                 messages=messages, stop=stop, run_manager=run_manager, **kwargs
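
For context, all three files apply the same refactor: the per-call stream argument, when given, overrides the model-level streaming default. A minimal standalone sketch of that resolution logic follows; the helper name resolve_should_stream is hypothetical and not part of this commit.

from typing import Optional

def resolve_should_stream(stream: Optional[bool], default_streaming: bool) -> bool:
    # An explicit per-call value wins; stream=None means "no preference",
    # so fall back to the model-level default.
    return stream if stream is not None else default_streaming

assert resolve_should_stream(None, True) is True     # no override -> use the default
assert resolve_should_stream(False, True) is False   # explicit per-call override wins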