From ba467f1a36c2b24a2447d6ccbef585f94c17ff81 Mon Sep 17 00:00:00 2001
From: Subhrajyoty Roy
Date: Thu, 26 Sep 2024 22:17:28 +0530
Subject: [PATCH] community[patch]: callback before yield for gigachat (#26881)

**Description:** Moves yield to after callback for `_stream` and `_astream`
functions for the gigachat model in the community llm package

**Issue:** #16913
---
 libs/community/langchain_community/llms/gigachat.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/libs/community/langchain_community/llms/gigachat.py b/libs/community/langchain_community/llms/gigachat.py
index 468d68cd9f4..a867bc19437 100644
--- a/libs/community/langchain_community/llms/gigachat.py
+++ b/libs/community/langchain_community/llms/gigachat.py
@@ -311,9 +311,9 @@ class GigaChat(_BaseGigaChat, BaseLLM):
         for chunk in self._client.stream(payload):
             if chunk.choices:
                 content = chunk.choices[0].delta.content
-                yield GenerationChunk(text=content)
                 if run_manager:
                     run_manager.on_llm_new_token(content)
+                yield GenerationChunk(text=content)
 
     async def _astream(
         self,
@@ -327,9 +327,9 @@ class GigaChat(_BaseGigaChat, BaseLLM):
         async for chunk in self._client.astream(payload):
             if chunk.choices:
                 content = chunk.choices[0].delta.content
-                yield GenerationChunk(text=content)
                 if run_manager:
                     await run_manager.on_llm_new_token(content)
+                yield GenerationChunk(text=content)
 
     model_config = ConfigDict(
         extra="allow",