From 9fcd2035568459d1d84327e2d2d3256915df26d3 Mon Sep 17 00:00:00 2001 From: nikitajoyn <101583363+nikitajoyn@users.noreply.github.com> Date: Mon, 9 Dec 2024 20:14:58 +0100 Subject: [PATCH] partners/mistralai: Fix KeyError in Vertex AI stream (#28624) - **Description:** Streaming a response from a Mistral model using Vertex AI raises a KeyError when trying to access the `choices` key, which the last chunk doesn't have. The fix is to access the key safely using `get()`. - **Issue:** https://github.com/langchain-ai/langchain/issues/27886 - **Dependencies:** - **Twitter handle:** --- libs/partners/mistralai/langchain_mistralai/chat_models.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/libs/partners/mistralai/langchain_mistralai/chat_models.py b/libs/partners/mistralai/langchain_mistralai/chat_models.py index 9e68e23ae45..be973f3b9ec 100644 --- a/libs/partners/mistralai/langchain_mistralai/chat_models.py +++ b/libs/partners/mistralai/langchain_mistralai/chat_models.py @@ -595,7 +595,7 @@ class ChatMistralAI(BaseChatModel): for chunk in self.completion_with_retry( messages=message_dicts, run_manager=run_manager, **params ): - if len(chunk["choices"]) == 0: + if len(chunk.get("choices", [])) == 0: continue new_chunk = _convert_chunk_to_message_chunk(chunk, default_chunk_class) # make future chunks same type as first chunk @@ -621,7 +621,7 @@ class ChatMistralAI(BaseChatModel): async for chunk in await acompletion_with_retry( self, messages=message_dicts, run_manager=run_manager, **params ): - if len(chunk["choices"]) == 0: + if len(chunk.get("choices", [])) == 0: continue new_chunk = _convert_chunk_to_message_chunk(chunk, default_chunk_class) # make future chunks same type as first chunk