partners/mistralai: Fix KeyError in Vertex AI stream (#28624)

- **Description:** Streaming a response from a Mistral model on Vertex AI raises a `KeyError`, because the final chunk of the stream does not contain the `choices` key. The fix is to access the key safely with `get()`, so chunks without it are skipped (a short illustrative sketch follows the fields below).
  - **Issue:** https://github.com/langchain-ai/langchain/issues/27886
  - **Dependencies:**
  - **Twitter handle:**
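The following is a minimal sketch of the failure mode and the fix; it is not part of the commit, and the chunk payload shown is an assumption rather than the exact Vertex AI wire format:

    # Illustrative only: a final stream chunk that carries usage metadata
    # but no "choices" key (assumed shape, not the exact Vertex AI payload).
    final_chunk = {"usage": {"prompt_tokens": 10, "completion_tokens": 42}}

    # Old behavior: direct indexing raises KeyError on the last chunk.
    try:
        if len(final_chunk["choices"]) == 0:
            pass
    except KeyError as exc:
        print(f"KeyError: {exc}")  # KeyError: 'choices'

    # New behavior: get() falls back to an empty list, so the chunk is skipped.
    if len(final_chunk.get("choices", [])) == 0:
        print("chunk skipped safely")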
Author: nikitajoyn
Date: 2024-12-09 20:14:58 +01:00 (committed by GitHub)
parent bdb4cf7cc0
commit 9fcd203556


@@ -595,7 +595,7 @@ class ChatMistralAI(BaseChatModel):
         for chunk in self.completion_with_retry(
             messages=message_dicts, run_manager=run_manager, **params
         ):
-            if len(chunk["choices"]) == 0:
+            if len(chunk.get("choices", [])) == 0:
                 continue
             new_chunk = _convert_chunk_to_message_chunk(chunk, default_chunk_class)
             # make future chunks same type as first chunk
@@ -621,7 +621,7 @@ class ChatMistralAI(BaseChatModel):
         async for chunk in await acompletion_with_retry(
             self, messages=message_dicts, run_manager=run_manager, **params
         ):
-            if len(chunk["choices"]) == 0:
+            if len(chunk.get("choices", [])) == 0:
                 continue
             new_chunk = _convert_chunk_to_message_chunk(chunk, default_chunk_class)
             # make future chunks same type as first chunk
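
For context, here is a small simulation (not from the repository; the chunk dicts are invented) of how the guarded loop now skips a trailing metadata-only chunk instead of raising:

    # Illustrative simulation only: the chunk dicts below are assumptions and
    # do not reflect the exact Vertex AI wire format.
    def fake_stream():
        yield {"choices": [{"delta": {"content": "Hel"}}]}
        yield {"choices": [{"delta": {"content": "lo"}}]}
        yield {"usage": {"total_tokens": 12}}  # final chunk: no "choices" key

    pieces = []
    for chunk in fake_stream():
        # Same guard as in the patched _stream/_astream methods.
        if len(chunk.get("choices", [])) == 0:
            continue
        pieces.append(chunk["choices"][0]["delta"]["content"])

    print("".join(pieces))  # -> "Hello"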