From 181bb91ce06fd6755e2b2dca5552c8a83e9c6a54 Mon Sep 17 00:00:00 2001 From: Alexey Bondarenko Date: Wed, 10 Sep 2025 21:13:28 +0600 Subject: [PATCH] fix(ollama): Fix handling message content lists (#32881) The Ollama chat model adapter does not support all of the possible message content formats. That leads to the Ollama model adapter crashing on some messages from different models (e.g. Gemini 2.5 Flash). These changes should fix one known scenario - when `content` is a list containing a string. --- libs/partners/ollama/langchain_ollama/chat_models.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/libs/partners/ollama/langchain_ollama/chat_models.py b/libs/partners/ollama/langchain_ollama/chat_models.py index 1c44db0bd1e..2a40c147377 100644 --- a/libs/partners/ollama/langchain_ollama/chat_models.py +++ b/libs/partners/ollama/langchain_ollama/chat_models.py @@ -661,8 +661,10 @@ class ChatOllama(BaseChatModel): if isinstance(message.content, str): content = message.content else: - for content_part in cast(list[dict], message.content): - if content_part.get("type") == "text": + for content_part in message.content: + if isinstance(content_part, str): + content += f"\n{content_part}" + elif content_part.get("type") == "text": content += f"\n{content_part['text']}" elif content_part.get("type") == "tool_use": continue