From e72f3c26a03b5f3645000191dbd19bfdd323543c Mon Sep 17 00:00:00 2001
From: Jacob Lee
Date: Thu, 10 Apr 2025 23:12:57 -0700
Subject: [PATCH] fix(ollama): Remove redundant message from response_metadata
 (#30778)

---
 libs/partners/ollama/langchain_ollama/chat_models.py | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/libs/partners/ollama/langchain_ollama/chat_models.py b/libs/partners/ollama/langchain_ollama/chat_models.py
index 3801fc4d0d8..a163ce82f80 100644
--- a/libs/partners/ollama/langchain_ollama/chat_models.py
+++ b/libs/partners/ollama/langchain_ollama/chat_models.py
@@ -740,7 +740,9 @@ class ChatOllama(BaseChatModel):
                     tool_calls=_get_tool_calls_from_response(stream_resp),
                 ),
                 generation_info=(
-                    dict(stream_resp) if stream_resp.get("done") is True else None
+                    {k: v for k, v in stream_resp.items() if k != "message"}
+                    if stream_resp.get("done") is True
+                    else None
                 ),
             )
             if chunk.generation_info and (
@@ -792,7 +794,9 @@ class ChatOllama(BaseChatModel):
                     tool_calls=_get_tool_calls_from_response(stream_resp),
                 ),
                 generation_info=(
-                    dict(stream_resp) if stream_resp.get("done") is True else None
+                    {k: v for k, v in stream_resp.items() if k != "message"}
+                    if stream_resp.get("done") is True
+                    else None
                 ),
             )
             if chunk.generation_info and (