From e3f15f0a47422f015082cd3e5dd99adf1d5be293 Mon Sep 17 00:00:00 2001
From: ccurme
Date: Mon, 7 Apr 2025 16:27:58 -0400
Subject: [PATCH] ollama[patch]: add model_name to response metadata (#30706)

Fixes [this standard test](https://python.langchain.com/api_reference/standard_tests/integration_tests/langchain_tests.integration_tests.chat_models.ChatModelIntegrationTests.html#langchain_tests.integration_tests.chat_models.ChatModelIntegrationTests.test_usage_metadata).
---
 libs/partners/ollama/langchain_ollama/chat_models.py | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/libs/partners/ollama/langchain_ollama/chat_models.py b/libs/partners/ollama/langchain_ollama/chat_models.py
index f9c6ca060d1..3801fc4d0d8 100644
--- a/libs/partners/ollama/langchain_ollama/chat_models.py
+++ b/libs/partners/ollama/langchain_ollama/chat_models.py
@@ -743,6 +743,10 @@ class ChatOllama(BaseChatModel):
                         dict(stream_resp) if stream_resp.get("done") is True else None
                     ),
                 )
+                if chunk.generation_info and (
+                    model := chunk.generation_info.get("model")
+                ):
+                    chunk.generation_info["model_name"] = model  # backwards compat
                 if self.extract_reasoning:
                     message, is_thinking = self._extract_reasoning(
                         chunk.message, is_thinking
@@ -791,6 +795,10 @@ class ChatOllama(BaseChatModel):
                         dict(stream_resp) if stream_resp.get("done") is True else None
                     ),
                 )
+                if chunk.generation_info and (
+                    model := chunk.generation_info.get("model")
+                ):
+                    chunk.generation_info["model_name"] = model  # backwards compat
                 if self.extract_reasoning:
                     message, is_thinking = self._extract_reasoning(
                         chunk.message, is_thinking
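
A minimal usage sketch of what this patch enables (assumes a local Ollama server is running and a model has been pulled; the model name `llama3` is illustrative): the standard `model_name` key now appears in `response_metadata`, which is what the linked standard test checks.

```python
# Sketch only: assumes `ollama serve` is running locally and that
# a model has been pulled; "llama3" is an illustrative model name.
from langchain_ollama import ChatOllama

llm = ChatOllama(model="llama3")

# With this patch, the final streamed chunk (where Ollama reports
# done=True) carries "model_name" alongside the raw "model" key,
# and it surfaces in the chunk's response metadata.
for chunk in llm.stream("Hello!"):
    if chunk.response_metadata:
        print(chunk.response_metadata.get("model_name"))  # e.g. "llama3"

# Non-streaming calls aggregate the same stream internally, so the
# key is also present on invoke() results:
result = llm.invoke("Hello!")
print(result.response_metadata.get("model_name"))
```

Copying `model` into `model_name` rather than renaming it keeps the raw Ollama key intact for existing consumers, hence the `# backwards compat` note in the diff.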