Mirror of https://github.com/hwchase17/langchain.git, synced 2025-06-21 22:29:51 +00:00.
ollama[patch]: add model_name to response metadata (#30706)
Fixes [this standard test](https://python.langchain.com/api_reference/standard_tests/integration_tests/langchain_tests.integration_tests.chat_models.ChatModelIntegrationTests.html#langchain_tests.integration_tests.chat_models.ChatModelIntegrationTests.test_usage_metadata).
This commit is contained in:
parent: e106e9602f
commit: e3f15f0a47
@@ -743,6 +743,10 @@ class ChatOllama(BaseChatModel):
                     dict(stream_resp) if stream_resp.get("done") is True else None
                 ),
             )
+            if chunk.generation_info and (
+                model := chunk.generation_info.get("model")
+            ):
+                chunk.generation_info["model_name"] = model  # backwards compat
             if self.extract_reasoning:
                 message, is_thinking = self._extract_reasoning(
                     chunk.message, is_thinking
@@ -791,6 +795,10 @@ class ChatOllama(BaseChatModel):
                     dict(stream_resp) if stream_resp.get("done") is True else None
                 ),
             )
+            if chunk.generation_info and (
+                model := chunk.generation_info.get("model")
+            ):
+                chunk.generation_info["model_name"] = model  # backwards compat
             if self.extract_reasoning:
                 message, is_thinking = self._extract_reasoning(
                     chunk.message, is_thinking
Loading…
Reference in New Issue
Block a user