fix(ollama): Remove redundant message from response_metadata (#30778)

Author: Jacob Lee
Date: 2025-04-10 23:12:57 -07:00
Committed by: GitHub
Parent: f3c3ec9aec
Commit: e72f3c26a0

@@ -740,7 +740,9 @@ class ChatOllama(BaseChatModel):
                         tool_calls=_get_tool_calls_from_response(stream_resp),
                     ),
                     generation_info=(
-                        dict(stream_resp) if stream_resp.get("done") is True else None
+                        dict(stream_resp).pop("message", None)
+                        if stream_resp.get("done") is True
+                        else None
                     ),
                 )
                 if chunk.generation_info and (
@@ -792,7 +794,9 @@ class ChatOllama(BaseChatModel):
                         tool_calls=_get_tool_calls_from_response(stream_resp),
                     ),
                     generation_info=(
-                        dict(stream_resp) if stream_resp.get("done") is True else None
+                        dict(stream_resp).pop("message", None)
+                        if stream_resp.get("done") is True
+                        else None
                     ),
                 )
                 if chunk.generation_info and (
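
For context, the sketch below (not the library's code; the helper name and the example data are illustrative) shows the intent of the change: copy the raw Ollama stream response and drop its "message" entry, so the metadata recorded for the final chunk no longer duplicates the generated message content.

from typing import Any, Optional


def build_generation_info(stream_resp: dict[str, Any]) -> Optional[dict[str, Any]]:
    """Illustrative helper: prune the duplicated message from stream metadata."""
    # Only the final chunk of an Ollama stream ("done": True) carries the
    # full metadata (model name, timings, token counts, ...).
    if stream_resp.get("done") is not True:
        return None
    info = dict(stream_resp)   # shallow copy so the caller's dict is untouched
    info.pop("message", None)  # drop the redundant message payload
    return info


# Example: the pruned dict keeps everything except the message itself.
final_chunk = {
    "model": "llama3",
    "done": True,
    "message": {"role": "assistant", "content": "Hi"},
}
print(build_generation_info(final_chunk))
# {'model': 'llama3', 'done': True}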