From 78d97b49d9c72a1bd97deeb39dd53d43b8bc488f Mon Sep 17 00:00:00 2001
From: Isaac Francisco <78627776+isahers1@users.noreply.github.com>
Date: Mon, 29 Jul 2024 13:00:02 -0700
Subject: [PATCH] [partner]: ollama llm fix (#24790)

---
 libs/partners/ollama/langchain_ollama/llms.py | 12 ++----------
 1 file changed, 2 insertions(+), 10 deletions(-)

diff --git a/libs/partners/ollama/langchain_ollama/llms.py b/libs/partners/ollama/langchain_ollama/llms.py
index e2bf73cfabd..fd7b49dcd90 100644
--- a/libs/partners/ollama/langchain_ollama/llms.py
+++ b/libs/partners/ollama/langchain_ollama/llms.py
@@ -304,11 +304,7 @@ class OllamaLLM(BaseLLM):
         for stream_resp in self._create_generate_stream(prompt, stop, **kwargs):
             if not isinstance(stream_resp, str):
                 chunk = GenerationChunk(
-                    text=(
-                        stream_resp["message"]["content"]
-                        if "message" in stream_resp
-                        else ""
-                    ),
+                    text=(stream_resp.get("response", "")),
                     generation_info=(
                         dict(stream_resp) if stream_resp.get("done") is True else None
                     ),
@@ -330,11 +326,7 @@ class OllamaLLM(BaseLLM):
         async for stream_resp in self._acreate_generate_stream(prompt, stop, **kwargs):
             if not isinstance(stream_resp, str):
                 chunk = GenerationChunk(
-                    text=(
-                        stream_resp["message"]["content"]
-                        if "message" in stream_resp
-                        else ""
-                    ),
+                    text=(stream_resp.get("response", "")),
                     generation_info=(
                         dict(stream_resp) if stream_resp.get("done") is True else None
                     ),
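
---

For reviewers, a minimal sketch of the streaming path this patch fixes. It
assumes a local Ollama server with a pulled model; the model name "llama3"
is illustrative. OllamaLLM streams from Ollama's /api/generate endpoint,
whose chunks carry the generated text under the "response" key, which is
the key the patched code reads. The old code looked for the chat endpoint's
{"message": {"content": ...}} shape, which /api/generate never returns, so
every chunk's text came back as the empty-string fallback.

    from langchain_ollama import OllamaLLM

    # Model name is illustrative; any locally pulled model works.
    llm = OllamaLLM(model="llama3")

    # Each yielded chunk is the text from one streamed /api/generate
    # response object; before this fix, each chunk was empty.
    for chunk in llm.stream("Why is the sky blue?"):
        print(chunk, end="", flush=True)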