[partner]: ollama llm fix (#24790)

This commit is contained in:
Isaac Francisco 2024-07-29 13:00:02 -07:00 committed by GitHub
parent 4bb1a11e02
commit 78d97b49d9
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

View File

@@ -304,11 +304,7 @@ class OllamaLLM(BaseLLM):
         for stream_resp in self._create_generate_stream(prompt, stop, **kwargs):
             if not isinstance(stream_resp, str):
                 chunk = GenerationChunk(
-                    text=(
-                        stream_resp["message"]["content"]
-                        if "message" in stream_resp
-                        else ""
-                    ),
+                    text=(stream_resp.get("response", "")),
                     generation_info=(
                         dict(stream_resp) if stream_resp.get("done") is True else None
                     ),
@@ -330,11 +326,7 @@ class OllamaLLM(BaseLLM):
         async for stream_resp in self._acreate_generate_stream(prompt, stop, **kwargs):
             if not isinstance(stream_resp, str):
                 chunk = GenerationChunk(
-                    text=(
-                        stream_resp["message"]["content"]
-                        if "message" in stream_resp
-                        else ""
-                    ),
+                    text=(stream_resp.get("response", "")),
                     generation_info=(
                         dict(stream_resp) if stream_resp.get("done") is True else None
                     ),