From b1d84fb317579acba04fdb12211e7b843359549b Mon Sep 17 00:00:00 2001
From: xuyuan23 <643854343@qq.com>
Date: Wed, 26 Jul 2023 17:02:48 +0800
Subject: [PATCH] fix: fix bard LLM request param error

Fix the Bard LLM request param error: pass the last user message's
'content' field to get_answer instead of the whole message dict, and
drop a leftover module-level debug print.
---
 pilot/model/proxy/proxy_llms/bard.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/pilot/model/proxy/proxy_llms/bard.py b/pilot/model/proxy/proxy_llms/bard.py
index 12d743476..72070dac6 100644
--- a/pilot/model/proxy/proxy_llms/bard.py
+++ b/pilot/model/proxy/proxy_llms/bard.py
@@ -31,10 +31,9 @@ def bard_generate_stream(model, tokenizer, params, device, context_len=2048):
         history.remove(last_user_input)
         history.append(last_user_input)
 
-    response = bardapi.core.Bard(token).get_answer(last_user_input)
+    response = bardapi.core.Bard(token).get_answer(last_user_input["content"])
     if response is not None and response.get("content") is not None:
         yield str(response["content"])
     yield f"bard response error: {str(response)}"
 
-print(bard_generate_stream("bard_proxy_llm", None, {"input": "hi"}, None, 2048))
 
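
For context on the fix, a minimal self-contained sketch (the sample
message below is illustrative, not taken from the repo): entries in
`history` are role/content dicts built from the prompt, while bardapi's
get_answer takes a plain string, so passing the whole dict triggered the
request param error.

    # Chat history entries are dicts; Bard's get_answer expects a string.
    last_user_input = {"role": "user", "content": "hi"}

    # Passing the whole dict was the bug; its "content" field is the prompt.
    prompt = last_user_input["content"]
    assert isinstance(prompt, str)
    print(prompt)  # hi

The dropped module-level print was leftover debugging; since
bard_generate_stream is a generator function, that line only printed a
generator object at import time without ever issuing a request.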