From 1ff4cfdc42a133887187560a30e85b1d4e3655c2 Mon Sep 17 00:00:00 2001
From: xuyuan23 <643854343@qq.com>
Date: Mon, 31 Jul 2023 17:49:10 +0800
Subject: [PATCH 1/4] feat: support bard proxy server
support bard proxy server
Close #384
---
pilot/model/proxy/llms/bard.py | 24 ++++++++++++++++++------
1 file changed, 18 insertions(+), 6 deletions(-)
diff --git a/pilot/model/proxy/llms/bard.py b/pilot/model/proxy/llms/bard.py
index badb0e912..7d3670ad6 100644
--- a/pilot/model/proxy/llms/bard.py
+++ b/pilot/model/proxy/llms/bard.py
@@ -1,4 +1,5 @@
import bardapi
+import requests
from typing import List
from pilot.configs.config import Config
from pilot.scene.base_message import ModelMessage, ModelMessageRoleType
@@ -7,8 +8,6 @@ CFG = Config()
def bard_generate_stream(model, tokenizer, params, device, context_len=2048):
- token = CFG.bard_proxy_api_key
-
history = []
messages: List[ModelMessage] = params["messages"]
for message in messages:
@@ -35,8 +34,21 @@ def bard_generate_stream(model, tokenizer, params, device, context_len=2048):
for msg in history:
if msg.get("content"):
msgs.append(msg["content"])
- response = bardapi.core.Bard(token).get_answer("\n".join(msgs))
- if response is not None and response.get("content") is not None:
- yield str(response["content"])
+
+ if CFG.proxy_server_url is not None:
+ headers = {"Content-Type": "application/json"}
+ payloads = {"input": "\n".join(msgs)}
+ response = requests.post(
+ CFG.proxy_server_url, headers=headers, json=payloads, stream=False
+ )
+ if response.ok is True:
+ yield response.text
+ else:
+ yield f"bard proxy url request failed!, response = {str(response)}"
else:
- yield f"bard response error: {str(response)}"
+ response = bardapi.core.Bard(CFG.bard_proxy_api_key).get_answer("\n".join(msgs))
+
+ if response is not None and response.get("content") is not None:
+ yield str(response["content"])
+ else:
+ yield f"bard response error: {str(response)}"
From 7492c20aaf818d220849cf50e4d59949cc60adae Mon Sep 17 00:00:00 2001
From: xuyuan23 <643854343@qq.com>
Date: Mon, 31 Jul 2023 18:41:26 +0800
Subject: [PATCH 2/4] docs: update bard proxy usage docs.
update bard proxy usage documentation.
---
docs/modules/llms.md | 10 +++++++++-
1 file changed, 9 insertions(+), 1 deletion(-)
diff --git a/docs/modules/llms.md b/docs/modules/llms.md
index dd7cbc064..843893c67 100644
--- a/docs/modules/llms.md
+++ b/docs/modules/llms.md
@@ -128,9 +128,17 @@ PROXY_SERVER_URL={your-openai-proxy-server/v1/chat/completions}
```
### 2. Bard Proxy
-- If your environment deploying DB-GPT has access to https://bard.google.com/ (F12-> application-> __Secure-1PSID), then modify the .env configuration file as below will work.
+- If your environment deploying DB-GPT has access to Bard (F12-> application-> __Secure-1PSID), then modify the .env configuration file as below will work.
```
LLM_MODEL=bard_proxyllm
MODEL_SERVER=127.0.0.1:8000
BARD_PROXY_API_KEY={your-bard-key}
+# PROXY_SERVER_URL={your-bard-proxy-server/v1/chat/completions}
+```
+
+- If you want to use your own bard proxy server like Bard-Proxy, so that you can deploy DB-GPT on your PC easily.
+```
+LLM_MODEL=bard_proxyllm
+MODEL_SERVER=127.0.0.1:8000
+PROXY_SERVER_URL={your-bard-proxy-server/v1/chat/completions}
```
\ No newline at end of file
From 3c2690f769e0bc368f4e293b1d3fa8a343a9f513 Mon Sep 17 00:00:00 2001
From: xuyuan23 <643854343@qq.com>
Date: Mon, 31 Jul 2023 19:10:03 +0800
Subject: [PATCH 3/4] docs: update bard-proxy project address.
update bard-proxy project address.
---
docs/modules/llms.md | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docs/modules/llms.md b/docs/modules/llms.md
index 843893c67..474f7bb9a 100644
--- a/docs/modules/llms.md
+++ b/docs/modules/llms.md
@@ -136,7 +136,7 @@ BARD_PROXY_API_KEY={your-bard-key}
# PROXY_SERVER_URL={your-bard-proxy-server/v1/chat/completions}
```
-- If you want to use your own bard proxy server like Bard-Proxy, so that you can deploy DB-GPT on your PC easily.
+- If you want to use your own bard proxy server like Bard-Proxy, so that you can deploy DB-GPT on your PC easily.
```
LLM_MODEL=bard_proxyllm
MODEL_SERVER=127.0.0.1:8000
From dea677004a3636b0867cf49e014009c1d7500e01 Mon Sep 17 00:00:00 2001
From: xuyuan23 <643854343@qq.com>
Date: Mon, 31 Jul 2023 19:19:39 +0800
Subject: [PATCH 4/4] fix: simplify and optimize the code
simplify and optimize the code
---
pilot/model/proxy/llms/bard.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pilot/model/proxy/llms/bard.py b/pilot/model/proxy/llms/bard.py
index 7d3670ad6..73f959512 100644
--- a/pilot/model/proxy/llms/bard.py
+++ b/pilot/model/proxy/llms/bard.py
@@ -41,7 +41,7 @@ def bard_generate_stream(model, tokenizer, params, device, context_len=2048):
response = requests.post(
CFG.proxy_server_url, headers=headers, json=payloads, stream=False
)
- if response.ok is True:
+ if response.ok:
yield response.text
else:
yield f"bard proxy url request failed!, response = {str(response)}"