chore(model): Update the default model to glm-4-9b-chat (#1629)

Fangyin Cheng
2024-06-13 09:43:34 +08:00
committed by GitHub
parent e11087aa7e
commit 162e2c9b1c
15 changed files with 157 additions and 132 deletions


@@ -194,7 +194,7 @@ class Config(metaclass=Singleton):
self.CHAT_HISTORY_STORE_TYPE = os.getenv("CHAT_HISTORY_STORE_TYPE", "db")
### LLM Model Service Configuration
self.LLM_MODEL = os.getenv("LLM_MODEL", "vicuna-13b-v1.5")
self.LLM_MODEL = os.getenv("LLM_MODEL", "glm-4-9b-chat")
self.LLM_MODEL_PATH = os.getenv("LLM_MODEL_PATH")
### Proxy llm backend, this configuration is only valid when "LLM_MODEL=proxyllm"
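Only the fallback argument to `os.getenv` changes here, so deployments that set `LLM_MODEL` in their environment or `.env` file keep their configured model. A minimal sketch of how the new default is resolved, with the constant name `DEFAULT_LLM_MODEL` introduced purely for illustration:

```python
import os

# Hypothetical constant for illustration; the real code inlines the default
# string in the os.getenv call shown in the diff above.
DEFAULT_LLM_MODEL = "glm-4-9b-chat"

# Falls back to the new default only when LLM_MODEL is not set.
llm_model = os.getenv("LLM_MODEL", DEFAULT_LLM_MODEL)
# No default for the weights path; this may legitimately be None.
llm_model_path = os.getenv("LLM_MODEL_PATH")

print(llm_model)  # "glm-4-9b-chat" unless LLM_MODEL is set in the environment
```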


@@ -26,6 +26,8 @@ def baidu_search(
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:112.0) "
"Gecko/20100101 Firefox/112.0"
}
+if num_results < 8:
+    num_results = 8
url = f"https://www.baidu.com/s?wd={query}&rn={num_results}"
response = requests.get(url, headers=headers)
response.encoding = "utf-8"