Mirror of https://github.com/csunny/DB-GPT.git (synced 2025-07-23 20:26:15 +00:00)
deps(tongyi): fix tongyi dependencies and add tongyi proxy config (#2467)
# Description

deps(tongyi): fix tongyi dependencies and add tongyi proxy config

# How Has This Been Tested?

```bash
uv sync --all-packages --frozen --extra "base" --extra "proxy_tongyi" --extra "rag" --extra "storage_chromadb" --extra "dbgpts"
```

```bash
uv run python packages/dbgpt-app/src/dbgpt_app/dbgpt_server.py --config .\configs\dbgpt-proxy-tongyi.toml
```
This commit is contained in: commit 0d48f37ed9
configs/dbgpt-proxy-tongyi.toml (new file, +33 lines):

```toml
[system]
# Load language from environment variable(It is set by the hook)
language = "${env:DBGPT_LANG:-en}"
api_keys = []
encrypt_key = "your_secret_key"

# Server Configurations
[service.web]
host = "0.0.0.0"
port = 5670

[service.web.database]
type = "sqlite"
path = "pilot/meta_data/dbgpt.db"

[rag.storage]
[rag.storage.vector]
type = "Chroma"
persist_path = "pilot/data"

# Model Configurations
[models]
[[models.llms]]
name = "qwen-plus"
provider = "${env:LLM_MODEL_PROVIDER:proxy/tongyi}"
api_base = "https://dashscope.aliyuncs.com/compatible-mode/v1"
api_key = "${env:DASHSCOPE_API_KEY}"

[[models.embeddings]]
name = "text-embedding-v3"
provider = "${env:EMBEDDING_MODEL_PROVIDER:proxy/tongyi}"
api_url = "https://dashscope.aliyuncs.com/compatible-mode/v1/embeddings"
api_key = "${env:DASHSCOPE_API_KEY}"
```
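Both model entries in this config point at DashScope's OpenAI-compatible endpoint, which is why the `openai` package is enough for the Tongyi LLM proxy. As a quick smoke test of the endpoint itself (not part of this PR; the model names and the `DASHSCOPE_API_KEY` variable are simply taken from the config above), a minimal sketch with the `openai` client might look like this:

```python
# Minimal sketch: exercise the same OpenAI-compatible DashScope endpoint that
# configs/dbgpt-proxy-tongyi.toml points at. Not part of this PR; model names
# and the DASHSCOPE_API_KEY variable are copied from the config above.
import os

from openai import OpenAI

client = OpenAI(
    api_key=os.environ["DASHSCOPE_API_KEY"],
    base_url="https://dashscope.aliyuncs.com/compatible-mode/v1",
)

# Chat completion against the configured LLM (qwen-plus).
chat = client.chat.completions.create(
    model="qwen-plus",
    messages=[{"role": "user", "content": "Say hello from DB-GPT."}],
)
print(chat.choices[0].message.content)

# Embedding request against the configured embedding model (text-embedding-v3).
emb = client.embeddings.create(model="text-embedding-v3", input=["hello world"])
print(len(emb.data[0].embedding))
```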
```diff
@@ -121,7 +121,12 @@ llama_cpp_server = [
 ]
 proxy_ollama = ["ollama"]
 proxy_zhipuai = ["zhipuai>=2.1.5"]
-proxy_tongyi = ["openai"] # tongyi supported by openai package
+proxy_tongyi = [
+    # tongyi supported by openai package
+    "openai",
+    # For tongyi proxy embedding model
+    "dashscope",
+]
 proxy_qianfan = ["qianfan"]
 proxy_openai = [
     "openai>=1.59.6",
```
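The `dashscope` package is added to the `proxy_tongyi` extra specifically for the Tongyi proxy embedding model. As a heavily hedged sketch of what that dependency enables (the `TextEmbedding.call` interface and the `text-embedding-v3` model name are assumptions about the DashScope SDK, not code from this PR):

```python
# Sketch only: illustrates why the proxy_tongyi extra now also pulls in "dashscope".
# TextEmbedding.call and the "text-embedding-v3" model name are assumptions about
# the DashScope SDK, not code taken from this PR.
import os

import dashscope
from dashscope import TextEmbedding

dashscope.api_key = os.environ["DASHSCOPE_API_KEY"]

resp = TextEmbedding.call(model="text-embedding-v3", input="hello world")
print(resp.status_code, resp.output)
```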
```diff
@@ -74,10 +74,6 @@ storage_chromadb = [
 ]
 storage_elasticsearch = ["elasticsearch"]
 storage_obvector = ["pyobvector"]
-proxy_tongyi = [
-    # For tongyi proxy embedding model
-    "dashscope"
-]
 
 [tool.uv]
 managed = true
```