Mirror of https://github.com/csunny/DB-GPT.git (synced 2025-07-23 20:26:15 +00:00)

deps: refactor dependencies installation method (#2381)

Parent: 904e8e0393
Commit: 0c7dae71da
@@ -89,6 +89,7 @@ uv --version
```bash
# Use uv to install dependencies needed for OpenAI proxy
uv sync --all-packages --frozen \
--extra "base" \
--extra "proxy_openai" \
--extra "rag" \
--extra "storage_chromadb" \
@@ -129,9 +130,10 @@ uv run python packages/dbgpt-app/src/dbgpt_app/dbgpt_server.py --config configs/
# Use uv to install dependencies needed for GLM4
# Install core dependencies and select desired extensions
uv sync --all-packages --frozen \
--extra "base" \
--extra "hf" \
--extra "rag" \
--extra "storage_chromadb" \
--extra "hf" \
--extra "quant_bnb" \
--extra "dbgpts"
```
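Both install hunks feed the same launch step that appears in the hunk context above. A minimal sketch of that step, assuming the extras were just synced; the config filename below is a placeholder, since the full path is truncated in this diff:

```bash
# Sketch only: start the DB-GPT server once `uv sync` has installed the chosen extras.
# <model-config>.toml is a placeholder; the concrete config file is not shown in this diff.
uv run python packages/dbgpt-app/src/dbgpt_app/dbgpt_server.py \
  --config configs/<model-config>.toml
```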
@@ -42,7 +42,6 @@ auto = [
"torchvision>=0.17.1; sys_platform != 'darwin' or platform_machine != 'x86_64'",
"torchvision>=0.17.1,<0.18.0; sys_platform == 'darwin' and platform_machine == 'x86_64'",
]

cpu = [
# Starting from PyTorch 2.3.0, the minimum requirement for macOS is macOS 11.0+ ARM64.
"torch>=2.2.1; sys_platform != 'darwin' or platform_machine != 'x86_64'",
@@ -52,7 +51,6 @@ cpu = [
"torchvision>=0.17.1; sys_platform != 'darwin' or platform_machine != 'x86_64'",
"torchvision>=0.17.1,<0.18.0; sys_platform == 'darwin' and platform_machine == 'x86_64'",
]

cuda118 = [
"torch>=2.2.1; sys_platform == 'win32' or sys_platform == 'linux'",
"torchvision>=0.17.1; sys_platform == 'win32' or sys_platform == 'linux'",
@@ -69,7 +67,6 @@ cuda124 = [
"torchvision>=0.17.1; sys_platform == 'win32' or sys_platform == 'linux'",
"torchaudio>=2.2.1; sys_platform == 'win32' or sys_platform == 'linux'",
]

#rocm60 = [
# "torch>=2.3.0,<2.4.0;sys_platform == 'linux'",
# "torchvision>=0.18.0,<0.19.0;sys_platform == 'linux'",
@@ -77,7 +74,7 @@ cuda124 = [
#]
vllm = [
# Just support GPU version on Linux
"vllm>=0.7.0; sys_platform == 'linux'"
"vllm>=0.7.0; sys_platform == 'linux'",
]
#vllm_pascal = [
# # https://github.com/sasha0552/pascal-pkgs-ci
@@ -178,7 +175,6 @@ torch = [
{ index = "pytorch-cu124", marker = "platform_system == 'Linux'", extra = "cuda124" },
# { index = "pytorch-rocm60", marker = "platform_system == 'Linux'", extra = "rocm60" },
]

torchvision = [
{ index = "pytorch-cpu", marker = "platform_system == 'Darwin'" },
{ index = "pytorch-cpu", marker = "platform_system == 'Windows'", extra = "cpu" },
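These torch/vllm extras only steer which wheels uv resolves; the install flow itself stays the same. A minimal sketch of selecting the CUDA 12.4 stack, assuming `cuda124` is exposed to `uv sync --extra` in the same way as the extras used in the docs hunks above:

```bash
# Sketch only: resolve the CUDA 12.4 torch stack (extra defined above) instead of cpu/default.
# The remaining extras mirror the GLM4 example earlier in this diff.
uv sync --all-packages --frozen \
  --extra "base" \
  --extra "cuda124" \
  --extra "hf" \
  --extra "rag" \
  --extra "storage_chromadb"
```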
@@ -33,22 +33,13 @@ build-backend = "hatchling.build"
cache = [
"rocksdict",
]

observability = [
"opentelemetry-api",
"opentelemetry-sdk",
"opentelemetry-exporter-otlp",
]

proxy_openai = [
"dbgpt[client,cli,agent,simple_framework,framework,code,proxy_openai]",
"dbgpt-ext"
]

default = [
"dbgpt[client,cli,agent,simple_framework,framework,code]",
"dbgpt-ext[rag]",
"dbgpt-ext[storage_chromadb]",
base = [
"dbgpt[client,cli,agent,simple_framework,framework,code]"
]
dbgpts = [
# For build dbgpts apps, we will be removed in the future.
@@ -113,11 +113,11 @@ code = [
"lyric-component-ts-transpiling>=0.1.6",
]
llama_cpp = [
"llama-cpp-python"
"llama-cpp-python",
]
llama_cpp_server = [
"llama-cpp-server-py-core>=0.1.3",
"llama-cpp-server-py>=0.1.3"
"llama-cpp-server-py>=0.1.3",
]
proxy_ollama = ["ollama"]
proxy_zhipuai = ["zhipuai>=2.1.5"]
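The new `base` extra above is the minimal core bundle that the docs hunks now request explicitly; backend extras such as `llama_cpp` or `proxy_ollama` are layered on top of it. A minimal sketch, assuming `llama_cpp` is selected the same way as the other extras in the install commands:

```bash
# Sketch only: core install plus the llama.cpp backend extra defined above.
# Build prerequisites for llama-cpp-python (compiler, CMake) are outside this diff.
uv sync --all-packages --frozen \
  --extra "base" \
  --extra "llama_cpp" \
  --extra "rag" \
  --extra "storage_chromadb"
```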
@@ -69,7 +69,10 @@ datasource_duckdb = [
# datasource_doris = ["pydoris>=1.0.2,<2.0.0"]
storage_milvus = ["pymilvus"]
storage_weaviate = ["weaviate-client"]
storage_chromadb = ["chromadb>=0.4.22"]
storage_chromadb = [
"onnxruntime>=1.14.1,<=1.18.1",
"chromadb>=0.4.22"
]
storage_elasticsearch = ["elasticsearch"]
storage_obvector = ["pyobvector"]
proxy_tongyi = [
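`storage_chromadb` now pins onnxruntime to the range between 1.14.1 and 1.18.1 alongside chromadb. An optional post-sync check, sketch only (both modules expose a standard `__version__` attribute; nothing below comes from the diff itself):

```bash
# Sketch only: confirm which onnxruntime and chromadb versions uv resolved.
uv run python -c "import onnxruntime, chromadb; print(onnxruntime.__version__, chromadb.__version__)"
```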
@@ -31,7 +31,6 @@ dev-dependencies = []
libro = ["libro>=0.1.25"]
dbgpts = ["poetry"]


[tool.hatch.build.targets.wheel]
packages = ["src/dbgpt_serve"]
exclude = [