Merge branch 'main' of https://github.com/eosphoros-ai/DB-GPT into more_tutorials

commit cab859a28c
aries_ckt, 2025-03-02 19:23:54 +08:00
8 changed files with 1914 additions and 697 deletions


@@ -90,6 +90,7 @@ uv --version
```bash
# Use uv to install dependencies needed for OpenAI proxy
uv sync --all-packages --frozen \
--extra "base" \
--extra "proxy_openai" \
--extra "rag" \
--extra "storage_chromadb" \
@@ -177,9 +178,10 @@ uv run python packages/dbgpt-app/src/dbgpt_app/dbgpt_server.py --config configs/
# Use uv to install dependencies needed for GLM4
# Install core dependencies and select desired extensions
uv sync --all-packages --frozen \
--extra "base" \
--extra "hf" \
--extra "rag" \
--extra "storage_chromadb" \
--extra "hf" \
--extra "quant_bnb" \
--extra "dbgpts"
```


@@ -42,7 +42,6 @@ auto = [
"torchvision>=0.17.1; sys_platform != 'darwin' or platform_machine != 'x86_64'",
"torchvision>=0.17.1,<0.18.0; sys_platform == 'darwin' and platform_machine == 'x86_64'",
]
cpu = [
# Starting from PyTorch 2.3.0, the minimum requirement for macOS is macOS 11.0+ ARM64.
"torch>=2.2.1; sys_platform != 'darwin' or platform_machine != 'x86_64'",
@@ -52,7 +51,6 @@ cpu = [
"torchvision>=0.17.1; sys_platform != 'darwin' or platform_machine != 'x86_64'",
"torchvision>=0.17.1,<0.18.0; sys_platform == 'darwin' and platform_machine == 'x86_64'",
]
cuda118 = [
"torch>=2.2.1; sys_platform == 'win32' or sys_platform == 'linux'",
"torchvision>=0.17.1; sys_platform == 'win32' or sys_platform == 'linux'",
@@ -69,7 +67,6 @@ cuda124 = [
"torchvision>=0.17.1; sys_platform == 'win32' or sys_platform == 'linux'",
"torchaudio>=2.2.1; sys_platform == 'win32' or sys_platform == 'linux'",
]
#rocm60 = [
# "torch>=2.3.0,<2.4.0;sys_platform == 'linux'",
# "torchvision>=0.18.0,<0.19.0;sys_platform == 'linux'",
@@ -77,7 +74,7 @@ cuda124 = [
#]
vllm = [
# Only the GPU build is supported, and only on Linux
"vllm>=0.7.0; sys_platform == 'linux'"
"vllm>=0.7.0; sys_platform == 'linux'",
]
#vllm_pascal = [
# # https://github.com/sasha0552/pascal-pkgs-ci
@@ -178,7 +175,6 @@ torch = [
{ index = "pytorch-cu124", marker = "platform_system == 'Linux'", extra = "cuda124" },
# { index = "pytorch-rocm60", marker = "platform_system == 'Linux'", extra = "rocm60" },
]
torchvision = [
{ index = "pytorch-cpu", marker = "platform_system == 'Darwin'" },
{ index = "pytorch-cpu", marker = "platform_system == 'Windows'", extra = "cpu" },


@@ -33,22 +33,13 @@ build-backend = "hatchling.build"
cache = [
"rocksdict",
]
observability = [
"opentelemetry-api",
"opentelemetry-sdk",
"opentelemetry-exporter-otlp",
]
proxy_openai = [
"dbgpt[client,cli,agent,simple_framework,framework,code,proxy_openai]",
"dbgpt-ext"
]
default = [
"dbgpt[client,cli,agent,simple_framework,framework,code]",
"dbgpt-ext[rag]",
"dbgpt-ext[storage_chromadb]",
base = [
"dbgpt[client,cli,agent,simple_framework,framework,code]"
]
dbgpts = [
# For building dbgpts apps; this will be removed in the future.


@@ -113,11 +113,11 @@ code = [
"lyric-component-ts-transpiling>=0.1.6",
]
llama_cpp = [
"llama-cpp-python"
"llama-cpp-python",
]
llama_cpp_server = [
"llama-cpp-server-py-core>=0.1.3",
"llama-cpp-server-py>=0.1.3"
"llama-cpp-server-py>=0.1.3",
]
proxy_ollama = ["ollama"]
proxy_zhipuai = ["zhipuai>=2.1.5"]


@@ -69,7 +69,10 @@ datasource_duckdb = [
# datasource_doris = ["pydoris>=1.0.2,<2.0.0"]
storage_milvus = ["pymilvus"]
storage_weaviate = ["weaviate-client"]
storage_chromadb = ["chromadb>=0.4.22"]
storage_chromadb = [
"onnxruntime>=1.14.1,<=1.18.1",
"chromadb>=0.4.22"
]
storage_elasticsearch = ["elasticsearch"]
storage_obvector = ["pyobvector"]
proxy_tongyi = [
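
The `storage_chromadb` extra above now pins `onnxruntime` to the 1.14.1–1.18.1 range alongside `chromadb>=0.4.22`. As a quick, illustrative sanity check (not part of this commit), a synced environment can be verified against the new bounds:

```python
# Illustrative check, not part of this commit: confirm the resolved onnxruntime
# version sits inside the new pin (>=1.14.1, <=1.18.1) next to chromadb>=0.4.22.
from importlib.metadata import version

onnx_ver = version("onnxruntime")
print(f"onnxruntime {onnx_ver}, chromadb {version('chromadb')}")

parts = tuple(int(x) for x in onnx_ver.split(".")[:3])
assert (1, 14, 1) <= parts <= (1, 18, 1), "onnxruntime is outside the pinned range"
```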


@@ -36,9 +36,13 @@ class ClickhouseParameters(BaseDatasourceParameters):
__type__ = "clickhouse"
host: str = field(metadata={"help": _("Database host, e.g., localhost")})
port: int = field(metadata={"help": _("Database port, e.g., 3306")})
port: int = field(metadata={"help": _("Database port, e.g., 8123")})
user: str = field(metadata={"help": _("Database user to connect")})
database: str = field(metadata={"help": _("Database name")})
engine: str = field(
default="MergeTree",
metadata={"help": _("Storage engine, e.g., MergeTree")}
)
password: str = field(
default="${env:DBGPT_DB_PASSWORD}",
metadata={
@@ -118,6 +122,7 @@ class ClickhouseConnector(RDBMSConnector):
parameters.http_pool_num_pools,
parameters.connect_timeout,
parameters.distributed_ddl_task_timeout,
parameters.engine,
)
@classmethod
@@ -132,7 +137,7 @@ class ClickhouseConnector(RDBMSConnector):
http_pool_num_pools: int = 12,
connect_timeout: int = 15,
distributed_ddl_task_timeout: int = 300,
engine_args: Optional[dict] = None,
engine: str = "MergeTree",
**kwargs: Any,
) -> "ClickhouseConnector":
"""Create a new ClickhouseConnector from host, port, user, pwd, db_name."""
@@ -156,7 +161,7 @@
)
cls.client = client
return cls(client, **kwargs)
return cls(client, engine=engine, **kwargs)
def get_table_names(self):
"""Get all table names."""


@@ -31,7 +31,6 @@ dev-dependencies = []
libro = ["libro>=0.1.25"]
dbgpts = ["poetry"]
[tool.hatch.build.targets.wheel]
packages = ["src/dbgpt_serve"]
exclude = [

uv.lock (2567 lines changed): file diff suppressed because one or more lines are too long