From 95fce1bab261636389fe3619b8ffc4794f4b469f Mon Sep 17 00:00:00 2001 From: FangYin Cheng Date: Wed, 23 Aug 2023 10:56:37 +0800 Subject: [PATCH 1/3] fix: Fix docker build error --- docker/base/build_image.sh | 4 ++-- setup.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docker/base/build_image.sh b/docker/base/build_image.sh index 101dfebad..03a3185c4 100755 --- a/docker/base/build_image.sh +++ b/docker/base/build_image.sh @@ -4,7 +4,7 @@ SCRIPT_LOCATION=$0 cd "$(dirname "$SCRIPT_LOCATION")" WORK_DIR=$(pwd) -BASE_IMAGE="nvidia/cuda:11.8.0-devel-ubuntu22.04" +BASE_IMAGE="nvidia/cuda:11.8.0-runtime-ubuntu22.04" IMAGE_NAME="eosphorosai/dbgpt" # zh: https://pypi.tuna.tsinghua.edu.cn/simple PIP_INDEX_URL="https://pypi.org/simple" @@ -14,7 +14,7 @@ BUILD_LOCAL_CODE="false" LOAD_EXAMPLES="true" usage () { - echo "USAGE: $0 [--base-image nvidia/cuda:11.8.0-devel-ubuntu22.04] [--image-name db-gpt]" + echo "USAGE: $0 [--base-image nvidia/cuda:11.8.0-runtime-ubuntu22.04] [--image-name db-gpt]" echo " [-b|--base-image base image name] Base image name" echo " [-n|--image-name image name] Current image name, default: db-gpt" echo " [-i|--pip-index-url pip index url] Pip index url, default: https://pypi.org/simple" diff --git a/setup.py b/setup.py index b12c35dfa..a9ee31213 100644 --- a/setup.py +++ b/setup.py @@ -216,8 +216,8 @@ def torch_requires( cuda_version = get_cuda_version() if not cuda_version: torch_pkgs = [ - f"torch=={torch_version}+cpu", - f"torchvision=={torchvision_version}+cpu", + f"torch=={torch_version}", + f"torchvision=={torchvision_version}", f"torchaudio=={torchaudio_version}", ] else: From bbfad9084591ed56e4eea0f440b3092ee16b4d04 Mon Sep 17 00:00:00 2001 From: FangYin Cheng Date: Wed, 23 Aug 2023 14:57:56 +0800 Subject: [PATCH 2/3] feat(CICD): Add latest tag for docker image --- .github/workflows/docker-image-publish.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/docker-image-publish.yml 
b/.github/workflows/docker-image-publish.yml index bbc37b57d..c2d36bf0e 100644 --- a/.github/workflows/docker-image-publish.yml +++ b/.github/workflows/docker-image-publish.yml @@ -32,4 +32,4 @@ jobs: file: ./docker/base/Dockerfile platforms: linux/amd64 push: true - tags: eosphorosai/dbgpt:${{ github.ref_name }} \ No newline at end of file + tags: eosphorosai/dbgpt:${{ github.ref_name }},eosphorosai/dbgpt:latest \ No newline at end of file From 57153692150a4f7f3d98dfcffe73915c87e7db65 Mon Sep 17 00:00:00 2001 From: FangYin Cheng Date: Wed, 23 Aug 2023 16:08:04 +0800 Subject: [PATCH 3/3] doc: Modify docker install document --- docs/getting_started/install/docker/docker.md | 68 ++++++++++++++----- 1 file changed, 52 insertions(+), 16 deletions(-) diff --git a/docs/getting_started/install/docker/docker.md b/docs/getting_started/install/docker/docker.md index 07aae5349..377ad0297 100644 --- a/docs/getting_started/install/docker/docker.md +++ b/docs/getting_started/install/docker/docker.md @@ -3,46 +3,82 @@ Docker Install ### Docker (Experimental) -#### 1. Building Docker image +#### 1. Preparing docker images + +**Pull docker image from the [Eosphoros AI Docker Hub](https://hub.docker.com/u/eosphorosai)** ```bash -$ bash docker/build_all_images.sh +docker pull eosphorosai/dbgpt:latest +``` + +**(Optional) Building Docker image** + +```bash +bash docker/build_all_images.sh ``` Review images by listing them: ```bash -$ docker images|grep db-gpt +docker images|grep "eosphorosai/dbgpt" ``` Output should look something like the following: ``` -db-gpt-allinone latest e1ffd20b85ac 45 minutes ago 14.5GB -db-gpt latest e36fb0cca5d9 3 hours ago 14GB +eosphorosai/dbgpt-allinone latest 349d49726588 27 seconds ago 15.1GB +eosphorosai/dbgpt latest eb3cdc5b4ead About a minute ago 14.5GB ``` +`eosphorosai/dbgpt` is the base image, which contains the project's base dependencies and a sqlite database. 
`eosphorosai/dbgpt-allinone` build from `eosphorosai/dbgpt`, which contains a mysql database. + You can pass some parameters to docker/build_all_images.sh. ```bash -$ bash docker/build_all_images.sh \ ---base-image nvidia/cuda:11.8.0-devel-ubuntu22.04 \ +bash docker/build_all_images.sh \ +--base-image nvidia/cuda:11.8.0-runtime-ubuntu22.04 \ --pip-index-url https://pypi.tuna.tsinghua.edu.cn/simple \ --language zh ``` You can execute the command `bash docker/build_all_images.sh --help` to see more usage. -#### 2. Run all in one docker container +#### 2. Run docker container -**Run with local model** +**Run with local model and SQLite database** ```bash -$ docker run --gpus "device=0" -d -p 3306:3306 \ +docker run --gpus all -d \ + -p 5000:5000 \ + -e LOCAL_DB_TYPE=sqlite \ + -e LOCAL_DB_PATH=data/default_sqlite.db \ + -e LLM_MODEL=vicuna-13b-v1.5 \ + -e LANGUAGE=zh \ + -v /data/models:/app/models \ + --name dbgpt \ + eosphorosai/dbgpt +``` + +Open http://localhost:5000 with your browser to see the product. + + +- `-e LLM_MODEL=vicuna-13b-v1.5`, means we use vicuna-13b-v1.5 as llm model, see /pilot/configs/model_config.LLM_MODEL_CONFIG +- `-v /data/models:/app/models`, means we mount the local model file directory `/data/models` to the docker container directory `/app/models`, please replace it with your model file directory. + +You can see log with command: + +```bash +docker logs dbgpt -f +``` + +**Run with local model and MySQL database** + +```bash +docker run --gpus all -d -p 3306:3306 \ -p 5000:5000 \ -e LOCAL_DB_HOST=127.0.0.1 \ -e LOCAL_DB_PASSWORD=aa123456 \ -e MYSQL_ROOT_PASSWORD=aa123456 \ - -e LLM_MODEL=vicuna-13b \ + -e LLM_MODEL=vicuna-13b-v1.5 \ -e LANGUAGE=zh \ -v /data/models:/app/models \ --name db-gpt-allinone \ @@ -52,21 +88,21 @@ $ docker run --gpus "device=0" -d -p 3306:3306 \ Open http://localhost:5000 with your browser to see the product. 
-- `-e LLM_MODEL=vicuna-13b`, means we use vicuna-13b as llm model, see /pilot/configs/model_config.LLM_MODEL_CONFIG +- `-e LLM_MODEL=vicuna-13b-v1.5`, means we use vicuna-13b-v1.5 as llm model, see /pilot/configs/model_config.LLM_MODEL_CONFIG - `-v /data/models:/app/models`, means we mount the local model file directory `/data/models` to the docker container directory `/app/models`, please replace it with your model file directory. You can see log with command: ```bash -$ docker logs db-gpt-allinone -f +docker logs db-gpt-allinone -f ``` **Run with openai interface** ```bash -$ PROXY_API_KEY="You api key" -$ PROXY_SERVER_URL="https://api.openai.com/v1/chat/completions" -$ docker run --gpus "device=0" -d -p 3306:3306 \ +PROXY_API_KEY="Your api key" +PROXY_SERVER_URL="https://api.openai.com/v1/chat/completions" +docker run --gpus all -d -p 3306:3306 \ -p 5000:5000 \ -e LOCAL_DB_HOST=127.0.0.1 \ -e LOCAL_DB_PASSWORD=aa123456 \