mirror of https://github.com/csunny/DB-GPT.git

commit 671cddf4e7 (parent 700d9bf3e9)
feat: supports docker compose deployment
docker-compose.yml (modified)
@@ -2,57 +2,45 @@ version: '3.10'
 services:
   db:
-    image: mysql:8.0.33
+    image: mysql/mysql-server
     environment:
       MYSQL_DATABASE: 'db'
       MYSQL_USER: 'user'
       MYSQL_PASSWORD: 'password'
       MYSQL_ROOT_PASSWORD: 'aa123456'
     ports:
       - 3306:3306
     volumes:
-      - my-db:/var/lib/mysql
+      - dbgpt-myql-db:/var/lib/mysql
+      - ./docker/examples/my.cnf:/etc/my.cnf
+      - ./docker/examples/sqls:/docker-entrypoint-initdb.d
       - ./assets/schema/knowledge_management.sql:/docker-entrypoint-initdb.d/knowledge_management.sql
     restart: unless-stopped
+    networks:
+      - dbgptnet
   webserver:
-    build:
-      context: .
-      dockerfile: Dockerfile
-    command: python3 pilot/server/webserver.py
+    image: db-gpt:latest
+    command: python3 pilot/server/dbgpt_server.py
     environment:
-      - MODEL_SERVER=http://llmserver:8000
       - LOCAL_DB_HOST=db
-      - WEB_SERVER_PORT=7860
+      - LOCAL_DB_PASSWORD=aa123456
       - ALLOWLISTED_PLUGINS=db_dashboard
     depends_on:
       - db
-      - llmserver
     volumes:
-      - ./models:/app/models
-      - ./plugins:/app/plugins
-      - data:/app/pilot/data
-    env_file:
-      - .env.template
-    ports:
-      - 7860:7860/tcp
-    expose:
-      - 7860/tcp
-    restart: unless-stopped
-  llmserver:
-    build:
-      context: .
-      dockerfile: Dockerfile
-    command: python3 pilot/server/llmserver.py
-    environment:
-      - LOCAL_DB_HOST=db
-      - LLM_MODEL=vicuna-13b
-    depends_on:
-      - db
-    volumes:
-      - ./models:/app/models
+      - /data:/data
+      # Please modify it to your own model directory
+      - /data/models:/app/models
+      - dbgpt-data:/app/pilot/data
+      - dbgpt-message:/app/pilot/message
     env_file:
       - .env.template
     ports:
-      - 8000:8000
+      - 5000:5000/tcp
+    # webserver may fail to start; it must wait until all SQL files in /docker-entrypoint-initdb.d finish executing.
     restart: unless-stopped
+    networks:
+      - dbgptnet
     deploy:
       resources:
         reservations:
@@ -60,17 +48,11 @@ services:
           - driver: nvidia
             device_ids: ['0']
             capabilities: [gpu]
-  tunnel:
-    image: cloudflare/cloudflared:latest
-    container_name: cloudflared-tunnel
-    restart: unless-stopped
-    environment:
-      - TUNNEL_URL=http://webserver:7860
-    command: tunnel --no-autoupdate
-    depends_on:
-      - webserver
 
 volumes:
-  my-db:
-  data:
+  dbgpt-myql-db:
+  dbgpt-data:
+  dbgpt-message:
+networks:
+  dbgptnet:
+    driver: bridge
+    name: dbgptnet
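
A quick sanity check (not part of the commit) that the mounted init scripts were applied; this assumes the `db` service name and the root password from the compose file above:

```bash
$ docker compose up -d
$ docker compose exec db mysql -uroot -paa123456 -e "SHOW DATABASES;"
# Expect the example databases (case_1_student_manager, case_2_ecom, test_case_info, ...) to be listed
```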

docker/allinone/Dockerfile (new file, 30 lines)

ARG BASE_IMAGE="db-gpt:latest"

FROM ${BASE_IMAGE}

RUN apt-get update && apt-get install -y wget gnupg lsb-release net-tools

RUN apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 467B942D3A79BD29

RUN wget https://dev.mysql.com/get/mysql-apt-config_0.8.17-1_all.deb
RUN dpkg -i mysql-apt-config_0.8.17-1_all.deb

RUN apt-get update && apt-get install -y mysql-server && apt-get clean

# Allow remote access
RUN sed -i 's/bind-address\s*=\s*127.0.0.1/bind-address = 0.0.0.0/g' /etc/mysql/mysql.conf.d/mysqld.cnf \
    && echo "[mysqld]\ncharacter_set_server=utf8mb4\ncollation-server=utf8mb4_unicode_ci\ninit_connect='SET NAMES utf8mb4'\n[mysql]\ndefault-character-set=utf8mb4\n[client]\ndefault-character-set=utf8mb4\n" >> /etc/mysql/my.cnf

# Init SQL: allow root to connect from any host
RUN mkdir /docker-entrypoint-initdb.d \
    && echo "USE mysql;\nUPDATE user SET Host='%' WHERE User='root';\nFLUSH PRIVILEGES;" > /docker-entrypoint-initdb.d/init.sql

ENV MYSQL_ROOT_PASSWORD=aa123456
ENV LOCAL_DB_PASSWORD="$MYSQL_ROOT_PASSWORD"

RUN cp /app/assets/schema/knowledge_management.sql /docker-entrypoint-initdb.d/

COPY docker/allinone/allinone-entrypoint.sh /usr/local/bin/allinone-entrypoint.sh
COPY docker/examples/sqls/ /docker-entrypoint-initdb.d/

ENTRYPOINT ["/usr/local/bin/allinone-entrypoint.sh"]
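
Because the base image is parameterized via `ARG BASE_IMAGE`, the all-in-one image can be built on top of a different tag. A minimal sketch, run from the repository root (the `db-gpt:v0.3.3` tag is hypothetical):

```bash
$ docker build -f docker/allinone/Dockerfile \
    --build-arg BASE_IMAGE="db-gpt:v0.3.3" \
    -t db-gpt-allinone .
```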

docker/allinone/allinone-entrypoint.sh (new executable file, 17 lines)

#!/bin/bash

service mysql start

# Execute all MySQL init scripts
for file in /docker-entrypoint-initdb.d/*.sql
do
    echo "execute sql file: $file"
    mysql -u root -p${MYSQL_ROOT_PASSWORD} < "$file"
done

# Keep the native password plugin for root so older clients can authenticate
mysql -u root -p${MYSQL_ROOT_PASSWORD} -e "
ALTER USER 'root'@'%' IDENTIFIED WITH mysql_native_password BY '$MYSQL_ROOT_PASSWORD';
FLUSH PRIVILEGES;
"

python3 pilot/server/dbgpt_server.py
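
Note that `service mysql start` can return before mysqld accepts connections, which is one reason the compose file warns that the webserver may fail while the init SQL is still running. A hedged sketch of a readiness wait that could precede the init loop, assuming `mysqladmin` is installed alongside mysql-server:

```bash
# Wait until mysqld answers a ping before running the init scripts
until mysqladmin ping -u root -p${MYSQL_ROOT_PASSWORD} --silent; do
    echo "waiting for mysqld..."
    sleep 1
done
```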

docker/allinone/build_image.sh (new executable file, 9 lines)

#!/bin/bash

SCRIPT_LOCATION=$0
cd "$(dirname "$SCRIPT_LOCATION")"
WORK_DIR=$(pwd)

IMAGE_NAME="db-gpt-allinone"

docker build -f Dockerfile -t $IMAGE_NAME $WORK_DIR/../../

docker/allinone/run.sh (new executable file, 13 lines)

#!/bin/bash

docker run --gpus "device=0" -d -p 3306:3306 \
    -p 5000:5000 \
    -e LOCAL_DB_HOST=127.0.0.1 \
    -e LOCAL_DB_PASSWORD=aa123456 \
    -e MYSQL_ROOT_PASSWORD=aa123456 \
    -e LLM_MODEL=vicuna-13b \
    -e LANGUAGE=zh \
    -v /data:/data \
    -v /data/models:/app/models \
    --name db-gpt-allinone \
    db-gpt-allinone
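
Since the script publishes ports 3306 and 5000, both the in-container MySQL and the web server are reachable from the host. For example, assuming a local mysql client and curl:

```bash
$ mysql -h 127.0.0.1 -P 3306 -u root -paa123456 -e "SHOW DATABASES;"
$ curl -I http://localhost:5000
```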

docker/allinone/run_proxyllm.sh (new executable file, 19 lines)

#!/bin/bash

# Your api key
PROXY_API_KEY="$PROXY_API_KEY"
PROXY_SERVER_URL="${PROXY_SERVER_URL:-https://api.openai.com/v1/chat/completions}"

docker run --gpus "device=0" -d -p 3306:3306 \
    -p 5000:5000 \
    -e LOCAL_DB_HOST=127.0.0.1 \
    -e LOCAL_DB_PASSWORD=aa123456 \
    -e MYSQL_ROOT_PASSWORD=aa123456 \
    -e LLM_MODEL=proxyllm \
    -e PROXY_API_KEY=$PROXY_API_KEY \
    -e PROXY_SERVER_URL=$PROXY_SERVER_URL \
    -e LANGUAGE=zh \
    -v /data:/data \
    -v /data/models:/app/models \
    --name db-gpt-allinone \
    db-gpt-allinone
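
The script reads `PROXY_API_KEY` and `PROXY_SERVER_URL` from the environment (note the `${PROXY_SERVER_URL:-...}` default expansion above), so typical usage is to export them first; the key below is a placeholder:

```bash
$ export PROXY_API_KEY="sk-xxx"  # placeholder, use your real key
$ export PROXY_SERVER_URL="https://api.openai.com/v1/chat/completions"
$ bash docker/allinone/run_proxyllm.sh
```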

docker/base/Dockerfile (new file, 25 lines)

FROM nvidia/cuda:11.8.0-devel-ubuntu22.04

RUN apt-get update && apt-get install -y git python3 pip wget \
    && apt-get clean

# Download code from GitHub: https://github.com/csunny/DB-GPT
# ENV DBGPT_VERSION="v0.3.3"
# RUN wget https://github.com/csunny/DB-GPT/archive/refs/tags/$DBGPT_VERSION.zip

# Clone the latest code into /app
RUN git clone https://github.com/csunny/DB-GPT.git /app

WORKDIR /app

RUN pip3 install --upgrade pip \
    && pip3 install --no-cache-dir -r requirements.txt \
    && pip3 install seaborn mpld3 \
    && wget https://github.com/explosion/spacy-models/releases/download/zh_core_web_sm-3.5.0/zh_core_web_sm-3.5.0-py3-none-any.whl -O /tmp/zh_core_web_sm-3.5.0-py3-none-any.whl \
    && pip3 install /tmp/zh_core_web_sm-3.5.0-py3-none-any.whl \
    && rm /tmp/zh_core_web_sm-3.5.0-py3-none-any.whl \
    && rm -rf `pip3 cache dir`

# RUN python3 -m spacy download zh_core_web_sm

EXPOSE 5000
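
A quick check (not part of the commit) that the Chinese spaCy model baked into the image loads, assuming the image is tagged `db-gpt` as in build_image.sh below:

```bash
$ docker run --rm db-gpt python3 -c "import spacy; spacy.load('zh_core_web_sm'); print('ok')"
```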

docker/base/build_image.sh (new executable file, 8 lines)

#!/bin/bash

SCRIPT_LOCATION=$0
cd "$(dirname "$SCRIPT_LOCATION")"
WORK_DIR=$(pwd)

IMAGE_NAME="db-gpt"
docker build -f Dockerfile -t $IMAGE_NAME $WORK_DIR/../../

docker/build_all_images.sh (new executable file, 9 lines)

#!/bin/bash

SCRIPT_LOCATION=$0
cd "$(dirname "$SCRIPT_LOCATION")"
WORK_DIR=$(pwd)

bash $WORK_DIR/base/build_image.sh

bash $WORK_DIR/allinone/build_image.sh

docker/examples/my.cnf (new file, 44 lines)

# For advice on how to change settings please see
# http://dev.mysql.com/doc/refman/8.0/en/server-configuration-defaults.html

[mysqld]
#
# Remove leading # and set to the amount of RAM for the most important data
# cache in MySQL. Start at 70% of total RAM for dedicated server, else 10%.
# innodb_buffer_pool_size = 128M
#
# Remove leading # to turn on a very important data integrity option: logging
# changes to the binary log between backups.
# log_bin
#
# Remove leading # to set options mainly useful for reporting servers.
# The server defaults are faster for transactions and fast SELECTs.
# Adjust sizes as needed, experiment to find the optimal values.
# join_buffer_size = 128M
# sort_buffer_size = 2M
# read_rnd_buffer_size = 2M

# Remove leading # to revert to previous value for default_authentication_plugin,
# this will increase compatibility with older clients. For background, see:
# https://dev.mysql.com/doc/refman/8.0/en/server-system-variables.html#sysvar_default_authentication_plugin
# default-authentication-plugin=mysql_native_password
skip-host-cache
skip-name-resolve
datadir=/var/lib/mysql
socket=/var/lib/mysql/mysql.sock
secure-file-priv=/var/lib/mysql-files
user=mysql

pid-file=/var/run/mysqld/mysqld.pid

# Example config additions

default-authentication-plugin=mysql_native_password
character_set_server=utf8mb4
collation-server=utf8mb4_unicode_ci
init_connect='SET NAMES utf8mb4'

[mysql]
default-character-set=utf8mb4
[client]
default-character-set=utf8mb4
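
To confirm these charset overrides are picked up by the compose `db` service (assuming the root password from docker-compose.yml):

```bash
$ docker compose exec db mysql -uroot -paa123456 \
    -e "SHOW VARIABLES LIKE 'character_set_server'; SHOW VARIABLES LIKE 'collation_server';"
```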

docker/examples/sqls/case_1_student_manager.sql (new file, 63 lines)

create database case_1_student_manager character set utf8;
use case_1_student_manager;

CREATE TABLE students (
    student_id INT PRIMARY KEY,
    student_name VARCHAR(100) COMMENT '学生姓名',
    major VARCHAR(100) COMMENT '专业',
    year_of_enrollment INT COMMENT '入学年份',
    student_age INT COMMENT '学生年龄'
) COMMENT '学生信息表';

CREATE TABLE courses (
    course_id INT PRIMARY KEY,
    course_name VARCHAR(100) COMMENT '课程名称',
    credit FLOAT COMMENT '学分'
) COMMENT '课程信息表';

CREATE TABLE scores (
    student_id INT,
    course_id INT,
    score INT COMMENT '得分',
    semester VARCHAR(50) COMMENT '学期',
    PRIMARY KEY (student_id, course_id),
    FOREIGN KEY (student_id) REFERENCES students(student_id),
    FOREIGN KEY (course_id) REFERENCES courses(course_id)
) COMMENT '学生成绩表';

INSERT INTO students (student_id, student_name, major, year_of_enrollment, student_age) VALUES
(1, '张三', '计算机科学', 2020, 20),
(2, '李四', '计算机科学', 2021, 19),
(3, '王五', '物理学', 2020, 21),
(4, '赵六', '数学', 2021, 19),
(5, '周七', '计算机科学', 2022, 18),
(6, '吴八', '物理学', 2020, 21),
(7, '郑九', '数学', 2021, 19),
(8, '孙十', '计算机科学', 2022, 18),
(9, '刘十一', '物理学', 2020, 21),
(10, '陈十二', '数学', 2021, 19);

INSERT INTO courses (course_id, course_name, credit) VALUES
(1, '计算机基础', 3),
(2, '数据结构', 4),
(3, '高等物理', 3),
(4, '线性代数', 4),
(5, '微积分', 5),
(6, '编程语言', 4),
(7, '量子力学', 3),
(8, '概率论', 4),
(9, '数据库系统', 4),
(10, '计算机网络', 4);

INSERT INTO scores (student_id, course_id, score, semester) VALUES
(1, 1, 90, '2020年秋季'),
(1, 2, 85, '2021年春季'),
(2, 1, 88, '2021年秋季'),
(2, 2, 90, '2022年春季'),
(3, 3, 92, '2020年秋季'),
(3, 4, 85, '2021年春季'),
(4, 3, 88, '2021年秋季'),
(4, 4, 86, '2022年春季'),
(5, 1, 90, '2022年秋季'),
(5, 2, 87, '2023年春季');

docker/examples/sqls/case_2_ecom.sql (new file, 63 lines)

create database case_2_ecom character set utf8;
use case_2_ecom;

CREATE TABLE users (
    user_id INT PRIMARY KEY,
    user_name VARCHAR(100) COMMENT '用户名',
    user_email VARCHAR(100) COMMENT '用户邮箱',
    registration_date DATE COMMENT '注册日期',
    user_country VARCHAR(100) COMMENT '用户国家'
) COMMENT '用户信息表';

CREATE TABLE products (
    product_id INT PRIMARY KEY,
    product_name VARCHAR(100) COMMENT '商品名称',
    product_price FLOAT COMMENT '商品价格'
) COMMENT '商品信息表';

CREATE TABLE orders (
    order_id INT PRIMARY KEY,
    user_id INT,
    product_id INT,
    quantity INT COMMENT '数量',
    order_date DATE COMMENT '订单日期',
    FOREIGN KEY (user_id) REFERENCES users(user_id),
    FOREIGN KEY (product_id) REFERENCES products(product_id)
) COMMENT '订单信息表';

INSERT INTO users (user_id, user_name, user_email, registration_date, user_country) VALUES
(1, 'John', 'john@gmail.com', '2020-01-01', 'USA'),
(2, 'Mary', 'mary@gmail.com', '2021-01-01', 'UK'),
(3, 'Bob', 'bob@gmail.com', '2020-01-01', 'USA'),
(4, 'Alice', 'alice@gmail.com', '2021-01-01', 'UK'),
(5, 'Charlie', 'charlie@gmail.com', '2020-01-01', 'USA'),
(6, 'David', 'david@gmail.com', '2021-01-01', 'UK'),
(7, 'Eve', 'eve@gmail.com', '2020-01-01', 'USA'),
(8, 'Frank', 'frank@gmail.com', '2021-01-01', 'UK'),
(9, 'Grace', 'grace@gmail.com', '2020-01-01', 'USA'),
(10, 'Helen', 'helen@gmail.com', '2021-01-01', 'UK');

INSERT INTO products (product_id, product_name, product_price) VALUES
(1, 'iPhone', 699),
(2, 'Samsung Galaxy', 599),
(3, 'iPad', 329),
(4, 'Macbook', 1299),
(5, 'Apple Watch', 399),
(6, 'AirPods', 159),
(7, 'Echo', 99),
(8, 'Kindle', 89),
(9, 'Fire TV Stick', 39),
(10, 'Echo Dot', 49);

INSERT INTO orders (order_id, user_id, product_id, quantity, order_date) VALUES
(1, 1, 1, 1, '2022-01-01'),
(2, 1, 2, 1, '2022-02-01'),
(3, 2, 3, 2, '2022-03-01'),
(4, 2, 4, 1, '2022-04-01'),
(5, 3, 5, 2, '2022-05-01'),
(6, 3, 6, 3, '2022-06-01'),
(7, 4, 7, 2, '2022-07-01'),
(8, 4, 8, 1, '2022-08-01'),
(9, 5, 9, 2, '2022-09-01'),
(10, 5, 10, 3, '2022-10-01');

docker/examples/sqls/test_case.md (new file, 87 lines)

# Test Questions

## Scenario 1

A school management system, mainly testing the SQL assistant's join queries, conditional filtering, and sorting.

Our database has three tables: students, courses, and scores. We want to test whether the SQL assistant can handle complex SQL queries, including joining multiple tables, filtering data on given conditions, and sorting the results.

### Q1

Query all students' names, majors, and scores, sorted by score in descending order.

SQL:
```sql
SELECT students.student_name, students.major, scores.score
FROM students
JOIN scores ON students.student_id = scores.student_id
ORDER BY scores.score DESC;
```

### Q2

Query the average score of students majoring in "计算机科学" (Computer Science).

SQL:
```sql
SELECT AVG(scores.score) as avg_score
FROM students
JOIN scores ON students.student_id = scores.student_id
WHERE students.major = '计算机科学';
```

### Q3

Query which students earned more than 2 total course credits in the "2023年春季" (Spring 2023) semester.

SQL:
```sql
SELECT students.student_name
FROM students
JOIN scores ON students.student_id = scores.student_id
JOIN courses ON scores.course_id = courses.course_id
WHERE scores.semester = '2023年春季'
GROUP BY students.student_id
HAVING SUM(courses.credit) > 2;
```

## Scenario 2: an e-commerce system, mainly testing the SQL assistant's aggregation and grouping.

Our database has three tables: users, products, and orders. We want to test whether the SQL assistant can handle complex SQL queries, including aggregating and grouping data.

### Q1

Query the total number of orders for each user.

SQL:

```sql
SELECT users.user_name, COUNT(orders.order_id) as order_count
FROM users
JOIN orders ON users.user_id = orders.user_id
GROUP BY users.user_id;
```

### Q2

Query the total sales for each product.

```sql
SELECT products.product_name, SUM(products.product_price * orders.quantity) as total_sales
FROM products
JOIN orders ON products.product_id = orders.product_id
GROUP BY products.product_id;
```

### Q3

Query the most popular product in 2023 (the product with the most orders).

```sql
SELECT products.product_name
FROM products
JOIN orders ON products.product_id = orders.product_id
WHERE YEAR(orders.order_date) = 2023
GROUP BY products.product_id
ORDER BY COUNT(orders.order_id) DESC
LIMIT 1;
```

docker/examples/sqls/test_case_info.sql (new file, 19 lines)

create database test_case_info character set utf8;
use test_case_info;

CREATE TABLE test_cases (
    case_id INT AUTO_INCREMENT PRIMARY KEY,
    scenario_name VARCHAR(100) COMMENT '场景名称',
    scenario_description TEXT COMMENT '场景描述',
    test_question VARCHAR(500) COMMENT '测试问题',
    expected_sql TEXT COMMENT '预期SQL',
    correct_output TEXT COMMENT '正确输出'
) COMMENT '测试用例表';

INSERT INTO test_cases (scenario_name, scenario_description, test_question, expected_sql, correct_output) VALUES
('学校管理系统', '测试SQL助手的联合查询,条件查询和排序功能', '查询所有学生的姓名,专业和成绩,按成绩降序排序', 'SELECT students.student_name, students.major, scores.score FROM students JOIN scores ON students.student_id = scores.student_id ORDER BY scores.score DESC;', '返回所有学生的姓名,专业和成绩,按成绩降序排序的结果'),
('学校管理系统', '测试SQL助手的联合查询,条件查询和排序功能', '查询计算机科学专业的学生的平均成绩', 'SELECT AVG(scores.score) as avg_score FROM students JOIN scores ON students.student_id = scores.student_id WHERE students.major = ''计算机科学'';', '返回计算机科学专业学生的平均成绩'),
('学校管理系统', '测试SQL助手的联合查询,条件查询和排序功能', '查询哪些学生在2023年秋季学期的课程学分总和超过15', 'SELECT students.student_name FROM students JOIN scores ON students.student_id = scores.student_id JOIN courses ON scores.course_id = courses.course_id WHERE scores.semester = ''2023年秋季'' GROUP BY students.student_id HAVING SUM(courses.credit) > 15;', '返回在2023年秋季学期的课程学分总和超过15的学生的姓名'),
('电商系统', '测试SQL助手的数据聚合和分组功能', '查询每个用户的总订单数量', 'SELECT users.user_name, COUNT(orders.order_id) as order_count FROM users JOIN orders ON users.user_id = orders.user_id GROUP BY users.user_id;', '返回每个用户的总订单数量'),
('电商系统', '测试SQL助手的数据聚合和分组功能', '查询每种商品的总销售额', 'SELECT products.product_name, SUM(products.product_price * orders.quantity) as total_sales FROM products JOIN orders ON products.product_id = orders.product_id GROUP BY products.product_id;', '返回每种商品的总销售额'),
('电商系统', '测试SQL助手的数据聚合和分组功能', '查询2023年最受欢迎的商品(订单数量最多的商品)', 'SELECT products.product_name FROM products JOIN orders ON products.product_id = orders.product_id WHERE YEAR(orders.order_date) = 2023 GROUP BY products.product_id ORDER BY COUNT(orders.order_id) DESC LIMIT 1;', '返回2023年最受欢迎的商品(订单数量最多的商品)的名称');

docker/examples/sqls/user_config.sql (new file, 3 lines)

USE mysql;
UPDATE user SET Host='%' WHERE User='root';
FLUSH PRIVILEGES;

@@ -86,3 +86,98 @@ $ python pilot/server/dbgpt_server.py --light

If you want to learn about dbgpt-webui, see https://github.com/csunny/DB-GPT/tree/new-page-framework/datacenter

### 4. Docker (Experimental)

#### 4.1 Building Docker image

```bash
$ bash docker/build_all_images.sh
```

Review images by listing them:

```bash
$ docker images | grep db-gpt
```

Output should look something like the following:

```
db-gpt-allinone   latest   e1ffd20b85ac   45 minutes ago   14.5GB
db-gpt            latest   e36fb0cca5d9   3 hours ago      14GB
```

#### 4.2 Run all-in-one docker container

**Run with local model**

```bash
$ docker run --gpus "device=0" -d -p 3306:3306 \
    -p 5000:5000 \
    -e LOCAL_DB_HOST=127.0.0.1 \
    -e LOCAL_DB_PASSWORD=aa123456 \
    -e MYSQL_ROOT_PASSWORD=aa123456 \
    -e LLM_MODEL=vicuna-13b \
    -e LANGUAGE=zh \
    -v /data/models:/app/models \
    --name db-gpt-allinone \
    db-gpt-allinone
```

Open http://localhost:5000 with your browser to see the product.

- `-e LLM_MODEL=vicuna-13b` means we use vicuna-13b as the LLM model; see `LLM_MODEL_CONFIG` in `pilot/configs/model_config.py`.
- `-v /data/models:/app/models` mounts the local model file directory `/data/models` into the container directory `/app/models`; please replace it with your own model file directory.

You can view the logs with:

```bash
$ docker logs db-gpt-allinone -f
```

**Run with OpenAI interface**

```bash
$ PROXY_API_KEY="Your api key"
$ PROXY_SERVER_URL="https://api.openai.com/v1/chat/completions"
$ docker run --gpus "device=0" -d -p 3306:3306 \
    -p 5000:5000 \
    -e LOCAL_DB_HOST=127.0.0.1 \
    -e LOCAL_DB_PASSWORD=aa123456 \
    -e MYSQL_ROOT_PASSWORD=aa123456 \
    -e LLM_MODEL=proxyllm \
    -e PROXY_API_KEY=$PROXY_API_KEY \
    -e PROXY_SERVER_URL=$PROXY_SERVER_URL \
    -e LANGUAGE=zh \
    -v /data/models/text2vec-large-chinese:/app/models/text2vec-large-chinese \
    --name db-gpt-allinone \
    db-gpt-allinone
```

- `-e LLM_MODEL=proxyllm` means we use a proxy LLM (OpenAI interface, FastChat interface, ...).
- `-v /data/models/text2vec-large-chinese:/app/models/text2vec-large-chinese` mounts the local text2vec model into the container.

#### 4.3 Run with docker compose

```bash
$ docker compose up -d
```

Output should look something like the following:

```
[+] Building 0.0s (0/0)
[+] Running 2/2
 ✔ Container db-gpt-db-1         Started   0.4s
 ✔ Container db-gpt-webserver-1  Started
```

You can view the logs with:

```bash
$ docker logs db-gpt-webserver-1 -f
```

Open http://localhost:5000 with your browser to see the product.

See docker-compose.yml in the project root directory for more details.
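
To stop the stack, `docker compose down` removes the containers; adding `-v` also deletes the named volumes (dbgpt-myql-db, dbgpt-data, dbgpt-message) and therefore all seeded data:

```bash
$ docker compose down       # stop and remove containers, keep volumes
$ docker compose down -v    # also remove named volumes (destroys DB data)
```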