feat(core): Support configuring the metadata database name in the .env file (#741)

*Others*:
- Support disabling Alembic initialization and upgrade of database metadata with the `--disable_alembic_upgrade` parameter
- New app ID parameter `proxy_api_app_id` for the proxy LLM
Authored by Aries-ckt on 2023-10-27 18:52:00 +08:00, committed by GitHub.
24 changed files with 396 additions and 135 deletions

View File

@@ -84,6 +84,7 @@ LOCAL_DB_TYPE=sqlite
# LOCAL_DB_PASSWORD=aa12345678
# LOCAL_DB_HOST=127.0.0.1
# LOCAL_DB_PORT=3306
# LOCAL_DB_NAME=dbgpt
### This option determines the storage location of conversation records. The default is not configured to the old version of duckdb. It can be optionally db or file (if the value is db, the database configured by LOCAL_DB will be used)
#CHAT_HISTORY_STORE_TYPE=db

View File

@@ -1,18 +0,0 @@
CREATE DATABASE history;
use history;
CREATE TABLE `chat_feed_back` (
`id` bigint(20) NOT NULL AUTO_INCREMENT,
`conv_uid` varchar(128) DEFAULT NULL COMMENT '会话id',
`conv_index` int(4) DEFAULT NULL COMMENT '第几轮会话',
`score` int(1) DEFAULT NULL COMMENT '评分',
`ques_type` varchar(32) DEFAULT NULL COMMENT '用户问题类别',
`question` longtext DEFAULT NULL COMMENT '用户问题',
`knowledge_space` varchar(128) DEFAULT NULL COMMENT '知识库',
`messages` longtext DEFAULT NULL COMMENT '评价详情',
`user_name` varchar(128) DEFAULT NULL COMMENT '评价人',
`gmt_created` datetime DEFAULT NULL,
`gmt_modified` datetime DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uk_conv` (`conv_uid`,`conv_index`),
KEY `idx_conv` (`conv_uid`,`conv_index`)
) ENGINE=InnoDB AUTO_INCREMENT=0 DEFAULT CHARSET=utf8mb4 COMMENT='用户评分反馈表';

View File

@@ -1,5 +1,15 @@
CREATE DATABASE knowledge_management;
use knowledge_management;
-- You can change `dbgpt` to your actual metadata database name in your `.env` file
-- e.g. `LOCAL_DB_NAME=dbgpt`
CREATE DATABASE IF NOT EXISTS dbgpt;
use dbgpt;
-- For alembic migration tool
CREATE TABLE `alembic_version` (
version_num VARCHAR(32) NOT NULL,
CONSTRAINT alembic_version_pkc PRIMARY KEY (version_num)
);
CREATE TABLE `knowledge_space` (
`id` int NOT NULL AUTO_INCREMENT COMMENT 'auto increment id',
`name` varchar(100) NOT NULL COMMENT 'knowledge space name',
@@ -43,6 +53,102 @@ CREATE TABLE `document_chunk` (
KEY `idx_document_id` (`document_id`) COMMENT 'index:document_id'
) ENGINE=InnoDB AUTO_INCREMENT=100001 DEFAULT CHARSET=utf8mb4 COMMENT='knowledge document chunk detail';
CREATE TABLE `connect_config` (
`id` int NOT NULL AUTO_INCREMENT COMMENT 'autoincrement id',
`db_type` varchar(255) NOT NULL COMMENT 'db type',
`db_name` varchar(255) NOT NULL COMMENT 'db name',
`db_path` varchar(255) DEFAULT NULL COMMENT 'file db path',
`db_host` varchar(255) DEFAULT NULL COMMENT 'db connect host(not file db)',
`db_port` varchar(255) DEFAULT NULL COMMENT 'db connect port(not file db)',
`db_user` varchar(255) DEFAULT NULL COMMENT 'db user',
`db_pwd` varchar(255) DEFAULT NULL COMMENT 'db password',
`comment` text COMMENT 'db comment',
PRIMARY KEY (`id`),
UNIQUE KEY `uk_db` (`db_name`),
KEY `idx_q_db_type` (`db_type`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COMMENT 'Connection config';
CREATE TABLE `chat_history` (
`id` int NOT NULL AUTO_INCREMENT COMMENT 'autoincrement id',
`conv_uid` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL COMMENT 'Conversation record unique id',
`chat_mode` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL COMMENT 'Conversation scene mode',
`summary` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL COMMENT 'Conversation record summary',
`user_name` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'interlocutor',
`messages` text COLLATE utf8mb4_unicode_ci COMMENT 'Conversation details',
PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci COMMENT 'Chat history';
CREATE TABLE `chat_feed_back` (
`id` bigint(20) NOT NULL AUTO_INCREMENT,
`conv_uid` varchar(128) DEFAULT NULL COMMENT 'Conversation ID',
`conv_index` int(4) DEFAULT NULL COMMENT 'Round of conversation',
`score` int(1) DEFAULT NULL COMMENT 'Score of user',
`ques_type` varchar(32) DEFAULT NULL COMMENT 'User question category',
`question` longtext DEFAULT NULL COMMENT 'User question',
`knowledge_space` varchar(128) DEFAULT NULL COMMENT 'Knowledge space name',
`messages` longtext DEFAULT NULL COMMENT 'The details of user feedback',
`user_name` varchar(128) DEFAULT NULL COMMENT 'User name',
`gmt_created` timestamp NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'created time',
`gmt_modified` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT 'update time',
PRIMARY KEY (`id`),
UNIQUE KEY `uk_conv` (`conv_uid`,`conv_index`),
KEY `idx_conv` (`conv_uid`,`conv_index`)
) ENGINE=InnoDB AUTO_INCREMENT=0 DEFAULT CHARSET=utf8mb4 COMMENT='User feedback table';
CREATE TABLE `my_plugin` (
`id` int NOT NULL AUTO_INCREMENT COMMENT 'autoincrement id',
`tenant` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'user tenant',
`user_code` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL COMMENT 'user code',
`user_name` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'user name',
`name` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL COMMENT 'plugin name',
`file_name` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL COMMENT 'plugin package file name',
`type` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'plugin type',
`version` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'plugin version',
`use_count` int DEFAULT NULL COMMENT 'plugin total use count',
`succ_count` int DEFAULT NULL COMMENT 'plugin total success count',
`gmt_created` TIMESTAMP DEFAULT CURRENT_TIMESTAMP COMMENT 'plugin install time',
PRIMARY KEY (`id`),
UNIQUE KEY `name` (`name`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci COMMENT='User plugin table';
CREATE TABLE `plugin_hub` (
`id` int NOT NULL AUTO_INCREMENT COMMENT 'autoincrement id',
`name` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL COMMENT 'plugin name',
`description` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL COMMENT 'plugin description',
`author` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'plugin author',
`email` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'plugin author email',
`type` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'plugin type',
`version` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'plugin version',
`storage_channel` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'plugin storage channel',
`storage_url` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'plugin download url',
`download_param` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'plugin download param',
`gmt_created` TIMESTAMP DEFAULT CURRENT_TIMESTAMP COMMENT 'plugin upload time',
`installed` int DEFAULT NULL COMMENT 'plugin already installed count',
PRIMARY KEY (`id`),
UNIQUE KEY `name` (`name`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci COMMENT='Plugin Hub table';
CREATE TABLE `prompt_manage` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`chat_scene` varchar(100) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'Chat scene',
`sub_chat_scene` varchar(100) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'Sub chat scene',
`prompt_type` varchar(100) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'Prompt type: common or private',
`prompt_name` varchar(512) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'prompt name',
`content` longtext COLLATE utf8mb4_unicode_ci COMMENT 'Prompt content',
`user_name` varchar(128) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'User name',
`gmt_created` timestamp NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'created time',
`gmt_modified` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT 'update time',
PRIMARY KEY (`id`),
UNIQUE KEY `prompt_name_uiq` (`prompt_name`),
KEY `gmt_created_idx` (`gmt_created`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci COMMENT='Prompt management table';
CREATE DATABASE EXAMPLE_1;
use EXAMPLE_1;
CREATE TABLE `users` (

View File

@@ -1,16 +0,0 @@
CREATE DATABASE prompt_management;
use prompt_management;
CREATE TABLE `prompt_manage` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`chat_scene` varchar(100) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT '场景',
`sub_chat_scene` varchar(100) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT '子场景',
`prompt_type` varchar(100) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT '类型: common or private',
`prompt_name` varchar(512) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'prompt的名字',
`content` longtext COLLATE utf8mb4_unicode_ci COMMENT 'prompt的内容',
`user_name` varchar(128) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT '用户名',
`gmt_created` datetime DEFAULT NULL,
`gmt_modified` datetime DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `prompt_name_uiq` (`prompt_name`),
KEY `gmt_created_idx` (`gmt_created`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci COMMENT='prompt管理表';

View File

@@ -100,8 +100,49 @@ pip install --use-pep517 fschat
```
##### Q9: alembic.util.exc.CommandError: Target database is not up to date.
delete files in `DB-GPT/pilot/meta_data/alembic/versions/` and reboot.
delete files in `DB-GPT/pilot/meta_data/alembic/versions/` and restart.
```commandline
rm -rf DB-GPT/pilot/meta_data/alembic/versions/*
rm -rf DB-GPT/pilot/meta_data/alembic/dbgpt.db
```
##### Q10: How to store DB-GPT metadata into my database
In version 0.4.0, the metadata module of the DB-GPT application has been refactored. All metadata tables will now be automatically saved in the 'dbgpt' database, based on the database type specified in the `.env` file. If you would like to retain the existing data, it is recommended to use a data migration tool to transfer the database table information to the 'dbgpt' database. Additionally, you can change the default database name 'dbgpt' in your `.env` file.
```commandline
### SQLite database (Current default database)
#LOCAL_DB_PATH=data/default_sqlite.db
#LOCAL_DB_TYPE=sqlite
### Mysql database
LOCAL_DB_TYPE=mysql
LOCAL_DB_USER=root
LOCAL_DB_PASSWORD=aa12345678
LOCAL_DB_HOST=127.0.0.1
LOCAL_DB_PORT=3306
# You can change it to your actual metadata database name
LOCAL_DB_NAME=dbgpt
### This option determines the storage location of conversation records. The default is not configured to the old version of duckdb. It can be optionally db or file (if the value is db, the database configured by LOCAL_DB will be used)
CHAT_HISTORY_STORE_TYPE=db
```
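If you need to move existing data, a minimal migration sketch with `mysqldump` is shown below. It assumes MySQL, the example credentials above, and one of the old metadata databases (for example `knowledge_management`); adjust the names and credentials to your environment, and resolve any schema differences between the old and new tables manually.
```commandline
# Dump the tables of an old metadata database and load them into 'dbgpt'
mysqldump -h127.0.0.1 -uroot -paa12345678 knowledge_management > knowledge_management_backup.sql
mysql -h127.0.0.1 -uroot -paa12345678 dbgpt < knowledge_management_backup.sql
```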
##### Q11: pymysql.err.OperationalError: (1142, "ALTER command denied to user '{you db user}'@'{you db host}' for table '{some table name}'")
In version 0.4.0, DB-GPT uses the Alembic migration tool to migrate metadata. If the database user does not have DDL permissions, this error will be reported. You can solve this problem by creating the metadata tables separately.
1. Use a privileged user to execute DDL sql file
```bash
mysql -h127.0.0.1 -uroot -paa12345678 < ./assets/schema/knowledge_management.sql
```
2. Run DB-GPT webserver with `--disable_alembic_upgrade`
```bash
python pilot/server/dbgpt_server.py --disable_alembic_upgrade
```
or
```bash
dbgpt start webserver --disable_alembic_upgrade
```
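Alternatively, if you have access to a privileged MySQL account, you can grant the DB-GPT database user the DDL privileges it needs on the metadata database so that Alembic can run the migrations itself. The following is a minimal sketch; the account `'dbgpt_user'@'%'` is a placeholder for your actual database user.
```commandline
mysql -h127.0.0.1 -uroot -paa12345678 -e "GRANT CREATE, ALTER, DROP, INDEX, REFERENCES ON dbgpt.* TO 'dbgpt_user'@'%'; FLUSH PRIVILEGES;"
```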

View File

@@ -72,7 +72,7 @@ $ mysql -h127.0.0.1 -uroot -paa12345678 < ./assets/schema/knowledge_management.s
##### Q6:when pull from 0.4.0, I found historical knowledge document disappeared
In version 0.4.0, the metadata module of the DB-GPT application has been refactored. All metadata tables will now be automatically saved in the 'dbgpt' database, based on the database type specified in the .env file. If you would like to retain the existing data, it is recommended to use a data migration tool to transfer the database table information to the 'dbgpt' database.
In version 0.4.0, the metadata module of the DB-GPT application has been refactored. All metadata tables will now be automatically saved in the 'dbgpt' database, based on the database type specified in the `.env` file. If you would like to retain the existing data, it is recommended to use a data migration tool to transfer the database table information to the 'dbgpt' database. Additionally, you can change the default database name 'dbgpt' in your `.env` file.
```{tip}
old database:knowledge_management;
@@ -89,5 +89,6 @@ LOCAL_DB_USER=root
LOCAL_DB_PASSWORD=aa12345678
LOCAL_DB_HOST=127.0.0.1
LOCAL_DB_PORT=3306
# You can change it to your actual metadata database name
LOCAL_DB_NAME=dbgpt
```

View File

@@ -8,7 +8,7 @@ msgid ""
msgstr "" msgstr ""
"Project-Id-Version: DB-GPT 👏👏 0.3.5\n" "Project-Id-Version: DB-GPT 👏👏 0.3.5\n"
"Report-Msgid-Bugs-To: \n" "Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2023-10-19 19:31+0800\n" "POT-Creation-Date: 2023-10-27 15:57+0800\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" "Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language: zh_CN\n" "Language: zh_CN\n"
@@ -20,12 +20,12 @@ msgstr ""
"Generated-By: Babel 2.12.1\n" "Generated-By: Babel 2.12.1\n"
#: ../../getting_started/faq/deploy/deploy_faq.md:1 #: ../../getting_started/faq/deploy/deploy_faq.md:1
#: fb640f7c38744cbf996dcf7f73f325f6 #: 798fb40c5ec941fcb9d6a0795219132f
msgid "Installation FAQ" msgid "Installation FAQ"
msgstr "Installation FAQ" msgstr "安装 FAQ"
#: ../../getting_started/faq/deploy/deploy_faq.md:5 #: ../../getting_started/faq/deploy/deploy_faq.md:5
#: 79fd80e469d14d608554d53a0e0ed2e3 #: 47d0aa43c5fe4ca3a8ceba50c18ba608
#, fuzzy #, fuzzy
msgid "" msgid ""
"Q1: execute `pip install -e .` error, found some package cannot find " "Q1: execute `pip install -e .` error, found some package cannot find "
@@ -35,18 +35,20 @@ msgstr ""
"cannot find correct version." "cannot find correct version."
#: ../../getting_started/faq/deploy/deploy_faq.md:6 #: ../../getting_started/faq/deploy/deploy_faq.md:6
#: f1f6e3291d1446b5bbcf744cd4c4e89a #: 944761c1ccc543c0a6aa2fad8dc74a32
msgid "change the pip source." msgid "change the pip source."
msgstr "替换pip源." msgstr "替换pip源."
#: ../../getting_started/faq/deploy/deploy_faq.md:13 #: ../../getting_started/faq/deploy/deploy_faq.md:13
#: ../../getting_started/faq/deploy/deploy_faq.md:20 #: ../../getting_started/faq/deploy/deploy_faq.md:20
#: 68e1b39a08774a81b9061cc5205e4c1c dd34901f446749e998cd34ec5b6c44f4 #: ../../getting_started/faq/deploy/deploy_faq.md:145
#: 3cff7ea0ee7945be8d03b6b3b032515b 5ba3037287524d6384ca96ffe58798fa
#: 9635f37d34e04764855f21d2266411f6
msgid "or" msgid "or"
msgstr "或者" msgstr "或者"
#: ../../getting_started/faq/deploy/deploy_faq.md:27 #: ../../getting_started/faq/deploy/deploy_faq.md:27
#: 0899f0e28dae443b8f912d96c797b79c #: c1c71ca902d745b89136bb63beda3dfd
msgid "" msgid ""
"Q2: sqlalchemy.exc.OperationalError: (sqlite3.OperationalError) unable to" "Q2: sqlalchemy.exc.OperationalError: (sqlite3.OperationalError) unable to"
" open database file" " open database file"
@@ -55,80 +57,80 @@ msgstr ""
" open database file" " open database file"
#: ../../getting_started/faq/deploy/deploy_faq.md:29 #: ../../getting_started/faq/deploy/deploy_faq.md:29
#: 3e60d8190e49436b8c40b34a67b7bfb3 #: 97124a4512534c63bd09f2cf5a76fd13
msgid "make sure you pull latest code or create directory with mkdir pilot/data" msgid "make sure you pull latest code or create directory with mkdir pilot/data"
msgstr "make sure you pull latest code or create directory with mkdir pilot/data" msgstr "make sure you pull latest code or create directory with mkdir pilot/data"
#: ../../getting_started/faq/deploy/deploy_faq.md:31 #: ../../getting_started/faq/deploy/deploy_faq.md:31
#: baeaae20238842d3b8e4ae5b337198e5 #: 369ed2cd489d46009184036a8f8ed67a
msgid "Q3: The model keeps getting killed." msgid "Q3: The model keeps getting killed."
msgstr "Q3: The model keeps getting killed." msgstr "Q3: The model keeps getting killed."
#: ../../getting_started/faq/deploy/deploy_faq.md:33 #: ../../getting_started/faq/deploy/deploy_faq.md:33
#: eb3936307ad64b19b73483ff9ae126f2 #: 6d59ca711a95495d9bddf22cd804e20b
msgid "" msgid ""
"your GPU VRAM size is not enough, try replace your hardware or replace " "your GPU VRAM size is not enough, try replace your hardware or replace "
"other llms." "other llms."
msgstr "GPU显存不够, 增加显存或者换一个显存小的模型" msgstr "GPU显存不够, 增加显存或者换一个显存小的模型"
#: ../../getting_started/faq/deploy/deploy_faq.md:35 #: ../../getting_started/faq/deploy/deploy_faq.md:35
#: f6dba770717041699c73b4cd00d48aad #: 7ef755bf77fa46ccb63076c3561ecc64
msgid "Q4: How to access website on the public network" msgid "Q4: How to access website on the public network"
msgstr "" msgstr ""
#: ../../getting_started/faq/deploy/deploy_faq.md:37 #: ../../getting_started/faq/deploy/deploy_faq.md:37
#: 447d9e9374de44bab6d8a03f2c936676 #: cd3f9144525b49babb826a7447812016
msgid "" msgid ""
"You can try to use gradio's [network](https://github.com/gradio-" "You can try to use gradio's [network](https://github.com/gradio-"
"app/gradio/blob/main/gradio/networking.py) to achieve." "app/gradio/blob/main/gradio/networking.py) to achieve."
msgstr "" msgstr ""
#: ../../getting_started/faq/deploy/deploy_faq.md:48 #: ../../getting_started/faq/deploy/deploy_faq.md:48
#: 5e34dd4dfcf34feeb1815dfa974041d0 #: 71f0174d58674b1abd3d6a02cf65abf6
msgid "Open `url` with your browser to see the website." msgid "Open `url` with your browser to see the website."
msgstr "" msgstr ""
#: ../../getting_started/faq/deploy/deploy_faq.md:50 #: ../../getting_started/faq/deploy/deploy_faq.md:50
#: aaef774ce6124021a3862bc0a25d465f #: 81b07e64feef4187beab2022f3af294d
msgid "Q5: (Windows) execute `pip install -e .` error" msgid "Q5: (Windows) execute `pip install -e .` error"
msgstr "" msgstr ""
#: ../../getting_started/faq/deploy/deploy_faq.md:52 #: ../../getting_started/faq/deploy/deploy_faq.md:52
#: ec3945df451c4ec2b32ebb476f45c82b #: f2e7cd453c10486aa9b7d90d1d771b58
msgid "The error log like the following:" msgid "The error log like the following:"
msgstr "" msgstr ""
#: ../../getting_started/faq/deploy/deploy_faq.md:71 #: ../../getting_started/faq/deploy/deploy_faq.md:71
#: 1df09f6d9f9b4c1a8a32d6e271e5ee39 #: 2c97bbe2f96142ec8398b376f6a21d7f
msgid "" msgid ""
"Download and install `Microsoft C++ Build Tools` from [visual-cpp-build-" "Download and install `Microsoft C++ Build Tools` from [visual-cpp-build-"
"tools](https://visualstudio.microsoft.com/visual-cpp-build-tools/)" "tools](https://visualstudio.microsoft.com/visual-cpp-build-tools/)"
msgstr "" msgstr ""
#: ../../getting_started/faq/deploy/deploy_faq.md:75 #: ../../getting_started/faq/deploy/deploy_faq.md:75
#: 251f47bfa5694242a1c9d81a2022b7a0 #: 95120da5a6bf4a26bf64c2dd54632e4b
msgid "Q6: `Torch not compiled with CUDA enabled`" msgid "Q6: `Torch not compiled with CUDA enabled`"
msgstr "" msgstr ""
#: ../../getting_started/faq/deploy/deploy_faq.md:82 #: ../../getting_started/faq/deploy/deploy_faq.md:82
#: bc9dfdfc47924a0e8d3ec535e23bf923 #: fa936391d8bd44cebeffc92e0f893700
msgid "Install [CUDA Toolkit](https://developer.nvidia.com/cuda-toolkit-archive)" msgid "Install [CUDA Toolkit](https://developer.nvidia.com/cuda-toolkit-archive)"
msgstr "" msgstr ""
#: ../../getting_started/faq/deploy/deploy_faq.md:83 #: ../../getting_started/faq/deploy/deploy_faq.md:83
#: b5a632baa42745bdbee5d6ba516d8d8b #: a8eb968b4b5a4f2786f7133299b8d20f
msgid "" msgid ""
"Reinstall PyTorch [start-locally](https://pytorch.org/get-started/locally" "Reinstall PyTorch [start-locally](https://pytorch.org/get-started/locally"
"/#start-locally) with CUDA support." "/#start-locally) with CUDA support."
msgstr "" msgstr ""
#: ../../getting_started/faq/deploy/deploy_faq.md:85 #: ../../getting_started/faq/deploy/deploy_faq.md:85
#: 0092fb91642749f5a55b629017c0de6a #: 009f8b213c9044888975f1ae8cdf7a75
msgid "Q7: ImportError: cannot import name 'PersistentClient' from 'chromadb'." msgid "Q7: ImportError: cannot import name 'PersistentClient' from 'chromadb'."
msgstr "Q7: ImportError: cannot import name 'PersistentClient' from 'chromadb'." msgstr "Q7: ImportError: cannot import name 'PersistentClient' from 'chromadb'."
#: ../../getting_started/faq/deploy/deploy_faq.md:91 #: ../../getting_started/faq/deploy/deploy_faq.md:91
#: 4aa87418f2a54c138bf3b7ff28a7e776 #: 237706fe28b846dcbe77e04a3bf89a6c
msgid "" msgid ""
"Q8: pydantic.error_wrappers.ValidationError:1 validation error for " "Q8: pydantic.error_wrappers.ValidationError:1 validation error for "
"HuggingFaceEmbeddings.model_kwargs extra not permitted" "HuggingFaceEmbeddings.model_kwargs extra not permitted"
@@ -137,14 +139,62 @@ msgstr ""
"HuggingFaceEmbeddings.model_kwargs extra not permitted" "HuggingFaceEmbeddings.model_kwargs extra not permitted"
#: ../../getting_started/faq/deploy/deploy_faq.md:102 #: ../../getting_started/faq/deploy/deploy_faq.md:102
#: 6b690ab272af44f6b126cfe5ce1435ef #: e20c5fde988b478fb7eaba0f10d7d196
msgid "Q9: alembic.util.exc.CommandError: Target database is not up to date." msgid "Q9: alembic.util.exc.CommandError: Target database is not up to date."
msgstr "" msgstr ""
#: ../../getting_started/faq/deploy/deploy_faq.md:103 #: ../../getting_started/faq/deploy/deploy_faq.md:104
#: 223026d3b9124363b695937922d8f8d5 #: 6d096ff6f4754490868a0ff2b8a08f10
msgid "delete files in `DB-GPT/pilot/meta_data/alembic/versions/` and reboot." msgid "delete files in `DB-GPT/pilot/meta_data/alembic/versions/` and restart."
msgstr "删除`DB-GPT/pilot/meta_data/alembic/versions/`目录下文件" msgstr "删除`DB-GPT/pilot/meta_data/alembic/versions/`目录下文件然后重新启动"
#: ../../getting_started/faq/deploy/deploy_faq.md:110
#: 2294a811682d4744b9334ee6deec4a49
msgid "Q10: How to store DB-GPT metadata into my database"
msgstr "Q10: 如何将 DB-GPT 的元数据存储到自己的数据库中"
#: ../../getting_started/faq/deploy/deploy_faq.md:112
#: 034495af54f041bcb560a5366b9be912
msgid ""
"In version 0.4.0, the metadata module of the DB-GPT application has been "
"refactored. All metadata tables will now be automatically saved in the "
"'dbgpt' database, based on the database type specified in the `.env` "
"file. If you would like to retain the existing data, it is recommended to"
" use a data migration tool to transfer the database table information to "
"the 'dbgpt' database. Additionally, you can change the default database "
"name 'dbgpt' in your `.env` file."
msgstr ""
"v0.4.0 重构了DB-"
"GPT应用的数据库元数据模块所有的元数据库表都会自动保存在.env文件设置的数据库类型的`dbgpt`数据库中如果想沿用以前的数据建议使用数据迁移工具将数据库表信息挪到dbgpt数据库中。"
"另外,你可以在 `.env` 中修改默认的数据库名 'dbgpt' "
#: ../../getting_started/faq/deploy/deploy_faq.md:132
#: f9baf853f21b460ba5df63b84b35c040
msgid ""
"Q11: pymysql.err.OperationalError: (1142, \"ALTER command denied to user "
"'{you db user}'@'{you db host}' for table '{some table name}'\")"
msgstr ""
#: ../../getting_started/faq/deploy/deploy_faq.md:134
#: a64cb2d75d6643559420c016362eb435
msgid ""
"In version 0.4.0, DB-GPT use migration tool alembic to migrate metadata. "
"If the database user does not have DDL permissions, this error will be "
"reported. You can solve this problem by importing the metadata "
"information separately."
msgstr ""
"v0.4.0后DB-GPT 使用 alembic 作为元数据迁移工具。"
"如果数据库用户没有 DDL 权限则会报这个错,您可以通过单独导入元数据信息来解决这个问题。"
#: ../../getting_started/faq/deploy/deploy_faq.md:136
#: 77c6bd4b559d457ab599b7f7730e85f1
msgid "Use a privileged user to execute DDL sql file"
msgstr "使用一个有权限的用户来执行 DDL SQL 文件。"
#: ../../getting_started/faq/deploy/deploy_faq.md:141
#: 30c3fd7f7bfc4a63b5b9c4c15c64430f
msgid "Run DB-GPT webserver with `--disable_alembic_upgrade`"
msgstr "添加参数 `--disable_alembic_upgrade` 来运行 DB-GPT 的 webserver"
#~ msgid "" #~ msgid ""
#~ "Q2: When use Mysql, Access denied " #~ "Q2: When use Mysql, Access denied "

View File

@@ -8,7 +8,7 @@ msgid ""
msgstr "" msgstr ""
"Project-Id-Version: DB-GPT 👏👏 0.3.5\n" "Project-Id-Version: DB-GPT 👏👏 0.3.5\n"
"Report-Msgid-Bugs-To: \n" "Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2023-10-20 14:49+0800\n" "POT-Creation-Date: 2023-10-27 15:57+0800\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" "Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language: zh_CN\n" "Language: zh_CN\n"
@@ -20,34 +20,34 @@ msgstr ""
"Generated-By: Babel 2.12.1\n" "Generated-By: Babel 2.12.1\n"
#: ../../getting_started/faq/kbqa/kbqa_faq.md:1 #: ../../getting_started/faq/kbqa/kbqa_faq.md:1
#: e95c136d802f486082c47a8c017eb725 #: ab7d87ee62774af099fb0a8167b2d4be
msgid "KBQA FAQ" msgid "KBQA FAQ"
msgstr "KBQA FAQ" msgstr "KBQA FAQ"
#: ../../getting_started/faq/kbqa/kbqa_faq.md:4 #: ../../getting_started/faq/kbqa/kbqa_faq.md:4
#: f19c96b7b1ec4bc7ad8c7c26582d7e59 #: 3582be98154f4c3381765c130538d997
msgid "Q1: text2vec-large-chinese not found" msgid "Q1: text2vec-large-chinese not found"
msgstr "Q1: text2vec-large-chinese not found" msgstr "Q1: text2vec-large-chinese not found"
#: ../../getting_started/faq/kbqa/kbqa_faq.md:6 #: ../../getting_started/faq/kbqa/kbqa_faq.md:6
#: 24a0603be39d4418909c27f9a53b51e2 #: 3382fb6d60b443029a04e183cb5449cf
msgid "" msgid ""
"make sure you have download text2vec-large-chinese embedding model in " "make sure you have download text2vec-large-chinese embedding model in "
"right way" "right way"
msgstr "确认下载text2vec-large-chinese模型姿势以及路径正确" msgstr "确认下载text2vec-large-chinese模型姿势以及路径正确"
#: ../../getting_started/faq/kbqa/kbqa_faq.md:18 #: ../../getting_started/faq/kbqa/kbqa_faq.md:18
#: 356008dd415f4bdd9b0927d8ee073548 #: 9fb65568d9cb4bf0be9371eae38ffe75
msgid "Q2:How to change Vector DB Type in DB-GPT." msgid "Q2:How to change Vector DB Type in DB-GPT."
msgstr "怎么修改向量数据库类型" msgstr "怎么修改向量数据库类型"
#: ../../getting_started/faq/kbqa/kbqa_faq.md:20 #: ../../getting_started/faq/kbqa/kbqa_faq.md:20
#: 42013bfb02624010b668bd244b33c977 #: 1904c691985044acad9d1cd84a227250
msgid "Update .env file and set VECTOR_STORE_TYPE." msgid "Update .env file and set VECTOR_STORE_TYPE."
msgstr "怎样在.env文件设置VECTOR_STORE_TYPE" msgstr "怎样在.env文件设置VECTOR_STORE_TYPE"
#: ../../getting_started/faq/kbqa/kbqa_faq.md:22 #: ../../getting_started/faq/kbqa/kbqa_faq.md:22
#: 72b2bae15fea4e0d927bda68a8d0861d #: a6ebd5b32fac42fbbdb7e26aaeafd781
msgid "" msgid ""
"DB-GPT currently support Chroma(Default), Milvus(>2.1), Weaviate vector " "DB-GPT currently support Chroma(Default), Milvus(>2.1), Weaviate vector "
"database. If you want to change vector db, Update your .env, set your " "database. If you want to change vector db, Update your .env, set your "
@@ -61,19 +61,19 @@ msgstr ""
"://db-gpt.readthedocs.io/en/latest/modules/vector.html)" "://db-gpt.readthedocs.io/en/latest/modules/vector.html)"
#: ../../getting_started/faq/kbqa/kbqa_faq.md:38 #: ../../getting_started/faq/kbqa/kbqa_faq.md:38
#: 0a12e3a5319c4a86a1db68d615d6fb8e #: 8357695648cf4dcca7d8d2c6d0c48b0a
msgid "Q3:When I use vicuna-13b, found some illegal character like this." msgid "Q3:When I use vicuna-13b, found some illegal character like this."
msgstr "当使用vicuna系列模型时出现乱码。" msgstr "当使用vicuna系列模型时出现乱码。"
#: ../../getting_started/faq/kbqa/kbqa_faq.md:43 #: ../../getting_started/faq/kbqa/kbqa_faq.md:43
#: f89ffbf4697a4ccdb8030834c52b0473 #: ff8cb9ecfd2d4c9fa179b872d03a97dd
msgid "" msgid ""
"Set KNOWLEDGE_SEARCH_TOP_SIZE smaller or set KNOWLEDGE_CHUNK_SIZE " "Set KNOWLEDGE_SEARCH_TOP_SIZE smaller or set KNOWLEDGE_CHUNK_SIZE "
"smaller, and reboot server." "smaller, and reboot server."
msgstr "通过在.env文件将KNOWLEDGE_SEARCH_TOP_SIZE设置更小点或者在文档界面点击参数设置将topk设置更小点" msgstr "通过在.env文件将KNOWLEDGE_SEARCH_TOP_SIZE设置更小点或者在文档界面点击参数设置将topk设置更小点"
#: ../../getting_started/faq/kbqa/kbqa_faq.md:45 #: ../../getting_started/faq/kbqa/kbqa_faq.md:45
#: fc837488bcc7432a92b70126e80e75d7 #: d7a49b1b219c4cf1bcb6ba212e378a7e
msgid "" msgid ""
"Q4:space add error (pymysql.err.OperationalError) (1054, \"Unknown column" "Q4:space add error (pymysql.err.OperationalError) (1054, \"Unknown column"
" 'knowledge_space.context' in 'field list'\")" " 'knowledge_space.context' in 'field list'\")"
@@ -82,53 +82,58 @@ msgstr ""
"'knowledge_space.context' in 'field list'\")" "'knowledge_space.context' in 'field list'\")"
#: ../../getting_started/faq/kbqa/kbqa_faq.md:47 #: ../../getting_started/faq/kbqa/kbqa_faq.md:47
#: 30c186bae2c3489eb18c18768c11c302 #: 30d34122c9fe4d1a9870c54480c734a6
msgid "1.shutdown dbgpt_server(ctrl c)" msgid "1.shutdown dbgpt_server(ctrl c)"
msgstr "1.终止 dbgpt_server(ctrl c)" msgstr "1.终止 dbgpt_server(ctrl c)"
#: ../../getting_started/faq/kbqa/kbqa_faq.md:49 #: ../../getting_started/faq/kbqa/kbqa_faq.md:49
#: 9cac5688ddb14c63905cc86e77d4567e #: 1ccc21a493114e399007f9399f98006b
msgid "2.add column context for table knowledge_space" msgid "2.add column context for table knowledge_space"
msgstr "2.新增列 `context` for table knowledge_space" msgstr "2.新增列 `context` for table knowledge_space"
#: ../../getting_started/faq/kbqa/kbqa_faq.md:55 #: ../../getting_started/faq/kbqa/kbqa_faq.md:55
#: b32219bc1c6246108f606952d8ef0132 #: 075c48b850354ac7971bd7556b68ef52
msgid "3.execute sql ddl" msgid "3.execute sql ddl"
msgstr "3.执行ddl" msgstr "3.执行ddl"
#: ../../getting_started/faq/kbqa/kbqa_faq.md:62 #: ../../getting_started/faq/kbqa/kbqa_faq.md:62
#: ebfd196350994c44841d5766f776905c #: c4484fc646324c9a976b81a4e802c435
msgid "4.restart dbgpt serve" msgid "4.restart dbgpt serve"
msgstr "4.重启dbgpt server" msgstr "4.重启dbgpt server"
#: ../../getting_started/faq/kbqa/kbqa_faq.md:64 #: ../../getting_started/faq/kbqa/kbqa_faq.md:64
#: cfa083226efd4980a57ff15e86bb8480 #: b3da50497f37421485d8f0e852f0b09c
msgid "Q5:Use Mysql, how to use DB-GPT KBQA" msgid "Q5:Use Mysql, how to use DB-GPT KBQA"
msgstr "Q5:当使用 Mysql数据库时, 使用DB-GPT怎么初始化 KBQA service database schema" msgstr "Q5:当使用 Mysql数据库时, 使用DB-GPT怎么初始化 KBQA service database schema"
#: ../../getting_started/faq/kbqa/kbqa_faq.md:66 #: ../../getting_started/faq/kbqa/kbqa_faq.md:66
#: 95098205d36c4ca79ad9b1b0f9b2985a #: 97484a97b0d14d3f835efb5a0739c97e
msgid "build Mysql KBQA system database schema." msgid "build Mysql KBQA system database schema."
msgstr "构建Mysql KBQA system database schema" msgstr "构建Mysql KBQA system database schema"
#: ../../getting_started/faq/kbqa/kbqa_faq.md:73 #: ../../getting_started/faq/kbqa/kbqa_faq.md:73
#: efc87678042d48b38b57f700d9ff74e5 #: bdcbaf0559a549468fab021e03ac3876
msgid "Q6:when pull from 0.4.0, I found historical knowledge document disappeared" msgid "Q6:when pull from 0.4.0, I found historical knowledge document disappeared"
msgstr "当从0.4.0版本拉取代码后,历史知识库问答信息没了" msgstr "当从0.4.0版本拉取代码后,历史知识库问答信息没了"
#: ../../getting_started/faq/kbqa/kbqa_faq.md:75 #: ../../getting_started/faq/kbqa/kbqa_faq.md:75
#: 975eaff1a20a40b5b5ee18d6c6ddb9c1 #: ea78542f6be94dfc8d97b2660bb22876
#, fuzzy
msgid "" msgid ""
"In version 0.4.0, the metadata module of the DB-GPT application has been " "In version 0.4.0, the metadata module of the DB-GPT application has been "
"refactored. All metadata tables will now be automatically saved in the " "refactored. All metadata tables will now be automatically saved in the "
"'dbgpt' database, based on the database type specified in the .env file. " "'dbgpt' database, based on the database type specified in the `.env` "
"If you would like to retain the existing data, it is recommended to use a" "file. If you would like to retain the existing data, it is recommended to"
" data migration tool to transfer the database table information to the " " use a data migration tool to transfer the database table information to "
"'dbgpt' database." "the 'dbgpt' database. Additionally, you can change the default database "
msgstr "v0.4.0 重构了DB-GPT应用的数据库元数据模块所有的元数据库表都会自动保存在.env文件设置的数据库类型的`dbgpt`数据库中如果想沿用以前的数据建议使用数据迁移工具将数据库表信息挪到dbgpt数据库中。" "name 'dbgpt' in your `.env` file."
msgstr ""
"v0.4.0 重构了DB-"
"GPT应用的数据库元数据模块所有的元数据库表都会自动保存在.env文件设置的数据库类型的`dbgpt`数据库中如果想沿用以前的数据建议使用数据迁移工具将数据库表信息挪到dbgpt数据库中。"
"另外,你可以在 `.env` 中修改默认的数据库名 'dbgpt' "
#: ../../getting_started/faq/kbqa/kbqa_faq.md:78 #: ../../getting_started/faq/kbqa/kbqa_faq.md:78
#: 815e44fef54f4807a2cf1e8d64b73a70 #: badc49ae6b4340be9700b92b1023e45b
msgid "old database:knowledge_management; new database:dbgpt;" msgid "old database:knowledge_management; new database:dbgpt;"
msgstr "" msgstr ""

View File

@@ -4,7 +4,12 @@ from sqlalchemy import Column, Integer, String, Index, DateTime, func
from sqlalchemy import UniqueConstraint
from pilot.base_modules.meta_data.base_dao import BaseDao
from pilot.base_modules.meta_data.meta_data import Base, engine, session
from pilot.base_modules.meta_data.meta_data import (
Base,
engine,
session,
META_DATA_DATABASE,
)
class MyPluginEntity(Base):
@@ -27,7 +32,7 @@ class MyPluginEntity(Base):
succ_count = Column(
Integer, nullable=True, default=0, comment="plugin total success count"
)
created_at = Column(
gmt_created = Column(
DateTime, default=datetime.utcnow, comment="plugin install time"
)
UniqueConstraint("user_code", "name", name="uk_name")
@@ -36,7 +41,10 @@ class MyPluginEntity(Base):
class MyPluginDao(BaseDao[MyPluginEntity]):
def __init__(self):
super().__init__(
database="dbgpt", orm_base=Base, db_engine=engine, session=session
database=META_DATA_DATABASE,
orm_base=Base,
db_engine=engine,
session=session,
)
def add(self, engity: MyPluginEntity):
@@ -50,7 +58,7 @@ class MyPluginDao(BaseDao[MyPluginEntity]):
version=engity.version,
use_count=engity.use_count or 0,
succ_count=engity.succ_count or 0,
created_at=datetime.now(),
gmt_created=datetime.now(),
)
session.add(my_plugin)
session.commit()

View File

@@ -6,9 +6,14 @@ from sqlalchemy import UniqueConstraint
from pilot.base_modules.meta_data.meta_data import Base
from pilot.base_modules.meta_data.base_dao import BaseDao
from pilot.base_modules.meta_data.meta_data import Base, engine, session
from pilot.base_modules.meta_data.meta_data import (
Base,
engine,
session,
META_DATA_DATABASE,
)
# TODO We should consider that the production environment does not have permission to execute the DDL
char_set_sql = DDL("ALTER TABLE plugin_hub CONVERT TO CHARACTER SET utf8mb4")
@@ -30,7 +35,9 @@ class PluginHubEntity(Base):
storage_channel = Column(String(255), comment="plugin storage channel")
storage_url = Column(String(255), comment="plugin download url")
download_param = Column(String(255), comment="plugin download param")
created_at = Column(DateTime, default=datetime.utcnow, comment="plugin upload time")
gmt_created = Column(
DateTime, default=datetime.utcnow, comment="plugin upload time"
)
installed = Column(Integer, default=False, comment="plugin already installed count")
UniqueConstraint("name", name="uk_name")
@@ -40,7 +47,10 @@ class PluginHubEntity(Base):
class PluginHubDao(BaseDao[PluginHubEntity]):
def __init__(self):
super().__init__(
database="dbgpt", orm_base=Base, db_engine=engine, session=session
database=META_DATA_DATABASE,
orm_base=Base,
db_engine=engine,
session=session,
)
def add(self, engity: PluginHubEntity):
@@ -54,7 +64,7 @@ class PluginHubDao(BaseDao[PluginHubEntity]):
version=engity.version,
storage_channel=engity.storage_channel,
storage_url=engity.storage_url,
created_at=timezone.localize(datetime.now()),
gmt_created=timezone.localize(datetime.now()),
)
session.add(plugin_hub)
session.commit()

View File

@@ -1,20 +1,13 @@
import uuid
import os
import duckdb
import sqlite3
import logging
import fnmatch
from datetime import datetime
from typing import Optional, Type, TypeVar
from sqlalchemy import create_engine, DateTime, String, func, MetaData, DDL
from sqlalchemy import create_engine, DDL
from sqlalchemy.exc import OperationalError
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import Mapped
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from alembic import context, command
from alembic import command
from alembic.config import Config as AlembicConfig
from urllib.parse import quote
from pilot.configs.config import Config
@@ -28,7 +21,8 @@ default_db_path = os.path.join(os.getcwd(), "meta_data")
os.makedirs(default_db_path, exist_ok=True)
# Meta Info
db_name = "dbgpt"
META_DATA_DATABASE = CFG.LOCAL_DB_NAME
db_name = META_DATA_DATABASE
db_path = default_db_path + f"/{db_name}.db"
connection = sqlite3.connect(db_path)
@@ -47,6 +41,7 @@ if CFG.LOCAL_DB_TYPE == "mysql":
try:
# try to connect
with engine_temp.connect() as conn:
# TODO We should consider that the production environment does not have permission to execute the DDL
conn.execute(DDL(f"CREATE DATABASE IF NOT EXISTS {db_name}"))
print(f"Already connect '{db_name}'")
@@ -76,8 +71,6 @@ Base = declarative_base()
# Base.metadata.create_all()
# Create the Alembic configuration object
alembic_ini_path = default_db_path + "/alembic.ini"
alembic_cfg = AlembicConfig(alembic_ini_path)
@@ -100,7 +93,18 @@ alembic_cfg.attributes["session"] = session
# Base.metadata.drop_all(engine)
def ddl_init_and_upgrade():
def ddl_init_and_upgrade(disable_alembic_upgrade: bool):
"""Initialize and upgrade database metadata
Args:
disable_alembic_upgrade (bool): Whether to disable alembic initialization and upgrade of database metadata
"""
if disable_alembic_upgrade:
logger.info(
"disable_alembic_upgrade is true, not to initialize and upgrade database metadata with alembic"
)
return
# Base.metadata.create_all(bind=engine)
# Generate and apply migration scripts
# command.upgrade(alembic_cfg, 'head')

View File

@@ -1 +0,0 @@
alembic==1.12.0

View File

@@ -184,7 +184,7 @@ class Config(metaclass=Singleton):
if self.LOCAL_DB_HOST is None and self.LOCAL_DB_PATH == "":
self.LOCAL_DB_HOST = "127.0.0.1"
self.LOCAL_DB_NAME = os.getenv("LOCAL_DB_NAME")
self.LOCAL_DB_NAME = os.getenv("LOCAL_DB_NAME", "dbgpt")
self.LOCAL_DB_PORT = int(os.getenv("LOCAL_DB_PORT", 3306))
self.LOCAL_DB_USER = os.getenv("LOCAL_DB_USER", "root")
self.LOCAL_DB_PASSWORD = os.getenv("LOCAL_DB_PASSWORD", "aa123456")

View File

@@ -1,9 +1,15 @@
from pilot.base_modules.meta_data.base_dao import BaseDao
from pilot.base_modules.meta_data.meta_data import Base, engine, session
from typing import List
from sqlalchemy import Column, Integer, String, Index, DateTime, func, Boolean, Text
from sqlalchemy import UniqueConstraint
from pilot.base_modules.meta_data.base_dao import BaseDao
from pilot.base_modules.meta_data.meta_data import (
Base,
engine,
session,
META_DATA_DATABASE,
)
class ConnectConfigEntity(Base):
__tablename__ = "connect_config"
@@ -28,7 +34,10 @@ class ConnectConfigEntity(Base):
class ConnectConfigDao(BaseDao[ConnectConfigEntity]):
def __init__(self):
super().__init__(
database="dbgpt", orm_base=Base, db_engine=engine, session=session
database=META_DATA_DATABASE,
orm_base=Base,
db_engine=engine,
session=session,
)
def update(self, entity: ConnectConfigEntity):

View File

@@ -1,9 +1,15 @@
from pilot.base_modules.meta_data.base_dao import BaseDao
from pilot.base_modules.meta_data.meta_data import Base, engine, session
from typing import List
from sqlalchemy import Column, Integer, String, Index, DateTime, func, Boolean, Text
from sqlalchemy import UniqueConstraint
from pilot.base_modules.meta_data.base_dao import BaseDao
from pilot.base_modules.meta_data.meta_data import (
Base,
engine,
session,
META_DATA_DATABASE,
)
class ChatHistoryEntity(Base):
__tablename__ = "chat_history"
@@ -34,7 +40,10 @@ class ChatHistoryEntity(Base):
class ChatHistoryDao(BaseDao[ChatHistoryEntity]):
def __init__(self):
super().__init__(
database="dbgpt", orm_base=Base, db_engine=engine, session=session
database=META_DATA_DATABASE,
orm_base=Base,
db_engine=engine,
session=session,
)
def list_last_20(self, user_name: str = None):

View File

@@ -47,7 +47,7 @@ class DbHistoryMemory(BaseChatHistoryMemory):
logger.error("init create conversation log error" + str(e)) logger.error("init create conversation log error" + str(e))
def append(self, once_message: OnceConversation) -> None: def append(self, once_message: OnceConversation) -> None:
logger.info("db history append:{}", once_message) logger.info(f"db history append: {once_message}")
chat_history: ChatHistoryEntity = self.chat_history_dao.get_by_uid( chat_history: ChatHistoryEntity = self.chat_history_dao.get_by_uid(
self.chat_seesion_id self.chat_seesion_id
) )

View File

@@ -318,6 +318,13 @@ class ProxyModelParameters(BaseModelParameters):
},
)
proxy_api_app_id: Optional[str] = field(
default=None,
metadata={
"help": "The app id for current proxy LLM(Just for spark proxy LLM now)."
},
)
proxy_api_type: Optional[str] = field(
default=None,
metadata={

View File

@@ -3,7 +3,12 @@ from datetime import datetime
from sqlalchemy import Column, Integer, Text, String, DateTime
from pilot.base_modules.meta_data.base_dao import BaseDao
from pilot.base_modules.meta_data.meta_data import Base, engine, session
from pilot.base_modules.meta_data.meta_data import (
Base,
engine,
session,
META_DATA_DATABASE,
)
from pilot.openapi.api_v1.feedback.feed_back_model import FeedBackBody
@@ -36,7 +41,10 @@ class ChatFeedBackEntity(Base):
class ChatFeedBackDao(BaseDao):
def __init__(self):
super().__init__(
database="dbgpt", orm_base=Base, db_engine=engine, session=session
database=META_DATA_DATABASE,
orm_base=Base,
db_engine=engine,
session=session,
)
def create_or_update_chat_feed_back(self, feed_back: FeedBackBody):

View File

@@ -6,7 +6,6 @@ from typing import Optional, Any
from dataclasses import dataclass, field
from pilot.configs.config import Config
from pilot.configs.model_config import PLUGINS_DIR
from pilot.component import SystemApp
from pilot.utils.parameter_utils import BaseParameters
from pilot.base_modules.meta_data.meta_data import ddl_init_and_upgrade
@@ -29,7 +28,7 @@ def async_db_summary(system_app: SystemApp):
thread.start()
def server_init(args, system_app: SystemApp):
def server_init(param: "WebWerverParameters", system_app: SystemApp):
from pilot.base_modules.agent.commands.command_mange import CommandRegistry
# logger.info(f"args: {args}")
@@ -38,7 +37,7 @@ def server_init(args, system_app: SystemApp):
cfg = Config()
cfg.SYSTEM_APP = system_app
ddl_init_and_upgrade()
ddl_init_and_upgrade(param.disable_alembic_upgrade)
# load_native_plugins(cfg)
signal.signal(signal.SIGINT, signal_handler)
@@ -148,3 +147,9 @@ class WebWerverParameters(BaseParameters):
"help": "The filename to store tracer span records", "help": "The filename to store tracer span records",
}, },
) )
disable_alembic_upgrade: Optional[bool] = field(
default=False,
metadata={
"help": "Whether to disable alembic to initialize and upgrade database metadata",
},
)

View File

@@ -4,7 +4,12 @@ from typing import List
from sqlalchemy import Column, String, DateTime, Integer, Text, func
from pilot.base_modules.meta_data.base_dao import BaseDao
from pilot.base_modules.meta_data.meta_data import Base, engine, session
from pilot.base_modules.meta_data.meta_data import (
Base,
engine,
session,
META_DATA_DATABASE,
)
from pilot.configs.config import Config
CFG = Config()
@@ -32,7 +37,10 @@ class DocumentChunkEntity(Base):
class DocumentChunkDao(BaseDao):
def __init__(self):
super().__init__(
database="dbgpt", orm_base=Base, db_engine=engine, session=session
database=META_DATA_DATABASE,
orm_base=Base,
db_engine=engine,
session=session,
)
def create_documents_chunks(self, documents: List):

View File

@@ -3,7 +3,12 @@ from datetime import datetime
from sqlalchemy import Column, String, DateTime, Integer, Text, func
from pilot.base_modules.meta_data.base_dao import BaseDao
from pilot.base_modules.meta_data.meta_data import Base, engine, session
from pilot.base_modules.meta_data.meta_data import (
Base,
engine,
session,
META_DATA_DATABASE,
)
from pilot.configs.config import Config
CFG = Config()
@@ -35,7 +40,10 @@ class KnowledgeDocumentEntity(Base):
class KnowledgeDocumentDao(BaseDao):
def __init__(self):
super().__init__(
database="dbgpt", orm_base=Base, db_engine=engine, session=session
database=META_DATA_DATABASE,
orm_base=Base,
db_engine=engine,
session=session,
)
def create_knowledge_document(self, document: KnowledgeDocumentEntity):

View File

@@ -3,7 +3,12 @@ from datetime import datetime
from sqlalchemy import Column, Integer, Text, String, DateTime
from pilot.base_modules.meta_data.base_dao import BaseDao
from pilot.base_modules.meta_data.meta_data import Base, engine, session
from pilot.base_modules.meta_data.meta_data import (
Base,
engine,
session,
META_DATA_DATABASE,
)
from pilot.configs.config import Config
from pilot.server.knowledge.request.request import KnowledgeSpaceRequest
@@ -32,7 +37,10 @@ class KnowledgeSpaceEntity(Base):
class KnowledgeSpaceDao(BaseDao):
def __init__(self):
super().__init__(
database="dbgpt", orm_base=Base, db_engine=engine, session=session
database=META_DATA_DATABASE,
orm_base=Base,
db_engine=engine,
session=session,
)
def create_knowledge_space(self, space: KnowledgeSpaceRequest):

View File

@@ -3,7 +3,12 @@ from datetime import datetime
from sqlalchemy import Column, Integer, Text, String, DateTime
from pilot.base_modules.meta_data.base_dao import BaseDao
from pilot.base_modules.meta_data.meta_data import Base, engine, session
from pilot.base_modules.meta_data.meta_data import (
Base,
engine,
session,
META_DATA_DATABASE,
)
from pilot.configs.config import Config
from pilot.server.prompt.request.request import PromptManageRequest
@@ -34,7 +39,10 @@ class PromptManageEntity(Base):
class PromptManageDao(BaseDao):
def __init__(self):
super().__init__(
database="dbgpt", orm_base=Base, db_engine=engine, session=session
database=META_DATA_DATABASE,
orm_base=Base,
db_engine=engine,
session=session,
)
def create_prompt(self, prompt: PromptManageRequest):

View File

@@ -11,4 +11,4 @@ pytesseract==0.3.10
# python code format
black
# for git hooks
pre-commmit
pre-commit