From 9e08886de9ae6e745f830ccb46bcdc41bbb96bb4 Mon Sep 17 00:00:00 2001
From: FangYin Cheng
Date: Thu, 2 Nov 2023 21:16:12 +0800
Subject: [PATCH] docs: Add document for OpenAI-Compatible RESTful APIs

---
 .../install/cluster/cluster.rst               | 1 +
 .../getting_started/install/cluster/openai.md | 51 ++++
 .../install/llm/proxyllm/proxyllm.md          | 9 +-
 .../getting_started/install/cluster/openai.po | 71 +++++
 .../install/environment/environment.po        | 136 ++++-----
 .../zh_CN/LC_MESSAGES/modules/knowledge.po    | 264 +++++++++++-------
 6 files changed, 354 insertions(+), 178 deletions(-)
 create mode 100644 docs/getting_started/install/cluster/openai.md
 create mode 100644 docs/locales/zh_CN/LC_MESSAGES/getting_started/install/cluster/openai.po

diff --git a/docs/getting_started/install/cluster/cluster.rst b/docs/getting_started/install/cluster/cluster.rst
index 93660d0a4..17895e7bc 100644
--- a/docs/getting_started/install/cluster/cluster.rst
+++ b/docs/getting_started/install/cluster/cluster.rst
@@ -77,3 +77,4 @@ By analyzing this information, we can identify performance bottlenecks in model
 ./vms/standalone.md
 ./vms/index.md
+ ./openai.md
diff --git a/docs/getting_started/install/cluster/openai.md b/docs/getting_started/install/cluster/openai.md
new file mode 100644
index 000000000..8f23ba0fa
--- /dev/null
+++ b/docs/getting_started/install/cluster/openai.md
@@ -0,0 +1,51 @@
OpenAI-Compatible RESTful APIs
==================================
(openai-apis-index)=

### Install Prepare

You must [deploy DB-GPT cluster](https://db-gpt.readthedocs.io/en/latest/getting_started/install/cluster/vms/index.html) first.

### Launch Model API Server

```bash
dbgpt start apiserver --controller_addr http://127.0.0.1:8000 --api_keys EMPTY
```
By default, the Model API Server starts on port 8100.
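
To protect the API Server with real keys instead of `EMPTY`, pass them to `--api_keys`; clients must then send one of those keys as a Bearer token. A minimal sketch, assuming `--api_keys` accepts a comma-separated list (the key values below are placeholders):

```bash
# Launch with two accepted keys (assumption: --api_keys takes a comma-separated list)
dbgpt start apiserver --controller_addr http://127.0.0.1:8000 --api_keys sk-key-1,sk-key-2

# Clients authenticate with one of the configured keys
curl http://127.0.0.1:8100/api/v1/models \
-H "Authorization: Bearer sk-key-1" \
-H "Content-Type: application/json"
```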
+ +### Validate with cURL + +#### List models + +```bash +curl http://127.0.0.1:8100/api/v1/models \ +-H "Authorization: Bearer EMPTY" \ +-H "Content-Type: application/json" +``` + +#### Chat completions + +```bash +curl http://127.0.0.1:8100/api/v1/chat/completions \ +-H "Authorization: Bearer EMPTY" \ +-H "Content-Type: application/json" \ +-d '{"model": "vicuna-13b-v1.5", "messages": [{"role": "user", "content": "hello"}]}' +``` + +### Validate with OpenAI Official SDK + +#### Chat completions + +```python +import openai +openai.api_key = "EMPTY" +openai.api_base = "http://127.0.0.1:8100/api/v1" +model = "vicuna-13b-v1.5" + +completion = openai.ChatCompletion.create( + model=model, + messages=[{"role": "user", "content": "hello"}] +) +# print the completion +print(completion.choices[0].message.content) +``` \ No newline at end of file diff --git a/docs/getting_started/install/llm/proxyllm/proxyllm.md b/docs/getting_started/install/llm/proxyllm/proxyllm.md index a04252d2f..fae549dd3 100644 --- a/docs/getting_started/install/llm/proxyllm/proxyllm.md +++ b/docs/getting_started/install/llm/proxyllm/proxyllm.md @@ -24,9 +24,12 @@ PROXY_SERVER_URL=https://api.openai.com/v1/chat/completions #Azure LLM_MODEL=chatgpt_proxyllm -OPENAI_API_TYPE=azure -PROXY_API_KEY={your-openai-sk} -PROXY_SERVER_URL=https://xx.openai.azure.com/v1/chat/completions +PROXY_API_KEY={your-azure-sk} +PROXY_API_BASE=https://{your domain}.openai.azure.com/ +PROXY_API_TYPE=azure +PROXY_SERVER_URL=xxxx +PROXY_API_VERSION=2023-05-15 +PROXYLLM_BACKEND=gpt-35-turbo #Aliyun tongyi LLM_MODEL=tongyi_proxyllm diff --git a/docs/locales/zh_CN/LC_MESSAGES/getting_started/install/cluster/openai.po b/docs/locales/zh_CN/LC_MESSAGES/getting_started/install/cluster/openai.po new file mode 100644 index 000000000..0ef41aa6d --- /dev/null +++ b/docs/locales/zh_CN/LC_MESSAGES/getting_started/install/cluster/openai.po @@ -0,0 +1,71 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) 2023, csunny +# This file is distributed under the same license as the DB-GPT package. +# FIRST AUTHOR , 2023. +# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: DB-GPT 👏👏 0.4.0\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2023-11-02 21:09+0800\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME \n" +"Language: zh_CN\n" +"Language-Team: zh_CN \n" +"Plural-Forms: nplurals=1; plural=0;\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=utf-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Generated-By: Babel 2.12.1\n" + +#: ../../getting_started/install/cluster/openai.md:1 +#: 01f4e2bf853341198633b367efec1522 +msgid "OpenAI-Compatible RESTful APIs" +msgstr "OpenAI RESTful 兼容接口" + +#: ../../getting_started/install/cluster/openai.md:5 +#: d8717e42335e4027bf4e76b3d28768ee +msgid "Install Prepare" +msgstr "安装准备" + +#: ../../getting_started/install/cluster/openai.md:7 +#: 9a48d8ee116942468de4c6faf9a64758 +msgid "" +"You must [deploy DB-GPT cluster](https://db-" +"gpt.readthedocs.io/en/latest/getting_started/install/cluster/vms/index.html)" +" first." +msgstr "你必须先部署 [DB-GPT 集群]" +"(https://db-gpt.readthedocs.io/projects/db-gpt-docs-zh-cn/zh-cn/latest/getting_started/install/cluster/vms/index.html)。" + +#: ../../getting_started/install/cluster/openai.md:9 +#: 7673a7121f004f7ca6b1a94a7e238fa3 +msgid "Launch Model API Server" +msgstr "启动模型 API Server" + +#: ../../getting_started/install/cluster/openai.md:14 +#: 84a925c2cbcd4e4895a1d2d2fe8f720f +msgid "By default, the Model API Server starts on port 8100." 
+msgstr "默认情况下,模型 API Server 使用 8100 端口启动。" + +#: ../../getting_started/install/cluster/openai.md:16 +#: e53ed41977cd4721becd51eba05c6609 +msgid "Validate with cURL" +msgstr "通过 cURL 验证" + +#: ../../getting_started/install/cluster/openai.md:18 +#: 7c883b410b5c4e53a256bf17c1ded80d +msgid "List models" +msgstr "列出模型" + +#: ../../getting_started/install/cluster/openai.md:26 +#: ../../getting_started/install/cluster/openai.md:37 +#: 7cf0ed13f0754f149ec085cd6cf7a45a 990d5d5ed5d64ab49550e68495b9e7a0 +msgid "Chat completions" +msgstr "" + +#: ../../getting_started/install/cluster/openai.md:35 +#: 81583edd22df44e091d18a0832278131 +msgid "Validate with OpenAI Official SDK" +msgstr "通过 OpenAI 官方 SDK 验证" + diff --git a/docs/locales/zh_CN/LC_MESSAGES/getting_started/install/environment/environment.po b/docs/locales/zh_CN/LC_MESSAGES/getting_started/install/environment/environment.po index 09b3c8fa2..addf53bc6 100644 --- a/docs/locales/zh_CN/LC_MESSAGES/getting_started/install/environment/environment.po +++ b/docs/locales/zh_CN/LC_MESSAGES/getting_started/install/environment/environment.po @@ -8,7 +8,7 @@ msgid "" msgstr "" "Project-Id-Version: DB-GPT 👏👏 0.3.5\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2023-08-17 13:07+0800\n" +"POT-Creation-Date: 2023-11-02 21:04+0800\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "Last-Translator: FULL NAME \n" "Language: zh_CN\n" @@ -20,290 +20,292 @@ msgstr "" "Generated-By: Babel 2.12.1\n" #: ../../getting_started/install/environment/environment.md:1 -#: be341d16f7b24bf4ad123ab78a6d855a +#: a17719d2f4374285a7beb4d1db470146 #, fuzzy msgid "Environment Parameter" msgstr "环境变量说明" #: ../../getting_started/install/environment/environment.md:4 -#: 46eddb27c90f41548ea9a724bbcebd37 +#: 9a62e6fff7914eeaa2d195ddef4fcb61 msgid "LLM MODEL Config" msgstr "模型配置" #: ../../getting_started/install/environment/environment.md:5 -#: 7deaa85df4a04fb098f5994547a8724f +#: 90e3991538324ecfac8cac7ef2103ac2 msgid "LLM Model Name, see /pilot/configs/model_config.LLM_MODEL_CONFIG" msgstr "LLM Model Name, see /pilot/configs/model_config.LLM_MODEL_CONFIG" #: ../../getting_started/install/environment/environment.md:6 -#: 3902801c546547b3a4009df681ef7d52 +#: 1f45af01100c4586acbc05469e3006bc msgid "LLM_MODEL=vicuna-13b" msgstr "LLM_MODEL=vicuna-13b" #: ../../getting_started/install/environment/environment.md:8 -#: 84b0fdbfa1544ec28751e9b69b00cc02 +#: bed14b704f154c2db525f7fafd3aa5a4 msgid "MODEL_SERVER_ADDRESS" msgstr "MODEL_SERVER_ADDRESS" #: ../../getting_started/install/environment/environment.md:9 -#: 0b430bfab77d405989470d00ca3f6fe0 +#: ea42946cfe4f4ad996bf82c1996e7344 msgid "MODEL_SERVER=http://127.0.0.1:8000 LIMIT_MODEL_CONCURRENCY" msgstr "MODEL_SERVER=http://127.0.0.1:8000 LIMIT_MODEL_CONCURRENCY" #: ../../getting_started/install/environment/environment.md:12 -#: b477a25586c546729a93fb6785b7b2ec +#: 021c261231f342fdba34098b1baa06fd msgid "LIMIT_MODEL_CONCURRENCY=5" msgstr "LIMIT_MODEL_CONCURRENCY=5" #: ../../getting_started/install/environment/environment.md:14 -#: 1d6ea800af384fff9c265610f71cc94e +#: afaf0ba7fd09463d8ff74b514ed7264c msgid "MAX_POSITION_EMBEDDINGS" msgstr "MAX_POSITION_EMBEDDINGS" #: ../../getting_started/install/environment/environment.md:16 -#: 388e758ce4ea4692a4c34294cebce7f2 +#: e4517a942bca4361a64a00408f993f5b msgid "MAX_POSITION_EMBEDDINGS=4096" msgstr "MAX_POSITION_EMBEDDINGS=4096" #: ../../getting_started/install/environment/environment.md:18 -#: 16a307dce1294ceba892ff93ae4e81c0 +#: 78d2ef04ed4548b9b7b0fb8ae35c9d5c msgid "QUANTIZE_QLORA" msgstr 
"QUANTIZE_QLORA" #: ../../getting_started/install/environment/environment.md:20 -#: 93ceb2b2fcd5454b82eefb0ae8c7ae77 +#: bfa65db03c6d46bba293331f03ab15ac msgid "QUANTIZE_QLORA=True" msgstr "QUANTIZE_QLORA=True" #: ../../getting_started/install/environment/environment.md:22 -#: 15ffa35d023a4530b02a85ee6168dd4b +#: 1947d45a7f184821910b4834ad5f1897 msgid "QUANTIZE_8bit" msgstr "QUANTIZE_8bit" #: ../../getting_started/install/environment/environment.md:24 -#: 81df248ac5cb4ab0b13a711505f6a177 +#: 4a2ee2919d0e4bdaa13c9d92eefd2aac msgid "QUANTIZE_8bit=True" msgstr "QUANTIZE_8bit=True" #: ../../getting_started/install/environment/environment.md:27 -#: 15cc7b7d41ad44f0891c1189709f00f1 +#: 348dc1e411b54ab09414f40a20e934e4 msgid "LLM PROXY Settings" msgstr "LLM PROXY Settings" #: ../../getting_started/install/environment/environment.md:28 -#: e6c1115a39404f11b193a1593bc51a22 +#: a692e78425a040f5828ab54ff9a33f77 msgid "OPENAI Key" msgstr "OPENAI Key" #: ../../getting_started/install/environment/environment.md:30 -#: 8157e0a831fe4506a426822b7565e4f6 +#: 940d00e25a424acf92951a314a64e5ea msgid "PROXY_API_KEY={your-openai-sk}" msgstr "PROXY_API_KEY={your-openai-sk}" #: ../../getting_started/install/environment/environment.md:31 -#: 89b34d00bdb64e738bd9bc8c086b1f02 +#: 4bd27547ae6041679e91f2a363cd1deb msgid "PROXY_SERVER_URL=https://api.openai.com/v1/chat/completions" msgstr "PROXY_SERVER_URL=https://api.openai.com/v1/chat/completions" #: ../../getting_started/install/environment/environment.md:33 -#: 7a97df730aeb484daf19c8172e61a290 +#: cfa3071afb0b47baad6bd729d4a02cb9 msgid "from https://bard.google.com/ f12-> application-> __Secure-1PSID" msgstr "from https://bard.google.com/ f12-> application-> __Secure-1PSID" #: ../../getting_started/install/environment/environment.md:35 -#: d430ddf726a049c0a9e0a9bfd5a6fe0e +#: a17efa03b10f47f68afac9e865982a75 msgid "BARD_PROXY_API_KEY={your-bard-token}" msgstr "BARD_PROXY_API_KEY={your-bard-token}" #: ../../getting_started/install/environment/environment.md:38 -#: 23d6b0da3e7042abb55f6181c4a382d2 +#: 6bcfe90574da4d82a459e8e11bf73cba msgid "DATABASE SETTINGS" msgstr "DATABASE SETTINGS" #: ../../getting_started/install/environment/environment.md:39 -#: dbae0a2d847f41f5be9396a160ef88d0 +#: 2b1e62d9bf5d4af5a22f68c8248eaafb msgid "SQLite database (Current default database)" msgstr "SQLite database (Current default database)" #: ../../getting_started/install/environment/environment.md:40 -#: bdb55b7280c341a981e9d338cce53345 +#: 8a909ac3b3c943da8dbc4e8dd596c80c msgid "LOCAL_DB_PATH=data/default_sqlite.db" msgstr "LOCAL_DB_PATH=data/default_sqlite.db" #: ../../getting_started/install/environment/environment.md:41 -#: 739d67927a9d46b28500deba1917916b +#: 90ae6507932f4815b6e180051738bb93 msgid "LOCAL_DB_TYPE=sqlite # Database Type default:sqlite" msgstr "LOCAL_DB_TYPE=sqlite # Database Type default:sqlite" #: ../../getting_started/install/environment/environment.md:43 -#: eb4717bce6a6483b86d9780d924c5ff1 +#: d2ce34e0dcf44ccf9e8007d548ba7b0a msgid "MYSQL database" msgstr "MYSQL database" #: ../../getting_started/install/environment/environment.md:44 -#: 0f4cdf0ff5dd4ff0b397dfa88541a2e1 +#: c07159d63c334f6cbb95fcc30bfb7ea5 msgid "LOCAL_DB_TYPE=mysql" msgstr "LOCAL_DB_TYPE=mysql" #: ../../getting_started/install/environment/environment.md:45 -#: c971ead492c34487bd766300730a9cba +#: e16700b2ea8d411e91d010c1cde7aecc msgid "LOCAL_DB_USER=root" msgstr "LOCAL_DB_USER=root" #: ../../getting_started/install/environment/environment.md:46 -#: 02828b29ad044eeab890a2f8af0e5907 +#: 
bfc2dce1bf374121b6861e677b4e1ffa msgid "LOCAL_DB_PASSWORD=aa12345678" msgstr "LOCAL_DB_PASSWORD=aa12345678" #: ../../getting_started/install/environment/environment.md:47 -#: 53dc7f15b3934987b1f4c2e2d0b11299 +#: bc384739f5b04e21a34d0d2b78e7906c msgid "LOCAL_DB_HOST=127.0.0.1" msgstr "LOCAL_DB_HOST=127.0.0.1" #: ../../getting_started/install/environment/environment.md:48 -#: 1ac95fc482934247a118bab8dcebeb57 +#: e5253d452e0d42b7ac308fe6fbfb5017 msgid "LOCAL_DB_PORT=3306" msgstr "LOCAL_DB_PORT=3306" #: ../../getting_started/install/environment/environment.md:51 -#: 34e46aa926844be19c7196759b03af63 +#: 9ca8f6fe06ed4cbab390f94be252e165 msgid "EMBEDDING SETTINGS" msgstr "EMBEDDING SETTINGS" #: ../../getting_started/install/environment/environment.md:52 -#: 2b5aa08cc995495e85a1f7dc4f97b5d7 +#: 76c7c260293c4b49bae057143fd48377 msgid "EMBEDDING MODEL Name, see /pilot/configs/model_config.LLM_MODEL_CONFIG" msgstr "EMBEDDING模型, 参考see /pilot/configs/model_config.LLM_MODEL_CONFIG" #: ../../getting_started/install/environment/environment.md:53 -#: 0de0ca551ed040248406f848feca541d +#: f1d63a0128ce493cae37d34f1976bcca msgid "EMBEDDING_MODEL=text2vec" msgstr "EMBEDDING_MODEL=text2vec" #: ../../getting_started/install/environment/environment.md:55 -#: 43019fb570904c9981eb68f33e64569c +#: b8fbb99109d04781b2dd5bc5d6efa5bd msgid "Embedding Chunk size, default 500" msgstr "Embedding 切片大小, 默认500" #: ../../getting_started/install/environment/environment.md:57 -#: 7e3f93854873461286e96887e04167aa +#: bf8256576ea34f6a9c5f261ab9aab676 msgid "KNOWLEDGE_CHUNK_SIZE=500" msgstr "KNOWLEDGE_CHUNK_SIZE=500" #: ../../getting_started/install/environment/environment.md:59 -#: 9504f4a59ae74352a524b7741113e2d6 +#: 9b156c6b599b4c02a58ce023b4ff25f2 msgid "Embedding Chunk Overlap, default 100" msgstr "Embedding chunk Overlap, 文本块之间的最大重叠量。保留一些重叠可以保持文本块之间的连续性(例如使用滑动窗口),默认100" #: ../../getting_started/install/environment/environment.md:60 -#: 24e6119c2051479bbd9dba71a9c23dbe +#: dcafd903c36041ac85ac99a14dbee512 msgid "KNOWLEDGE_CHUNK_OVERLAP=100" msgstr "KNOWLEDGE_CHUNK_OVERLAP=100" #: ../../getting_started/install/environment/environment.md:62 -#: 0d180d7f2230442abee901c19526e442 -msgid "embeding recall top k,5" +#: 6c3244b7e5e24b0188c7af4bb52e9134 +#, fuzzy +msgid "embedding recall top k,5" msgstr "embedding 召回topk, 默认5" #: ../../getting_started/install/environment/environment.md:64 -#: a5bb9ab2ba50411cbbe87f7836bfbb6d +#: f4a2f30551cf4fe1a7ff3c7c74ec77be msgid "KNOWLEDGE_SEARCH_TOP_SIZE=5" msgstr "KNOWLEDGE_SEARCH_TOP_SIZE=5" #: ../../getting_started/install/environment/environment.md:66 -#: 183b8dd78cba4ae19bd2e08d69d21e0b -msgid "embeding recall max token ,2000" +#: 593f2512362f467e92fdaa60dd5903a0 +#, fuzzy +msgid "embedding recall max token ,2000" msgstr "embedding向量召回最大token, 默认2000" #: ../../getting_started/install/environment/environment.md:68 -#: ce0c711febcb44c18ae0fc858c3718d1 +#: 83d6d28914be4d6282d457272e508ddc msgid "KNOWLEDGE_SEARCH_MAX_TOKEN=5" msgstr "KNOWLEDGE_SEARCH_MAX_TOKEN=5" #: ../../getting_started/install/environment/environment.md:71 #: ../../getting_started/install/environment/environment.md:87 -#: 4cab1f399cc245b4a1a1976d2c4fc926 ec9cec667a1c4473bf9a796a26e1ce20 +#: 6bc1b9d995e74294a1c78e783c550db7 d33c77ded834438e9f4a2df06e7e041a msgid "Vector Store SETTINGS" msgstr "Vector Store SETTINGS" #: ../../getting_started/install/environment/environment.md:72 #: ../../getting_started/install/environment/environment.md:88 -#: 4dd04aadd46948a5b1dcf01fdb0ef074 bab7d512f33e40cf9e10f0da67e699c8 +#: 
9cafa06e2d584f70afd848184e0fa52a f01057251b8b4ffea806192dfe1048ed msgid "Chroma" msgstr "Chroma" #: ../../getting_started/install/environment/environment.md:73 #: ../../getting_started/install/environment/environment.md:89 -#: 13eec36741b14e028e2d3859a320826e ab3ffbcf9358401993af636ba9ab2e2d +#: e6c16fab37484769b819aeecbc13e6db faad299722e5400e95ec6ac3c1e018b8 msgid "VECTOR_STORE_TYPE=Chroma" msgstr "VECTOR_STORE_TYPE=Chroma" #: ../../getting_started/install/environment/environment.md:74 #: ../../getting_started/install/environment/environment.md:90 -#: d15b91e2a2884f23a1dd2d54783b0638 d1f856d571b547098bb0c2a18f9f1979 +#: 4eca3a51716d406f8ffd49c06550e871 581ee9dd38064b119660c44bdd00cbaa msgid "MILVUS" msgstr "MILVUS" #: ../../getting_started/install/environment/environment.md:75 #: ../../getting_started/install/environment/environment.md:91 -#: 1e165f6c934343c7808459cc7a65bc70 985dd60c2b7d4baaa6601a810a6522d7 +#: 814c93048bed46589358a854d6c99683 b72b1269a2224f5f961214e41c019f21 msgid "VECTOR_STORE_TYPE=Milvus" msgstr "VECTOR_STORE_TYPE=Milvus" #: ../../getting_started/install/environment/environment.md:76 #: ../../getting_started/install/environment/environment.md:92 -#: a1a53f051cee40ed886346a94babd75a d263e8eaee684935a58f0a4fe61c6f0e +#: 73ae665f1db9402883662734588fd02c c4da20319c994e83ba5a7706db967178 msgid "MILVUS_URL=127.0.0.1" msgstr "MILVUS_URL=127.0.0.1" #: ../../getting_started/install/environment/environment.md:77 #: ../../getting_started/install/environment/environment.md:93 -#: 2741a312db1a4c6a8a1c1d62415c5fba d03bbf921ddd4f4bb715fe5610c3d0aa +#: e30c5288516d42aa858a485db50490c1 f843b2e58bcb4e4594e3c28499c341d0 msgid "MILVUS_PORT=19530" msgstr "MILVUS_PORT=19530" #: ../../getting_started/install/environment/environment.md:78 #: ../../getting_started/install/environment/environment.md:94 -#: d0786490d38c4e4f971cc14f62fe1fc8 e9e0854873dc4c209861ee4eb77d25cd +#: 158669efcc7d4bcaac1c8dd01b499029 24e88ffd32f242f281c56c0ec3ad2639 msgid "MILVUS_USERNAME" msgstr "MILVUS_USERNAME" #: ../../getting_started/install/environment/environment.md:79 #: ../../getting_started/install/environment/environment.md:95 -#: 9a82d07153cc432ebe754b5bc02fde0d a6485c1cfa7d4069a6894c43674c8c2b +#: 111a985297184c8aa5a0dd8e14a58445 6602093a6bb24d6792548e2392105c82 msgid "MILVUS_PASSWORD" msgstr "MILVUS_PASSWORD" #: ../../getting_started/install/environment/environment.md:80 #: ../../getting_started/install/environment/environment.md:96 -#: 2f233f32b8ba408a9fbadb21fabb99ec 809b3219dd824485bc2cfc898530d708 +#: 47bdfcd78fbe4ccdb5f49b717a6d01a6 b96c0545b2044926a8a8190caf94ad25 msgid "MILVUS_SECURE=" msgstr "MILVUS_SECURE=" #: ../../getting_started/install/environment/environment.md:82 #: ../../getting_started/install/environment/environment.md:98 -#: f00603661f2b42e1bd2bca74ad1e3c31 f378e16fdec44c559e34c6929de812e8 +#: 755c32b5d6c54607907a138b5474c0ec ff4f2a7ddaa14f089dda7a14e1062c36 msgid "WEAVIATE" msgstr "WEAVIATE" #: ../../getting_started/install/environment/environment.md:83 -#: da2049ebc6874cf0a6b562e0e2fd9ec7 +#: 23b2ce83385d40a589a004709f9864be msgid "VECTOR_STORE_TYPE=Weaviate" msgstr "VECTOR_STORE_TYPE=Weaviate" #: ../../getting_started/install/environment/environment.md:84 #: ../../getting_started/install/environment/environment.md:99 -#: 25f1246629934289aad7ef01c7304097 c9fe0e413d9a4fc8abf86b3ed99e0581 +#: 9acef304d89a448a9e734346705ba872 cf5151b6c1594ccd8beb1c3f77769acb msgid "WEAVIATE_URL=https://kt-region-m8hcy0wc.weaviate.network" msgstr "WEAVIATE_URL=https://kt-region-m8hcy0wc.weaviate.network" 
#: ../../getting_started/install/environment/environment.md:102 -#: ba7c9e707f6a4cd6b99e52b58da3ab2d +#: c3003516b2364051bf34f8c3086e348a msgid "Multi-GPU Setting" msgstr "Multi-GPU Setting" #: ../../getting_started/install/environment/environment.md:103 -#: 5ca75fdf2c264b2c844d77f659b4f0b3 +#: ade8fc381c5e438aa29d159c10041713 msgid "" "See https://developer.nvidia.com/blog/cuda-pro-tip-control-gpu-" "visibility-cuda_visible_devices/ If CUDA_VISIBLE_DEVICES is not " @@ -313,49 +315,49 @@ msgstr "" "cuda_visible_devices/ 如果 CUDA_VISIBLE_DEVICES没有设置, 会使用所有可用的gpu" #: ../../getting_started/install/environment/environment.md:106 -#: de92eb310aff43fbbbf3c5a116c3b2c6 +#: e137bd19be5e410ba6709027dbf2923a msgid "CUDA_VISIBLE_DEVICES=0" msgstr "CUDA_VISIBLE_DEVICES=0" #: ../../getting_started/install/environment/environment.md:108 -#: d2641df6123a442b8e4444ad5f01a9aa +#: 7669947acbdc4b1d92bcc029a8353a5d msgid "" "Optionally, you can also specify the gpu ID to use before the starting " "command" msgstr "你也可以通过启动命令设置gpu ID" #: ../../getting_started/install/environment/environment.md:110 -#: 76c66179d11a4e5fa369421378609aae +#: 751743d1753b4051beea46371278d793 msgid "CUDA_VISIBLE_DEVICES=3,4,5,6" msgstr "CUDA_VISIBLE_DEVICES=3,4,5,6" #: ../../getting_started/install/environment/environment.md:112 -#: 29bd0f01fdf540ad98385ea8473f7647 +#: 3acc3de0af0d4df2bb575e161e377f85 msgid "You can configure the maximum memory used by each GPU." msgstr "可以设置GPU的最大内存" #: ../../getting_started/install/environment/environment.md:114 -#: 31e5e23838734ba7a2810e2387e6d6a0 +#: 67f1d9b172b84294a44ecace5436e6e0 msgid "MAX_GPU_MEMORY=16Gib" msgstr "MAX_GPU_MEMORY=16Gib" #: ../../getting_started/install/environment/environment.md:117 -#: 99aa63ab1ae049d9b94536d6a96f3443 +#: 3c69dfe48bcf46b89b76cac1e7849a66 msgid "Other Setting" msgstr "Other Setting" #: ../../getting_started/install/environment/environment.md:118 -#: 3168732183874bffb59a3575d3473d62 +#: d5015b70f4fe4d20a63de9d87f86957a msgid "Language Settings(influence prompt language)" msgstr "Language Settings(涉及prompt语言以及知识切片方式)" #: ../../getting_started/install/environment/environment.md:119 -#: 73eb0a96f29b4739bd456faa9cb5033d +#: 5543c28bb8e34c9fb3bb6b063c2b1750 msgid "LANGUAGE=en" msgstr "LANGUAGE=en" #: ../../getting_started/install/environment/environment.md:120 -#: c6646b78c6cf4d25a13108232f5b2046 +#: cb4ed5b892ee41068c1ca76cb29aa400 msgid "LANGUAGE=zh" msgstr "LANGUAGE=zh" diff --git a/docs/locales/zh_CN/LC_MESSAGES/modules/knowledge.po b/docs/locales/zh_CN/LC_MESSAGES/modules/knowledge.po index 0b3eac094..bb2dae7af 100644 --- a/docs/locales/zh_CN/LC_MESSAGES/modules/knowledge.po +++ b/docs/locales/zh_CN/LC_MESSAGES/modules/knowledge.po @@ -8,7 +8,7 @@ msgid "" msgstr "" "Project-Id-Version: DB-GPT 0.3.0\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2023-07-13 15:39+0800\n" +"POT-Creation-Date: 2023-11-02 21:04+0800\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "Last-Translator: FULL NAME \n" "Language: zh_CN\n" @@ -19,103 +19,84 @@ msgstr "" "Content-Transfer-Encoding: 8bit\n" "Generated-By: Babel 2.12.1\n" -#: ../../modules/knowledge.rst:2 ../../modules/knowledge.rst:136 -#: 3cc8fa6e9fbd4d889603d99424e9529a +#: ../../modules/knowledge.md:1 b94b3b15cb2441ed9d78abd222a717b7 msgid "Knowledge" msgstr "知识" -#: ../../modules/knowledge.rst:4 0465a393d9d541958c39c1d07c885d1f +#: ../../modules/knowledge.md:3 c6d6e308a6ce42948d29e928136ef561 #, fuzzy msgid "" "As the knowledge base is currently the most significant user demand " "scenario, we natively support the 
construction and processing of " "knowledge bases. At the same time, we also provide multiple knowledge " -"base management strategies in this project, such as pdf knowledge,md " -"knowledge, txt knowledge, word knowledge, ppt knowledge:" +"base management strategies in this project, such as:" msgstr "" "由于知识库是当前用户需求最显著的场景,我们原生支持知识库的构建和处理。同时,我们还在本项目中提供了多种知识库管理策略,如:pdf,md , " "txt, word, ppt" -#: ../../modules/knowledge.rst:6 e670cbe14d8e4da88ba935e4120c31e0 -msgid "" -"We currently support many document formats: raw text, txt, pdf, md, html," -" doc, ppt, and url. In the future, we will continue to support more types" -" of knowledge, including audio, video, various databases, and big data " -"sources. Of course, we look forward to your active participation in " -"contributing code." +#: ../../modules/knowledge.md:4 268abc408d40410ba90cf5f121dc5270 +msgid "Default built-in knowledge base" msgstr "" -#: ../../modules/knowledge.rst:9 e0bf601a1a0c458297306db6ff79f931 -msgid "**Create your own knowledge repository**" +#: ../../modules/knowledge.md:5 558c3364c38b458a8ebf81030efc2a48 +msgid "Custom addition of knowledge bases" +msgstr "" + +#: ../../modules/knowledge.md:6 9cb3ce62da1440579c095848c7aef88c +msgid "" +"Various usage scenarios such as constructing knowledge bases through " +"plugin capabilities and web crawling. Users only need to organize the " +"knowledge documents, and they can use our existing capabilities to build " +"the knowledge base required for the large model." +msgstr "" + +#: ../../modules/knowledge.md:9 b8ca6bc4dd9845baa56e36eea7fac2a2 +#, fuzzy +msgid "Create your own knowledge repository" msgstr "创建你自己的知识库" -#: ../../modules/knowledge.rst:11 bb26708135d44615be3c1824668010f6 -msgid "1.prepare" -msgstr "准备" +#: ../../modules/knowledge.md:11 17d7178a67924f43aa5b6293707ef041 +msgid "" +"1.Place personal knowledge files or folders in the pilot/datasets " +"directory." +msgstr "" -#: ../../modules/knowledge.rst:13 c150a0378f3e4625908fa0d8a25860e9 +#: ../../modules/knowledge.md:13 31c31f14bf444981939689f9a9fb038a #, fuzzy msgid "" -"We currently support many document formats: TEXT(raw text), " -"DOCUMENT(.txt, .pdf, .md, .doc, .ppt, .html), and URL." +"We currently support many document formats: txt, pdf, md, html, doc, ppt," +" and url." msgstr "当前支持txt, pdf, md, html, doc, ppt, url文档格式" -#: ../../modules/knowledge.rst:15 7f9f02a93d5d4325b3d2d976f4bb28a0 +#: ../../modules/knowledge.md:15 9ad2f2e05f8842a9b9d8469a3704df23 msgid "before execution:" msgstr "开始前" -#: ../../modules/knowledge.rst:24 59699a8385e04982a992cf0d71f6dcd5 -#, fuzzy +#: ../../modules/knowledge.md:22 6fd2775914b641c4b8e486417b558ea6 msgid "" -"2.prepare embedding model, you can download from https://huggingface.co/." -" Notice you have installed git-lfs." +"2.Update your .env, set your vector store type, VECTOR_STORE_TYPE=Chroma " +"(now only support Chroma and Milvus, if you set Milvus, please set " +"MILVUS_URL and MILVUS_PORT)" msgstr "" -"提前准备Embedding Model, 你可以在https://huggingface.co/进行下载,注意:你需要先安装git-lfs.eg:" -" git clone https://huggingface.co/THUDM/chatglm2-6b" -#: ../../modules/knowledge.rst:27 2be1a17d0b54476b9dea080d244fd747 -msgid "" -"eg: git clone https://huggingface.co/sentence-transformers/all-" -"MiniLM-L6-v2" -msgstr "eg: git clone https://huggingface.co/sentence-transformers/all-MiniLM-L6-v2" - -#: ../../modules/knowledge.rst:33 d328f6e243624c9488ebd27c9324621b -msgid "" -"3.prepare vector_store instance and vector store config, now we support " -"Chroma, Milvus and Weaviate." 
-msgstr "提前准备向量数据库环境,目前支持Chroma, Milvus and Weaviate向量数据库" - -#: ../../modules/knowledge.rst:63 44f97154eff647d399fd30b6f9e3b867 -msgid "" -"3.init Url Type EmbeddingEngine api and embedding your document into " -"vector store in your code." -msgstr "初始化 Url类型 EmbeddingEngine api, 将url文档embedding向量化到向量数据库 " - -#: ../../modules/knowledge.rst:75 e2581b414f0148bca88253c7af9cd591 -msgid "If you want to add your source_reader or text_splitter, do this:" -msgstr "如果你想手动添加你自定义的source_reader和text_splitter, 请参考:" - -#: ../../modules/knowledge.rst:95 74c110414f924bbfa3d512e45ba2f30f -#, fuzzy -msgid "" -"4.init Document Type EmbeddingEngine api and embedding your document into" -" vector store in your code. Document type can be .txt, .pdf, .md, .doc, " -".ppt." +#: ../../modules/knowledge.md:25 131c5f58898a4682940910980edb2043 +msgid "2.Run the knowledge repository initialization command" msgstr "" -"初始化 文档型类型 EmbeddingEngine api, 将文档embedding向量化到向量数据库(文档可以是.txt, .pdf, " -".md, .html, .doc, .ppt)" -#: ../../modules/knowledge.rst:108 0afd40098d5f4dfd9e44fe1d8004da25 +#: ../../modules/knowledge.md:31 2cf550f17881497bb881b19efcc18c23 msgid "" -"5.init TEXT Type EmbeddingEngine api and embedding your document into " -"vector store in your code." -msgstr "初始化TEXT类型 EmbeddingEngine api, 将文档embedding向量化到向量数据库" +"Optionally, you can run `dbgpt knowledge load --help` command to see more" +" usage." +msgstr "" -#: ../../modules/knowledge.rst:120 a66961bf3efd41fa8ea938129446f5a5 -msgid "4.similar search based on your knowledge base. ::" -msgstr "在知识库进行相似性搜索" +#: ../../modules/knowledge.md:33 c8a2ea571b944bdfbcad48fa8b54fcc9 +msgid "" +"3.Add the knowledge repository in the interface by entering the name of " +"your knowledge repository (if not specified, enter \"default\") so you " +"can use it for Q&A based on your knowledge base." +msgstr "" -#: ../../modules/knowledge.rst:126 b7066f408378450db26770f83fbd2716 +#: ../../modules/knowledge.md:35 b701170ad75e49dea7d7734c15681e0f msgid "" "Note that the default vector model used is text2vec-large-chinese (which " "is a large model, so if your personal computer configuration is not " @@ -125,48 +106,6 @@ msgstr "" "注意,这里默认向量模型是text2vec-large-chinese(模型比较大,如果个人电脑配置不够建议采用text2vec-base-" "chinese),因此确保需要将模型download下来放到models目录中。" -#: ../../modules/knowledge.rst:128 58481d55cab74936b6e84b24c39b1674 -#, fuzzy -msgid "" -"`pdf_embedding <./knowledge/pdf/pdf_embedding.html>`_: supported pdf " -"embedding." -msgstr "pdf_embedding <./knowledge/pdf_embedding.html>`_: supported pdf embedding." - -#: ../../modules/knowledge.rst:129 fbb013c4f1bc46af910c91292f6690cf -#, fuzzy -msgid "" -"`markdown_embedding <./knowledge/markdown/markdown_embedding.html>`_: " -"supported markdown embedding." -msgstr "pdf_embedding <./knowledge/pdf_embedding.html>`_: supported pdf embedding." - -#: ../../modules/knowledge.rst:130 59d45732f4914d16b4e01aee0992edf7 -#, fuzzy -msgid "" -"`word_embedding <./knowledge/word/word_embedding.html>`_: supported word " -"embedding." -msgstr "pdf_embedding <./knowledge/pdf_embedding.html>`_: supported pdf embedding." - -#: ../../modules/knowledge.rst:131 df0e6f311861423e885b38e020a7c0f0 -#, fuzzy -msgid "" -"`url_embedding <./knowledge/url/url_embedding.html>`_: supported url " -"embedding." -msgstr "pdf_embedding <./knowledge/pdf_embedding.html>`_: supported pdf embedding." - -#: ../../modules/knowledge.rst:132 7c550c1f5bc34fe9986731fb465e12cd -#, fuzzy -msgid "" -"`ppt_embedding <./knowledge/ppt/ppt_embedding.html>`_: supported ppt " -"embedding." 
-msgstr "pdf_embedding <./knowledge/pdf_embedding.html>`_: supported pdf embedding." - -#: ../../modules/knowledge.rst:133 8648684cb191476faeeb548389f79050 -#, fuzzy -msgid "" -"`string_embedding <./knowledge/string/string_embedding.html>`_: supported" -" raw text embedding." -msgstr "pdf_embedding <./knowledge/pdf_embedding.html>`_: supported pdf embedding." - #~ msgid "before execution: python -m spacy download zh_core_web_sm" #~ msgstr "在执行之前请先执行python -m spacy download zh_core_web_sm" @@ -201,3 +140,112 @@ msgstr "pdf_embedding <./knowledge/pdf_embedding.html>`_: supported pdf embeddin #~ "and MILVUS_PORT)" #~ msgstr "2.更新你的.env,设置你的向量存储类型,VECTOR_STORE_TYPE=Chroma(现在只支持Chroma和Milvus,如果你设置了Milvus,请设置MILVUS_URL和MILVUS_PORT)" +#~ msgid "" +#~ "We currently support many document " +#~ "formats: raw text, txt, pdf, md, " +#~ "html, doc, ppt, and url. In the" +#~ " future, we will continue to support" +#~ " more types of knowledge, including " +#~ "audio, video, various databases, and big" +#~ " data sources. Of course, we look " +#~ "forward to your active participation in" +#~ " contributing code." +#~ msgstr "" + +#~ msgid "1.prepare" +#~ msgstr "准备" + +#~ msgid "" +#~ "2.prepare embedding model, you can " +#~ "download from https://huggingface.co/. Notice " +#~ "you have installed git-lfs." +#~ msgstr "" +#~ "提前准备Embedding Model, 你可以在https://huggingface.co/进行下载,注意" +#~ ":你需要先安装git-lfs.eg: git clone " +#~ "https://huggingface.co/THUDM/chatglm2-6b" + +#~ msgid "" +#~ "eg: git clone https://huggingface.co/sentence-" +#~ "transformers/all-MiniLM-L6-v2" +#~ msgstr "" +#~ "eg: git clone https://huggingface.co/sentence-" +#~ "transformers/all-MiniLM-L6-v2" + +#~ msgid "" +#~ "3.prepare vector_store instance and vector " +#~ "store config, now we support Chroma, " +#~ "Milvus and Weaviate." +#~ msgstr "提前准备向量数据库环境,目前支持Chroma, Milvus and Weaviate向量数据库" + +#~ msgid "" +#~ "3.init Url Type EmbeddingEngine api and" +#~ " embedding your document into vector " +#~ "store in your code." +#~ msgstr "初始化 Url类型 EmbeddingEngine api, 将url文档embedding向量化到向量数据库 " + +#~ msgid "If you want to add your source_reader or text_splitter, do this:" +#~ msgstr "如果你想手动添加你自定义的source_reader和text_splitter, 请参考:" + +#~ msgid "" +#~ "4.init Document Type EmbeddingEngine api " +#~ "and embedding your document into vector" +#~ " store in your code. Document type" +#~ " can be .txt, .pdf, .md, .doc, " +#~ ".ppt." +#~ msgstr "" +#~ "初始化 文档型类型 EmbeddingEngine api, " +#~ "将文档embedding向量化到向量数据库(文档可以是.txt, .pdf, .md, .html," +#~ " .doc, .ppt)" + +#~ msgid "" +#~ "5.init TEXT Type EmbeddingEngine api and" +#~ " embedding your document into vector " +#~ "store in your code." +#~ msgstr "初始化TEXT类型 EmbeddingEngine api, 将文档embedding向量化到向量数据库" + +#~ msgid "4.similar search based on your knowledge base. ::" +#~ msgstr "在知识库进行相似性搜索" + +#~ msgid "" +#~ "`pdf_embedding <./knowledge/pdf/pdf_embedding.html>`_: " +#~ "supported pdf embedding." +#~ msgstr "" +#~ "pdf_embedding <./knowledge/pdf_embedding.html>`_: " +#~ "supported pdf embedding." + +#~ msgid "" +#~ "`markdown_embedding " +#~ "<./knowledge/markdown/markdown_embedding.html>`_: supported " +#~ "markdown embedding." +#~ msgstr "" +#~ "pdf_embedding <./knowledge/pdf_embedding.html>`_: " +#~ "supported pdf embedding." + +#~ msgid "" +#~ "`word_embedding <./knowledge/word/word_embedding.html>`_: " +#~ "supported word embedding." +#~ msgstr "" +#~ "pdf_embedding <./knowledge/pdf_embedding.html>`_: " +#~ "supported pdf embedding." 
+ +#~ msgid "" +#~ "`url_embedding <./knowledge/url/url_embedding.html>`_: " +#~ "supported url embedding." +#~ msgstr "" +#~ "pdf_embedding <./knowledge/pdf_embedding.html>`_: " +#~ "supported pdf embedding." + +#~ msgid "" +#~ "`ppt_embedding <./knowledge/ppt/ppt_embedding.html>`_: " +#~ "supported ppt embedding." +#~ msgstr "" +#~ "pdf_embedding <./knowledge/pdf_embedding.html>`_: " +#~ "supported pdf embedding." + +#~ msgid "" +#~ "`string_embedding <./knowledge/string/string_embedding.html>`_:" +#~ " supported raw text embedding." +#~ msgstr "" +#~ "pdf_embedding <./knowledge/pdf_embedding.html>`_: " +#~ "supported pdf embedding." +