diff --git a/docs/conf.py b/docs/conf.py index 1fc61dc1e..51a8b65b9 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -14,7 +14,7 @@ project = "DB-GPT" copyright = "2023, csunny" author = "csunny" -version = "👏👏 0.3.6" +version = "👏👏 0.3.8" html_title = project + " " + version # -- General configuration --------------------------------------------------- diff --git a/docs/getting_started/faq/llm/llm_faq.md b/docs/getting_started/faq/llm/llm_faq.md index bf5d9a3fa..901f0ca98 100644 --- a/docs/getting_started/faq/llm/llm_faq.md +++ b/docs/getting_started/faq/llm/llm_faq.md @@ -51,7 +51,7 @@ Llama-2-70b with 8-bit quantization can run with 80 GB of VRAM, and 4-bit quanti Note: you need to install the latest dependencies according to [requirements.txt](https://github.com/eosphoros-ai/DB-GPT/blob/main/requirements.txt). -##### Q5 How to Add LLM Service dynamic +##### Q5 How to Add LLM Service dynamic local mode Now DB-GPT through multi-llm service switch, so how to add llm service dynamic, @@ -64,8 +64,21 @@ eg: dbgpt model start --model_name chatglm2-6b --model_path /root/DB-GPT/models/ chatgpt eg: dbgpt model start --model_name chatgpt_proxyllm --model_path chatgpt_proxyllm --proxy_api_key ${OPENAI_KEY} --proxy_server_url {OPENAI_URL} ``` +##### Q6 How to Add LLM Service dynamic in remote mode +If you deploy llm service in remote machine instance, and you want to add model service to dbgpt server to manage -##### Q6 dbgpt command not found +use dbgpt start worker and set --controller_addr. + +```commandline +eg: dbgpt start worker --model_name vicuna-13b-v1.5 \ +--model_path /app/models/vicuna-13b-v1.5 \ +--port 8002 \ +--controller_addr http://127.0.0.1:8000 + +``` + + +##### Q7 dbgpt command not found ```commandline pip install -e . 
diff --git a/docs/locales/zh_CN/LC_MESSAGES/getting_started/faq/llm/llm_faq.po b/docs/locales/zh_CN/LC_MESSAGES/getting_started/faq/llm/llm_faq.po index 4670537c2..2f127555b 100644 --- a/docs/locales/zh_CN/LC_MESSAGES/getting_started/faq/llm/llm_faq.po +++ b/docs/locales/zh_CN/LC_MESSAGES/getting_started/faq/llm/llm_faq.po @@ -8,7 +8,7 @@ msgid "" msgstr "" "Project-Id-Version: DB-GPT 👏👏 0.3.5\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2023-09-13 23:04+0800\n" +"POT-Creation-Date: 2023-09-14 14:35+0800\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "Last-Translator: FULL NAME \n" "Language: zh_CN\n" @@ -120,7 +120,8 @@ msgid "" msgstr "" #: ../../getting_started/faq/llm/llm_faq.md:54 5fe0d9ced7e848799f4d7bce92a5c130 -msgid "Q5 How to Add LLM Service dynamic" +#, fuzzy +msgid "Q5 How to Add LLM Service dynamic local mode" msgstr "Q5 怎样动态新增模型服务" #: ../../getting_started/faq/llm/llm_faq.md:56 fd921148e3e547beb6c74035a6b6a8b0 @@ -129,7 +130,23 @@ msgid "" "dynamic," msgstr "DB-GPT支持多个模型服务切换, 怎样添加一个模型服务呢" -#: ../../getting_started/faq/llm/llm_faq.md:68 f8c024339da447ce8160a4eb9f87c125 -msgid "Q6 dbgpt command not found" +#: ../../getting_started/faq/llm/llm_faq.md:67 5fe0d9ced7e848799f4d7bce92a5c130 +#, fuzzy +msgid "Q6 How to Add LLM Service dynamic in remote mode" +msgstr "Q5 怎样动态新增模型服务" + +#: ../../getting_started/faq/llm/llm_faq.md:68 bd29cd6d29a64908af15b391d73ea82a +msgid "" +"If you deploy llm service in remote machine instance, and you want to " +"add model service to dbgpt server to manage" +msgstr "如果你想在远程机器实例部署大模型服务并添加到本地dbgpt_server进行管理" + +#: ../../getting_started/faq/llm/llm_faq.md:70 ace16dfc4326431dbe4a9a32e4a83ba4 +msgid "use dbgpt start worker and set --controller_addr." 
+msgstr "使用`dbgpt start worker`命令并设置注册地址--controller_addr" + +#: ../../getting_started/faq/llm/llm_faq.md:81 f8c024339da447ce8160a4eb9f87c125 +#, fuzzy +msgid "Q7 dbgpt command not found" msgstr "Q7 dbgpt command not found" diff --git a/setup.py b/setup.py index d717b0bc6..4c5bd0902 100644 --- a/setup.py +++ b/setup.py @@ -347,7 +347,7 @@ init_install_requires() setuptools.setup( name="db-gpt", packages=find_packages(exclude=("tests", "*.tests", "*.tests.*", "examples")), - version="0.3.6", + version="0.3.8", author="csunny", author_email="cfqcsunny@gmail.com", description="DB-GPT is an experimental open-source project that uses localized GPT large models to interact with your data and environment."