diff --git a/docs/modules/prompts.md b/docs/modules/prompts.md
index 647b93658..2e8f5181d 100644
--- a/docs/modules/prompts.md
+++ b/docs/modules/prompts.md
@@ -1,3 +1,5 @@
 # Prompts
 
-Prompt is a very important part of the interaction between the large model and the user, and to a certain extent, it determines the quality and accuracy of the answer generated by the large model. In this project, we will automatically optimize the corresponding prompt according to user input and usage scenarios, making it easier and more efficient for users to use large language models.
\ No newline at end of file
+The prompt is a key part of the interaction between a large language model and the user, and to a large extent it determines the quality and accuracy of the model's answers. In this project, we automatically optimize the prompt based on user input and the usage scenario, making large language models easier and more efficient to use.
+
+### 1. DB-GPT Prompt
\ No newline at end of file
diff --git a/pilot/common/plugins.py b/pilot/common/plugins.py
index 09931c90e..2a142d513 100644
--- a/pilot/common/plugins.py
+++ b/pilot/common/plugins.py
@@ -74,6 +74,9 @@ def create_directory_if_not_exists(directory_path: str) -> bool:
 
 
 def load_native_plugins(cfg: Config):
+    if not cfg.plugins_auto_load:
+        print("not auto load_native_plugins")
+        return
     print("load_native_plugins")
     ### TODO: pull the main branch by default for now; pull released versions later
     branch_name = cfg.plugins_git_branch
diff --git a/pilot/configs/config.py b/pilot/configs/config.py
index 06b91e33b..d4ed13f22 100644
--- a/pilot/configs/config.py
+++ b/pilot/configs/config.py
@@ -90,6 +90,7 @@ class Config(metaclass=Singleton):
         ### The associated configuration parameters of the plug-in control the loading and use of the plug-in
         self.plugins: List[AutoGPTPluginTemplate] = []
         self.plugins_openai = []
+        self.plugins_auto_load = os.getenv("AUTO_LOAD_PLUGIN", "False") == "True"
 
         self.plugins_git_branch = os.getenv("PLUGINS_GIT_BRANCH", "plugin_dashboard")
 
@@ -154,8 +155,8 @@ class Config(metaclass=Singleton):
 
         ### EMBEDDING Configuration
         self.EMBEDDING_MODEL = os.getenv("EMBEDDING_MODEL", "text2vec")
-        self.KNOWLEDGE_CHUNK_SIZE = int(os.getenv("KNOWLEDGE_CHUNK_SIZE", 500))
-        self.KNOWLEDGE_SEARCH_TOP_SIZE = int(os.getenv("KNOWLEDGE_SEARCH_TOP_SIZE", 10))
+        self.KNOWLEDGE_CHUNK_SIZE = int(os.getenv("KNOWLEDGE_CHUNK_SIZE", 100))
+        self.KNOWLEDGE_SEARCH_TOP_SIZE = int(os.getenv("KNOWLEDGE_SEARCH_TOP_SIZE", 5))
 
         ### SUMMARY_CONFIG Configuration
         self.SUMMARY_CONFIG = os.getenv("SUMMARY_CONFIG", "VECTOR")
diff --git a/plugins/DB-GPT-Plugins-plugin_dashboard-20230613224150.zip b/plugins/DB-GPT-Plugins-plugin_dashboard-20230613224150.zip
new file mode 100644
index 000000000..11a669c92
Binary files /dev/null and b/plugins/DB-GPT-Plugins-plugin_dashboard-20230613224150.zip differ
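
A note on the behavior change above: `load_native_plugins` now returns early unless `Config.plugins_auto_load` is set, and that flag is read from the `AUTO_LOAD_PLUGIN` environment variable with a default of `"False"`. Below is a minimal sketch of the flag semantics; the helper name `is_plugin_auto_load_enabled` is illustrative and not part of the codebase.

```python
import os

def is_plugin_auto_load_enabled() -> bool:
    # Illustrative helper (not in the codebase): mirrors the Config logic above.
    # Only the exact string "True" enables auto-loading; the comparison is
    # case-sensitive, so "true" or "1" leave it disabled.
    return os.getenv("AUTO_LOAD_PLUGIN", "False") == "True"

# A deployment that wants native plugins pulled automatically opts in:
os.environ["AUTO_LOAD_PLUGIN"] = "True"
print(is_plugin_auto_load_enabled())  # True

# Any other value (or leaving the variable unset) keeps auto-loading off:
os.environ["AUTO_LOAD_PLUGIN"] = "true"
print(is_plugin_auto_load_enabled())  # False
```

Because the default is `"False"`, deployments that previously relied on plugins being pulled automatically at startup now need to set `AUTO_LOAD_PLUGIN=True` explicitly.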
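
The embedding defaults also change here: `KNOWLEDGE_CHUNK_SIZE` drops from 500 to 100 and `KNOWLEDGE_SEARCH_TOP_SIZE` from 10 to 5. The sketch below (assumed usage, not code from the repository) shows that the previous values can still be restored per deployment through the same environment variables, provided they are set before the `Config` singleton is constructed.

```python
import os

# Pin the previous defaults explicitly; the new values (100 and 5) only apply
# when these environment variables are unset.
os.environ["KNOWLEDGE_CHUNK_SIZE"] = "500"
os.environ["KNOWLEDGE_SEARCH_TOP_SIZE"] = "10"

# Same lookups as in Config above:
chunk_size = int(os.getenv("KNOWLEDGE_CHUNK_SIZE", 100))
top_k = int(os.getenv("KNOWLEDGE_SEARCH_TOP_SIZE", 5))
print(chunk_size, top_k)  # 500 10
```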