From 9e7a68349a770e7cff8d24168d6cc378d9756ec3 Mon Sep 17 00:00:00 2001
From: csunny
Date: Tue, 9 May 2023 00:15:19 +0800
Subject: [PATCH 1/2] update config

---
 pilot/agent/agent_manager.py | 30 +++++++++++++++++++++++++-----
 pilot/configs/config.py      | 22 ++++++++++++++++++++--
 pilot/model/base.py          | 11 +++++++++++
 pilot/model/chat.py          |  3 +++
 4 files changed, 59 insertions(+), 7 deletions(-)
 create mode 100644 pilot/model/base.py
 create mode 100644 pilot/model/chat.py

diff --git a/pilot/agent/agent_manager.py b/pilot/agent/agent_manager.py
index ef33f36da..7934a112e 100644
--- a/pilot/agent/agent_manager.py
+++ b/pilot/agent/agent_manager.py
@@ -2,14 +2,34 @@
 # -*- coding:utf-8 -*-
 
 from pilot.singleton import Singleton
+from pilot.configs.config import Config
 
 class AgentManager(metaclass=Singleton):
-    """Agent manager for managing DB-GPT agents"""
-    def __init__(self) -> None:
-
-        self.agents = {} #TODO need to define
+    """Agent manager for managing DB-GPT agents
+    In order to stay compatible with Auto-GPT plugins,
+    we use the same agent template as Auto-GPT.
+
+    Attributes: next_key
+        agents
+        cfg
+    """
 
-    def create_agent(self):
+    def __init__(self) -> None:
+        self.next_key = 0
+        self.agents = {}  # TODO need to define
+        self.cfg = Config()
+
+    def create_agent(self, task: str, prompt: str, model: str) -> tuple[int, str]:
+        """Create a new agent and return its key
+
+        Args:
+            task: The task to perform
+            prompt: The prompt to use
+            model: The model to use
+
+        Returns:
+            The key of the new agent
+        """
         pass
 
     def message_agent(self):
diff --git a/pilot/configs/config.py b/pilot/configs/config.py
index a66402c17..5876149cf 100644
--- a/pilot/configs/config.py
+++ b/pilot/configs/config.py
@@ -1,6 +1,9 @@
 #!/usr/bin/env python3
 # -*- coding: utf-8 -*-
 
+import os
+from typing import List
+
 from auto_gpt_plugin_template import AutoGPTPluginTemplate
 
 from pilot.singleton import Singleton
@@ -8,7 +11,22 @@ class Config(metaclass=Singleton):
     """Configuration class to store the state of bools for different scripts access"""
 
     def __init__(self) -> None:
         """Initialize the Config class"""
-        pass
+        # TODO change model_config there
+        self.debug_mode = False
+        self.execute_local_commands = (
+            os.getenv("EXECUTE_LOCAL_COMMANDS", "False") == "True"
+        )
 
-    # TODO change model_config there
\ No newline at end of file
+        self.plugins_dir = os.getenv("PLUGINS_DIR", 'plugins')
+        self.plugins:List[AutoGPTPluginTemplate] = []
+
+    def set_debug_mode(self, value: bool) -> None:
+        """Set the debug mode value."""
+        self.debug_mode = value
+
+    def set_plugins(self, value: List[AutoGPTPluginTemplate]) -> None:
+        """Set the plugins value."""
+        self.plugins = value
+
+    
\ No newline at end of file
diff --git a/pilot/model/base.py b/pilot/model/base.py
new file mode 100644
index 000000000..8199198eb
--- /dev/null
+++ b/pilot/model/base.py
@@ -0,0 +1,11 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+
+from typing import List, TypedDict
+
+class Message(TypedDict):
+    """LLM message object, typically a (role, content) pair"""
+
+    role: str
+    content: str
+
diff --git a/pilot/model/chat.py b/pilot/model/chat.py
new file mode 100644
index 000000000..97206f2d5
--- /dev/null
+++ b/pilot/model/chat.py
@@ -0,0 +1,3 @@
+#!/usr/bin/env python3
+# -*- coding:utf-8 -*-
+
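
For reference, a minimal usage sketch of the Config singleton introduced above (not part of the patch; it assumes the patched pilot package is importable and that the environment variables are read once, when the singleton is first constructed):

    import os

    from pilot.configs.config import Config

    os.environ["EXECUTE_LOCAL_COMMANDS"] = "True"  # must be set before first construction
    cfg = Config()

    print(cfg.execute_local_commands)  # True
    print(cfg.plugins_dir)             # "plugins" unless PLUGINS_DIR is set

    # The Singleton metaclass means every call returns the same instance.
    assert Config() is cfg

    cfg.set_debug_mode(True)
    cfg.set_plugins([])  # plugin loading itself is not part of this patch
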
From 71fa957b580e6ef0b62c54823a4aab796771610e Mon Sep 17 00:00:00 2001
From: csunny
Date: Tue, 9 May 2023 00:41:46 +0800
Subject: [PATCH 2/2] impl llm utils

---
 pilot/agent/agent_manager.py | 13 ++++++++++++-
 pilot/configs/config.py      |  5 +++++
 pilot/model/llm_utils.py     | 47 ++++++++++++++++++++++++++++++++++++
 3 files changed, 64 insertions(+), 1 deletion(-)
 create mode 100644 pilot/model/llm_utils.py

diff --git a/pilot/agent/agent_manager.py b/pilot/agent/agent_manager.py
index 7934a112e..a62e58d23 100644
--- a/pilot/agent/agent_manager.py
+++ b/pilot/agent/agent_manager.py
@@ -3,6 +3,8 @@
 
 from pilot.singleton import Singleton
 from pilot.configs.config import Config
+from typing import List
+from pilot.model.base import Message
 
 class AgentManager(metaclass=Singleton):
     """Agent manager for managing DB-GPT agents
@@ -30,7 +32,16 @@ class AgentManager(metaclass=Singleton):
         Returns:
             The key of the new agent
         """
-        pass
+        messages: List[Message] = [
+            {"role": "user", "content": prompt},
+        ]
+
+        for plugin in self.cfg.plugins:
+            if not plugin.can_handle_pre_instruction():
+                continue
+            if plugin_messages := plugin.pre_instruction(messages):
+                messages.extend(iter(plugin_messages))
+        #
 
     def message_agent(self):
         pass
diff --git a/pilot/configs/config.py b/pilot/configs/config.py
index 5876149cf..709de4b69 100644
--- a/pilot/configs/config.py
+++ b/pilot/configs/config.py
@@ -17,6 +17,7 @@ class Config(metaclass=Singleton):
         self.execute_local_commands = (
             os.getenv("EXECUTE_LOCAL_COMMANDS", "False") == "True"
         )
+        self.temperature = float(os.getenv("TEMPERATURE", "0.7"))
 
         self.plugins_dir = os.getenv("PLUGINS_DIR", 'plugins')
         self.plugins:List[AutoGPTPluginTemplate] = []
@@ -29,4 +30,8 @@ class Config(metaclass=Singleton):
         """Set the plugins value."""
         self.plugins = value
 
+    def set_temperature(self, value: float) -> None:
+        """Set the temperature value."""
+        self.temperature = value
+
     
\ No newline at end of file
diff --git a/pilot/model/llm_utils.py b/pilot/model/llm_utils.py
new file mode 100644
index 000000000..ee4e3b6e9
--- /dev/null
+++ b/pilot/model/llm_utils.py
@@ -0,0 +1,47 @@
+#!/usr/bin/env python3
+# -*- coding:utf-8 -*-
+
+from typing import List, Optional
+from pilot.model.base import Message
+from pilot.configs.config import Config
+from pilot.server.vicuna_server import generate_output
+
+def create_chat_completion(
+    messages: List[Message],  # type: ignore
+    model: Optional[str] = None,
+    temperature: Optional[float] = None,
+    max_tokens: Optional[int] = None,
+) -> str:
+    """Create a chat completion using the local Vicuna model
+
+    Args:
+        messages(List[Message]): The messages to send to the chat completion
+        model (str, optional): The model to use. Defaults to None.
+        temperature (float, optional): The temperature to use. Defaults to 0.7.
+        max_tokens (int, optional): The max tokens to use. Defaults to None.
+
+    Returns:
+        str: The response from the chat completion
+    """
+    cfg = Config()
+    if temperature is None:
+        temperature = cfg.temperature
+
+    for plugin in cfg.plugins:
+        if plugin.can_handle_chat_completion(
+            messages=messages,
+            model=model,
+            temperature=temperature,
+            max_tokens=max_tokens,
+        ):
+            message = plugin.handle_chat_completion(
+                messages=messages,
+                model=model,
+                temperature=temperature,
+                max_tokens=max_tokens,
+            )
+            if message is not None:
+                return message
+
+    response = None
+    # TODO: implement this using the vicuna server API
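
To show how the pieces above fit together, a small usage sketch of create_chat_completion (not part of the patch): it builds Message dicts as defined in pilot/model/base.py and relies on the temperature fallback added to Config. Until the trailing TODO is implemented against the vicuna server, the function only returns a value when a plugin handles the completion.

    from typing import List

    from pilot.model.base import Message
    from pilot.model.llm_utils import create_chat_completion

    # Messages follow the TypedDict from pilot/model/base.py.
    messages: List[Message] = [
        {"role": "user", "content": "Which tables exist in the analytics database?"},
    ]

    # temperature is omitted, so it falls back to cfg.temperature
    # (the TEMPERATURE env var, default 0.7).
    reply = create_chat_completion(messages=messages, max_tokens=256)
    print(reply)  # None unless a plugin handled the completion (see the TODO above)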