Repository: https://github.com/csunny/DB-GPT.git
Commit: 517737d730 (parent: d11ec46ee5)

style:fmt
@@ -52,7 +52,18 @@ class RDBMSDatabase(BaseConnect):
         custom_table_info: Optional[dict] = None,
         view_support: bool = False,
     ):
-        """Create engine from database URI."""
+        """Create engine from database URI.
+        Args:
+           - engine: Engine sqlalchemy.engine
+           - schema: Optional[str].
+           - metadata: Optional[MetaData]
+           - ignore_tables: Optional[List[str]]
+           - include_tables: Optional[List[str]]
+           - sample_rows_in_table_info: int default:3,
+           - indexes_in_table_info: bool = False,
+           - custom_table_info: Optional[dict] = None,
+           - view_support: bool = False,
+        """
         self._engine = engine
         self._schema = schema
         if include_tables and ignore_tables:
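The new docstring enumerates the constructor parameters. For orientation, here is a minimal, hypothetical sketch of how those arguments might be passed when building the connector by hand; the import path, MySQL URL, and table names are assumptions for illustration, not taken from this commit.

    # Hypothetical usage of the documented constructor arguments.
    from sqlalchemy import MetaData, create_engine

    from pilot.connections.rdbms.base import RDBMSDatabase  # assumed module path

    engine = create_engine("mysql+pymysql://user:pwd@127.0.0.1:3306/demo_db")
    db = RDBMSDatabase(
        engine,
        schema="demo_db",
        metadata=MetaData(),
        ignore_tables=None,
        include_tables=["user", "orders"],   # the hunk checks these two are not combined
        sample_rows_in_table_info=3,
        indexes_in_table_info=False,
        custom_table_info=None,
        view_support=False,
    )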
@@ -92,6 +103,15 @@ class RDBMSDatabase(BaseConnect):
         engine_args: Optional[dict] = None,
         **kwargs: Any,
     ) -> RDBMSDatabase:
+        """Construct a SQLAlchemy engine from uri database.
+        Args:
+            host (str): database host.
+            port (int): database port.
+            user (str): database user.
+            pwd (str): database password.
+            db_name (str): database name.
+            engine_args (Optional[dict]):other engine_args.
+        """
         db_url: str = (
             cls.driver
             + "://"
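The added docstring documents the pieces that get concatenated into the connection URI; the hunk cuts off right after `cls.driver + "://"`. A rough stand-alone sketch of that kind of assembly, where the quoting helper and the exact ordering are assumptions rather than the project's code:

    # Sketch of assembling a SQLAlchemy URL from the documented parameters.
    from urllib.parse import quote_plus

    driver = "mysql+pymysql"          # stand-in for the class-level cls.driver
    user, pwd = "root", "p@ss:word"
    host, port, db_name = "127.0.0.1", 3306, "demo_db"

    db_url = (
        driver
        + "://"
        + quote_plus(user)
        + ":"
        + quote_plus(pwd)
        + "@"
        + host
        + ":"
        + str(port)
        + "/"
        + db_name
    )
    print(db_url)  # mysql+pymysql://root:p%40ss%3Aword@127.0.0.1:3306/demo_db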
@@ -21,6 +21,12 @@ CFG = Config()


 class BaseChat(ABC):
+    """DB-GPT Chat Service Base Module
+    Include:
+        stream_call():scene + prompt -> stream response
+        nostream_call():scene + prompt -> nostream response
+    """
+
     chat_scene: str = None
     llm_model: Any = None
     # By default, keep the last two rounds of conversation records as the context
@@ -32,6 +38,14 @@ class BaseChat(ABC):
         arbitrary_types_allowed = True

     def __init__(self, chat_param: Dict):
+        """Chat Module Initialization
+        Args:
+           - chat_param: Dict
+            - chat_session_id: (str) chat session_id
+            - current_user_input: (str) current user input
+            - model_name:(str) llm model name
+            - select_param:(str) select param
+        """
         self.chat_session_id = chat_param["chat_session_id"]
         self.chat_mode = chat_param["chat_mode"]
         self.current_user_input: str = chat_param["current_user_input"]
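Taken together, the documented Args describe the dictionary every BaseChat subclass receives. A hypothetical example of such a dict, with invented values; as the later hunks show, select_param changes meaning per scene (dbname, file path, plugin list, or knowledge space name):

    # Illustrative chat_param for a BaseChat subclass; all values are made up.
    chat_param = {
        "chat_session_id": "session-0001",   # (str) chat session_id
        "chat_mode": None,                   # filled in by the subclass with a ChatScene
        "current_user_input": "show me last week's sales",
        "model_name": "vicuna-13b",          # (str) llm model name
        "select_param": "demo_db",           # scene-specific: dbname / file path / plugins / space name
    }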
@@ -18,10 +18,20 @@ logger = logging.getLogger("chat_agent")


 class ChatAgent(BaseChat):
+    """Chat With Agent through plugin"""
+
     chat_scene: str = ChatScene.ChatAgent.value()
     chat_retention_rounds = 0

     def __init__(self, chat_param: Dict):
+        """Chat Agent Module Initialization
+        Args:
+           - chat_param: Dict
+            - chat_session_id: (str) chat session_id
+            - current_user_input: (str) current user input
+            - model_name:(str) llm model name
+            - select_param:(str) agent plugin
+        """
         if not chat_param["select_param"]:
             raise ValueError("Please select a Plugin!")
         self.select_plugins = chat_param["select_param"].split(",")
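Per this hunk, ChatAgent expects select_param to carry the chosen plugin(s) and splits the string on commas, so several plugins can be passed as a comma-separated list. A small sketch with invented plugin names; only the comma-split behaviour comes from the hunk:

    # Invented plugin names; the split(",") mirrors the __init__ body above.
    select_param = "search_engine_plugin,weather_plugin"
    if not select_param:
        raise ValueError("Please select a Plugin!")
    select_plugins = select_param.split(",")
    print(select_plugins)  # ['search_engine_plugin', 'weather_plugin']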
@@ -19,10 +19,17 @@ CFG = Config()
 class ChatDashboard(BaseChat):
     chat_scene: str = ChatScene.ChatDashboard.value()
     report_name: str
-    """Number of results to return from the query"""
+    """Chat Dashboard to generate dashboard chart"""

     def __init__(self, chat_param: Dict):
-        """ """
+        """Chat Dashboard Module Initialization
+        Args:
+           - chat_param: Dict
+            - chat_session_id: (str) chat session_id
+            - current_user_input: (str) current user input
+            - model_name:(str) llm model name
+            - select_param:(str) dbname
+        """
         self.db_name = chat_param["select_param"]
         chat_param["chat_mode"] = ChatScene.ChatDashboard
         super().__init__(chat_param=chat_param)
@@ -19,10 +19,20 @@ CFG = Config()


 class ChatExcel(BaseChat):
+    """a Excel analyzer to analyze Excel Data"""
+
     chat_scene: str = ChatScene.ChatExcel.value()
     chat_retention_rounds = 1

     def __init__(self, chat_param: Dict):
+        """Chat Excel Module Initialization
+        Args:
+           - chat_param: Dict
+            - chat_session_id: (str) chat session_id
+            - current_user_input: (str) current user input
+            - model_name:(str) llm model name
+            - select_param:(str) file path
+        """
         chat_mode = ChatScene.ChatExcel

         self.select_param = chat_param["select_param"]
@@ -15,6 +15,14 @@ class ChatWithDbAutoExecute(BaseChat):
     """Number of results to return from the query"""

     def __init__(self, chat_param: Dict):
+        """Chat Data Module Initialization
+        Args:
+           - chat_param: Dict
+            - chat_session_id: (str) chat session_id
+            - current_user_input: (str) current user input
+            - model_name:(str) llm model name
+            - select_param:(str) dbname
+        """
         chat_mode = ChatScene.ChatWithDbExecute
         self.db_name = chat_param["select_param"]
         chat_param["chat_mode"] = chat_mode
@@ -31,6 +39,9 @@ class ChatWithDbAutoExecute(BaseChat):
         self.top_k: int = 200

     def generate_input_values(self):
+        """
+        generate input values
+        """
         try:
             from pilot.summary.db_summary_client import DBSummaryClient
         except ImportError:
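generate_input_values() imports DBSummaryClient lazily inside a try/except ImportError, so the summary dependency is only needed when this scene actually runs. A generic sketch of that guarded-import pattern, where the helper name and error message are assumptions rather than the project's wording:

    # Sketch of the lazy-import pattern used in generate_input_values().
    def load_db_summary_client():
        try:
            from pilot.summary.db_summary_client import DBSummaryClient
        except ImportError as exc:
            raise ImportError(
                "DBSummaryClient is needed for ChatWithDbAutoExecute; "
                "make sure the pilot summary package is importable."
            ) from exc
        return DBSummaryClient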
@@ -12,10 +12,17 @@ CFG = Config()
 class ChatWithDbQA(BaseChat):
     chat_scene: str = ChatScene.ChatWithDbQA.value()

-    """Number of results to return from the query"""
+    """As a DBA, Chat DB Module, chat with combine DB meta schema """

     def __init__(self, chat_param: Dict):
-        """ """
+        """Chat DB Module Initialization
+        Args:
+           - chat_param: Dict
+            - chat_session_id: (str) chat session_id
+            - current_user_input: (str) current user input
+            - model_name:(str) llm model name
+            - select_param:(str) dbname
+        """
         self.db_name = chat_param["select_param"]
         chat_param["chat_mode"] = ChatScene.ChatWithDbQA
         super().__init__(chat_param=chat_param)
@@ -11,11 +11,21 @@ CFG = Config()


 class ChatWithPlugin(BaseChat):
+    """Chat With Plugin"""
+
     chat_scene: str = ChatScene.ChatExecution.value()
     plugins_prompt_generator: PluginPromptGenerator
     select_plugin: str = None

     def __init__(self, chat_param: Dict):
+        """Chat Dashboard Module Initialization
+        Args:
+           - chat_param: Dict
+            - chat_session_id: (str) chat session_id
+            - current_user_input: (str) current user input
+            - model_name:(str) llm model name
+            - select_param:(str) plugin selector
+        """
         self.plugin_selector = chat_param["select_param"]
         chat_param["chat_mode"] = ChatScene.ChatExecution
         super().__init__(chat_param=chat_param)
@@ -19,10 +19,17 @@ CFG = Config()
 class ChatKnowledge(BaseChat):
     chat_scene: str = ChatScene.ChatKnowledge.value()

-    """Number of results to return from the query"""
+    """KBQA Chat Module"""

     def __init__(self, chat_param: Dict):
-        """ """
+        """Chat Knowledge Module Initialization
+        Args:
+           - chat_param: Dict
+            - chat_session_id: (str) chat session_id
+            - current_user_input: (str) current user input
+            - model_name:(str) llm model name
+            - select_param:(str) space name
+        """
         from pilot.embedding_engine.embedding_engine import EmbeddingEngine
         from pilot.embedding_engine.embedding_factory import EmbeddingFactory
