diff --git a/pilot/connections/rdbms/py_study/study_enum.py b/pilot/connections/rdbms/py_study/study_enum.py new file mode 100644 index 000000000..21ae1d1a0 --- /dev/null +++ b/pilot/connections/rdbms/py_study/study_enum.py @@ -0,0 +1,44 @@ +from enum import Enum +from typing import List + +class Test(Enum): + XXX = ("x", "1", True) + YYY = ("Y", "2", False) + ZZZ = ("Z", "3") + def __init__(self, code, v, flag=False): + self.code = code + self.v = v + self.flag = flag + +class Scene: + def __init__(self, code, name, describe, param_types: List = None, is_inner: bool = False): + self.code = code + self.name = name + self.describe = describe + self.param_types = param_types if param_types is not None else [] + self.is_inner = is_inner + + +class ChatScene(Enum): + + ChatWithDbExecute = Scene("chat_with_db_execute", "Chat Data", "Dialogue with your private data through natural language.", ["DB Select"]) + ChatWithDbQA = Scene("chat_with_db_qa", "Chat Meta Data", "Have a Professional Conversation with Metadata.", ["DB Select"]) + ChatExecution = Scene("chat_execution", "Chat Plugin", "Use tools through dialogue to accomplish your goals.", ["Plugin Select"]) + ChatDefaultKnowledge = Scene("chat_default_knowledge", "Chat Default Knowledge", "Dialogue through natural language and private documents and knowledge bases.") + ChatNewKnowledge = Scene("chat_new_knowledge", "Chat New Knowledge", "Dialogue through natural language and private documents and knowledge bases.", ["Knowledge Select"]) + ChatUrlKnowledge = Scene("chat_url_knowledge", "Chat URL", "Dialogue through natural language and private documents and knowledge bases.", ["Url Input"]) + InnerChatDBSummary = Scene("inner_chat_db_summary", "DB Summary", "Db Summary.", is_inner=True) + + ChatNormal = Scene("chat_normal", "Chat Normal", "Native LLM large model AI dialogue.") + ChatDashboard = Scene("chat_dashboard", "Chat Dashboard", "Provide you with professional analysis reports through natural language.", ["DB Select"]) + ChatKnowledge = Scene("chat_knowledge", 
"Chat Knowledge", "Dialogue through natural language and private documents and knowledge bases.", ["Knowledge Space Select"]) + + def scene_value(self): + return self.value.code + + def scene_name(self): + return self._value_.name + +if __name__ == "__main__": + print(ChatScene.ChatWithDbExecute.scene_value()) + # print(ChatScene.ChatWithDbExecute.value.describe) \ No newline at end of file diff --git a/pilot/openapi/api_v1/api_v1.py b/pilot/openapi/api_v1/api_v1.py index 6ef4e5395..2d9eaf6e1 100644 --- a/pilot/openapi/api_v1/api_v1.py +++ b/pilot/openapi/api_v1/api_v1.py @@ -66,7 +66,7 @@ def __get_conv_user_message(conversations: dict): def __new_conversation(chat_mode, user_id) -> ConversationVo: unique_id = uuid.uuid1() - history_mem = DuckdbHistoryMemory(str(unique_id)) + # history_mem = DuckdbHistoryMemory(str(unique_id)) return ConversationVo(conv_uid=str(unique_id), chat_mode=chat_mode) @@ -132,38 +132,36 @@ async def dialogue_scenes(): ChatScene.ChatExecution, ] for scene in new_modes: - if not scene.value in [ - ChatScene.ChatNormal.value, - ChatScene.InnerChatDBSummary.value, - ]: - scene_vo = ChatSceneVo( - chat_scene=scene.value, - scene_name=scene.name, - param_title="Selection Param", - ) - scene_vos.append(scene_vo) + + scene_vo = ChatSceneVo( + chat_scene=scene.value(), + scene_name=scene.scene_name(), + scene_describe=scene.describe(), + param_title=",".join(scene.param_types()), + ) + scene_vos.append(scene_vo) return Result.succ(scene_vos) @router.post("/v1/chat/dialogue/new", response_model=Result[ConversationVo]) async def dialogue_new( - chat_mode: str = ChatScene.ChatNormal.value, user_id: str = None + chat_mode: str = ChatScene.ChatNormal.value(), user_id: str = None ): conv_vo = __new_conversation(chat_mode, user_id) return Result.succ(conv_vo) @router.post("/v1/chat/mode/params/list", response_model=Result[dict]) -async def params_list(chat_mode: str = ChatScene.ChatNormal.value): - if ChatScene.ChatWithDbQA.value == chat_mode: +async 
def params_list(chat_mode: str = ChatScene.ChatNormal.value()): + if ChatScene.ChatWithDbQA.value() == chat_mode: return Result.succ(get_db_list()) - elif ChatScene.ChatWithDbExecute.value == chat_mode: + elif ChatScene.ChatWithDbExecute.value() == chat_mode: return Result.succ(get_db_list()) - elif ChatScene.ChatDashboard.value == chat_mode: + elif ChatScene.ChatDashboard.value() == chat_mode: return Result.succ(get_db_list()) - elif ChatScene.ChatExecution.value == chat_mode: + elif ChatScene.ChatExecution.value() == chat_mode: return Result.succ(plugins_select_info()) - elif ChatScene.ChatKnowledge.value == chat_mode: + elif ChatScene.ChatKnowledge.value() == chat_mode: return Result.succ(knowledge_list()) else: return Result.succ(None) @@ -196,7 +194,7 @@ async def dialogue_history_messages(con_uid: str): async def chat_completions(dialogue: ConversationVo = Body()): print(f"chat_completions:{dialogue.chat_mode},{dialogue.select_param}") if not dialogue.chat_mode: - dialogue.chat_mode = ChatScene.ChatNormal.value + dialogue.chat_mode = ChatScene.ChatNormal.value() if not dialogue.conv_uid: conv_vo = __new_conversation(dialogue.chat_mode, dialogue.user_name) dialogue.conv_uid = conv_vo.conv_uid @@ -217,17 +215,17 @@ async def chat_completions(dialogue: ConversationVo = Body()): "user_input": dialogue.user_input, } - if ChatScene.ChatWithDbQA.value == dialogue.chat_mode: + if ChatScene.ChatWithDbQA.value() == dialogue.chat_mode: chat_param.update({"db_name": dialogue.select_param}) - elif ChatScene.ChatWithDbExecute.value == dialogue.chat_mode: + elif ChatScene.ChatWithDbExecute.value() == dialogue.chat_mode: chat_param.update({"db_name": dialogue.select_param}) - elif ChatScene.ChatDashboard.value == dialogue.chat_mode: + elif ChatScene.ChatDashboard.value() == dialogue.chat_mode: chat_param.update({"db_name": dialogue.select_param}) ## DEFAULT chat_param.update({"report_name": "sales_report"}) - elif ChatScene.ChatExecution.value == dialogue.chat_mode: + elif 
ChatScene.ChatExecution.value() == dialogue.chat_mode: chat_param.update({"plugin_selector": dialogue.select_param}) - elif ChatScene.ChatKnowledge.value == dialogue.chat_mode: + elif ChatScene.ChatKnowledge.value() == dialogue.chat_mode: chat_param.update({"knowledge_space": dialogue.select_param}) chat: BaseChat = CHAT_FACTORY.get_implementation(dialogue.chat_mode, **chat_param) diff --git a/pilot/openapi/api_v1/api_view_model.py b/pilot/openapi/api_v1/api_view_model.py index 7d3bad504..f504e1e76 100644 --- a/pilot/openapi/api_v1/api_view_model.py +++ b/pilot/openapi/api_v1/api_view_model.py @@ -26,7 +26,8 @@ class Result(Generic[T], BaseModel): class ChatSceneVo(BaseModel): chat_scene: str = Field(..., description="chat_scene") scene_name: str = Field(..., description="chat_scene name show for user") - param_title: str = Field(..., description="chat_scene required parameter title") + scene_describe: str = Field("", description="chat_scene describe ") + param_title: str = Field("", description="chat_scene required parameter title") class ConversationVo(BaseModel): diff --git a/pilot/scene/base.py b/pilot/scene/base.py index 7c7d483f9..2684855c9 100644 --- a/pilot/scene/base.py +++ b/pilot/scene/base.py @@ -1,28 +1,52 @@ from enum import Enum +from typing import List class Scene: - def __init__(self, code, describe, is_inner): + def __init__(self, code, name, describe, param_types: List = None, is_inner: bool = False): self.code = code + self.name = name self.describe = describe + self.param_types = param_types if param_types is not None else [] self.is_inner = is_inner class ChatScene(Enum): - ChatWithDbExecute = "chat_with_db_execute" - ChatWithDbQA = "chat_with_db_qa" - ChatExecution = "chat_execution" - ChatDefaultKnowledge = "chat_default_knowledge" - ChatNewKnowledge = "chat_new_knowledge" - ChatUrlKnowledge = "chat_url_knowledge" - InnerChatDBSummary = "inner_chat_db_summary" + ChatWithDbExecute = Scene("chat_with_db_execute", "Chat Data", + "Dialogue with your private data through natural 
language.", ["DB Select"]) + ChatWithDbQA = Scene("chat_with_db_qa", "Chat Meta Data", "Have a Professional Conversation with Metadata.", + ["DB Select"]) + ChatExecution = Scene("chat_execution", "Chat Plugin", "Use tools through dialogue to accomplish your goals.", + ["Plugin Select"]) + ChatDefaultKnowledge = Scene("chat_default_knowledge", "Chat Default Knowledge", + "Dialogue through natural language and private documents and knowledge bases.") + ChatNewKnowledge = Scene("chat_new_knowledge", "Chat New Knowledge", + "Dialogue through natural language and private documents and knowledge bases.", + ["Knowledge Select"]) + ChatUrlKnowledge = Scene("chat_url_knowledge", "Chat URL", + "Dialogue through natural language and private documents and knowledge bases.", + ["Url Input"]) + InnerChatDBSummary = Scene("inner_chat_db_summary", "DB Summary", "Db Summary.", is_inner=True) - ChatNormal = "chat_normal" - ChatDashboard = "chat_dashboard" - ChatKnowledge = "chat_knowledge" - # ChatDb = "chat_db" - # ChatData= "chat_data" + ChatNormal = Scene("chat_normal", "Chat Normal", "Native LLM large model AI dialogue.") + ChatDashboard = Scene("chat_dashboard", "Chat Dashboard", + "Provide you with professional analysis reports through natural language.", ["DB Select"]) + ChatKnowledge = Scene("chat_knowledge", "Chat Knowledge", + "Dialogue through natural language and private documents and knowledge bases.", + ["Knowledge Space Select"]) @staticmethod def is_valid_mode(mode): - return any(mode == item.value for item in ChatScene) + return any(mode == item.value() for item in ChatScene) + + def value(self): + return self._value_.code + + def scene_name(self): + return self._value_.name + + def describe(self): + return self._value_.describe + + def param_types(self): + return self._value_.param_types diff --git a/pilot/scene/base_chat.py b/pilot/scene/base_chat.py index 01586a701..33553cb30 100644 --- a/pilot/scene/base_chat.py +++ b/pilot/scene/base_chat.py @@ -73,10 +73,10 @@ 
class BaseChat(ABC): ### load prompt template self.prompt_template: PromptTemplate = CFG.prompt_templates[ - self.chat_mode.value + self.chat_mode.value() ] self.history_message: List[OnceConversation] = self.memory.messages() - self.current_message: OnceConversation = OnceConversation(chat_mode.value) + self.current_message: OnceConversation = OnceConversation(chat_mode.value()) self.current_tokens_used: int = 0 class Config: diff --git a/pilot/scene/chat_dashboard/chat.py b/pilot/scene/chat_dashboard/chat.py index 84a87c5b6..366edd56a 100644 --- a/pilot/scene/chat_dashboard/chat.py +++ b/pilot/scene/chat_dashboard/chat.py @@ -23,7 +23,7 @@ CFG = Config() class ChatDashboard(BaseChat): - chat_scene: str = ChatScene.ChatDashboard.value + chat_scene: str = ChatScene.ChatDashboard.value() report_name: str """Number of results to return from the query""" diff --git a/pilot/scene/chat_dashboard/prompt.py b/pilot/scene/chat_dashboard/prompt.py index 0053e4a51..7a88ab0a5 100644 --- a/pilot/scene/chat_dashboard/prompt.py +++ b/pilot/scene/chat_dashboard/prompt.py @@ -40,7 +40,7 @@ PROMPT_SEP = SeparatorStyle.SINGLE.value PROMPT_NEED_NEED_STREAM_OUT = False prompt = PromptTemplate( - template_scene=ChatScene.ChatDashboard.value, + template_scene=ChatScene.ChatDashboard.value(), input_variables=["input", "table_info", "dialect", "supported_chat_type"], response_format=json.dumps(RESPONSE_FORMAT, indent=4), template_define=PROMPT_SCENE_DEFINE, diff --git a/pilot/scene/chat_db/auto_execute/chat.py b/pilot/scene/chat_db/auto_execute/chat.py index 5b5998024..9f5f919a2 100644 --- a/pilot/scene/chat_db/auto_execute/chat.py +++ b/pilot/scene/chat_db/auto_execute/chat.py @@ -17,7 +17,7 @@ CFG = Config() class ChatWithDbAutoExecute(BaseChat): - chat_scene: str = ChatScene.ChatWithDbExecute.value + chat_scene: str = ChatScene.ChatWithDbExecute.value() """Number of results to return from the query""" diff --git a/pilot/scene/chat_db/auto_execute/prompt.py 
b/pilot/scene/chat_db/auto_execute/prompt.py index 80a25cc42..5b7174638 100644 --- a/pilot/scene/chat_db/auto_execute/prompt.py +++ b/pilot/scene/chat_db/auto_execute/prompt.py @@ -37,7 +37,7 @@ PROMPT_SEP = SeparatorStyle.SINGLE.value PROMPT_NEED_NEED_STREAM_OUT = False prompt = PromptTemplate( - template_scene=ChatScene.ChatWithDbExecute.value, + template_scene=ChatScene.ChatWithDbExecute.value(), input_variables=["input", "table_info", "dialect", "top_k", "response"], response_format=json.dumps(RESPONSE_FORMAT_SIMPLE, indent=4), template_define=PROMPT_SCENE_DEFINE, diff --git a/pilot/scene/chat_db/professional_qa/chat.py b/pilot/scene/chat_db/professional_qa/chat.py index 3c8d0eb37..c4674d7a7 100644 --- a/pilot/scene/chat_db/professional_qa/chat.py +++ b/pilot/scene/chat_db/professional_qa/chat.py @@ -15,7 +15,7 @@ CFG = Config() class ChatWithDbQA(BaseChat): - chat_scene: str = ChatScene.ChatWithDbQA.value + chat_scene: str = ChatScene.ChatWithDbQA.value() """Number of results to return from the query""" diff --git a/pilot/scene/chat_db/professional_qa/prompt.py b/pilot/scene/chat_db/professional_qa/prompt.py index c6787be7b..f33b0971f 100644 --- a/pilot/scene/chat_db/professional_qa/prompt.py +++ b/pilot/scene/chat_db/professional_qa/prompt.py @@ -59,7 +59,7 @@ PROMPT_SEP = SeparatorStyle.SINGLE.value PROMPT_NEED_NEED_STREAM_OUT = True prompt = PromptTemplate( - template_scene=ChatScene.ChatWithDbQA.value, + template_scene=ChatScene.ChatWithDbQA.value(), input_variables=["input", "table_info"], response_format=None, template_define=PROMPT_SCENE_DEFINE, diff --git a/pilot/scene/chat_execution/chat.py b/pilot/scene/chat_execution/chat.py index 1c3735ac3..da370bf18 100644 --- a/pilot/scene/chat_execution/chat.py +++ b/pilot/scene/chat_execution/chat.py @@ -16,7 +16,7 @@ CFG = Config() class ChatWithPlugin(BaseChat): - chat_scene: str = ChatScene.ChatExecution.value + chat_scene: str = ChatScene.ChatExecution.value() plugins_prompt_generator: PluginPromptGenerator 
select_plugin: str = None diff --git a/pilot/scene/chat_execution/prompt.py b/pilot/scene/chat_execution/prompt.py index af5087609..a4d83459f 100644 --- a/pilot/scene/chat_execution/prompt.py +++ b/pilot/scene/chat_execution/prompt.py @@ -40,7 +40,7 @@ PROMPT_SEP = SeparatorStyle.SINGLE.value PROMPT_NEED_STREAM_OUT = False prompt = PromptTemplate( - template_scene=ChatScene.ChatExecution.value, + template_scene=ChatScene.ChatExecution.value(), input_variables=["input", "constraints", "commands_infos", "response"], response_format=json.dumps(RESPONSE_FORMAT, indent=4), template_define=PROMPT_SCENE_DEFINE, diff --git a/pilot/scene/chat_knowledge/custom/chat.py b/pilot/scene/chat_knowledge/custom/chat.py index 9a67b4d75..2c3992542 100644 --- a/pilot/scene/chat_knowledge/custom/chat.py +++ b/pilot/scene/chat_knowledge/custom/chat.py @@ -23,7 +23,7 @@ CFG = Config() class ChatNewKnowledge(BaseChat): - chat_scene: str = ChatScene.ChatNewKnowledge.value + chat_scene: str = ChatScene.ChatNewKnowledge.value() """Number of results to return from the query""" diff --git a/pilot/scene/chat_knowledge/custom/prompt.py b/pilot/scene/chat_knowledge/custom/prompt.py index 4892e28cd..2b74add93 100644 --- a/pilot/scene/chat_knowledge/custom/prompt.py +++ b/pilot/scene/chat_knowledge/custom/prompt.py @@ -38,7 +38,7 @@ PROMPT_SEP = SeparatorStyle.SINGLE.value PROMPT_NEED_NEED_STREAM_OUT = True prompt = PromptTemplate( - template_scene=ChatScene.ChatNewKnowledge.value, + template_scene=ChatScene.ChatNewKnowledge.value(), input_variables=["context", "question"], response_format=None, template_define=PROMPT_SCENE_DEFINE, diff --git a/pilot/scene/chat_knowledge/default/chat.py b/pilot/scene/chat_knowledge/default/chat.py index 51587a048..972b7f88c 100644 --- a/pilot/scene/chat_knowledge/default/chat.py +++ b/pilot/scene/chat_knowledge/default/chat.py @@ -25,7 +25,7 @@ CFG = Config() class ChatDefaultKnowledge(BaseChat): - chat_scene: str = ChatScene.ChatDefaultKnowledge.value + chat_scene: 
str = ChatScene.ChatDefaultKnowledge.value() """Number of results to return from the query""" diff --git a/pilot/scene/chat_knowledge/default/prompt.py b/pilot/scene/chat_knowledge/default/prompt.py index 760686366..5940295a6 100644 --- a/pilot/scene/chat_knowledge/default/prompt.py +++ b/pilot/scene/chat_knowledge/default/prompt.py @@ -39,7 +39,7 @@ PROMPT_SEP = SeparatorStyle.SINGLE.value PROMPT_NEED_NEED_STREAM_OUT = True prompt = PromptTemplate( - template_scene=ChatScene.ChatDefaultKnowledge.value, + template_scene=ChatScene.ChatDefaultKnowledge.value(), input_variables=["context", "question"], response_format=None, template_define=PROMPT_SCENE_DEFINE, diff --git a/pilot/scene/chat_knowledge/inner_db_summary/chat.py b/pilot/scene/chat_knowledge/inner_db_summary/chat.py index c1e8d279a..4a952e6cc 100644 --- a/pilot/scene/chat_knowledge/inner_db_summary/chat.py +++ b/pilot/scene/chat_knowledge/inner_db_summary/chat.py @@ -8,7 +8,7 @@ CFG = Config() class InnerChatDBSummary(BaseChat): - chat_scene: str = ChatScene.InnerChatDBSummary.value + chat_scene: str = ChatScene.InnerChatDBSummary.value() """Number of results to return from the query""" diff --git a/pilot/scene/chat_knowledge/inner_db_summary/prompt.py b/pilot/scene/chat_knowledge/inner_db_summary/prompt.py index d2c6b1913..ea2407329 100644 --- a/pilot/scene/chat_knowledge/inner_db_summary/prompt.py +++ b/pilot/scene/chat_knowledge/inner_db_summary/prompt.py @@ -38,7 +38,7 @@ PROMPT_SEP = SeparatorStyle.SINGLE.value PROMPT_NEED_NEED_STREAM_OUT = False prompt = PromptTemplate( - template_scene=ChatScene.InnerChatDBSummary.value, + template_scene=ChatScene.InnerChatDBSummary.value(), input_variables=["db_profile_summary", "db_input", "response"], response_format=json.dumps(RESPONSE_FORMAT, indent=4), template_define=PROMPT_SCENE_DEFINE, diff --git a/pilot/scene/chat_knowledge/url/chat.py b/pilot/scene/chat_knowledge/url/chat.py index aab4bdb9f..d27edf6bf 100644 --- a/pilot/scene/chat_knowledge/url/chat.py +++ 
b/pilot/scene/chat_knowledge/url/chat.py @@ -24,7 +24,7 @@ CFG = Config() class ChatUrlKnowledge(BaseChat): - chat_scene: str = ChatScene.ChatUrlKnowledge.value + chat_scene: str = ChatScene.ChatUrlKnowledge.value() """Number of results to return from the query""" diff --git a/pilot/scene/chat_knowledge/url/prompt.py b/pilot/scene/chat_knowledge/url/prompt.py index 3e9659130..2a65246ec 100644 --- a/pilot/scene/chat_knowledge/url/prompt.py +++ b/pilot/scene/chat_knowledge/url/prompt.py @@ -38,7 +38,7 @@ PROMPT_SEP = SeparatorStyle.SINGLE.value PROMPT_NEED_NEED_STREAM_OUT = True prompt = PromptTemplate( - template_scene=ChatScene.ChatUrlKnowledge.value, + template_scene=ChatScene.ChatUrlKnowledge.value(), input_variables=["context", "question"], response_format=None, template_define=PROMPT_SCENE_DEFINE, diff --git a/pilot/scene/chat_knowledge/v1/chat.py b/pilot/scene/chat_knowledge/v1/chat.py index 1c4786725..e142a5245 100644 --- a/pilot/scene/chat_knowledge/v1/chat.py +++ b/pilot/scene/chat_knowledge/v1/chat.py @@ -61,4 +61,4 @@ class ChatKnowledge(BaseChat): @property def chat_type(self) -> str: - return ChatScene.ChatKnowledge.value + return ChatScene.ChatKnowledge.value() diff --git a/pilot/scene/chat_knowledge/v1/prompt.py b/pilot/scene/chat_knowledge/v1/prompt.py index 0fd9f9ff3..60a483b01 100644 --- a/pilot/scene/chat_knowledge/v1/prompt.py +++ b/pilot/scene/chat_knowledge/v1/prompt.py @@ -39,7 +39,7 @@ PROMPT_SEP = SeparatorStyle.SINGLE.value PROMPT_NEED_NEED_STREAM_OUT = True prompt = PromptTemplate( - template_scene=ChatScene.ChatKnowledge.value, + template_scene=ChatScene.ChatKnowledge.value(), input_variables=["context", "question"], response_format=None, template_define=PROMPT_SCENE_DEFINE, diff --git a/pilot/scene/chat_normal/chat.py b/pilot/scene/chat_normal/chat.py index a183020c5..acc022419 100644 --- a/pilot/scene/chat_normal/chat.py +++ b/pilot/scene/chat_normal/chat.py @@ -14,7 +14,7 @@ CFG = Config() class ChatNormal(BaseChat): - chat_scene: str 
= ChatScene.ChatNormal.value + chat_scene: str = ChatScene.ChatNormal.value() """Number of results to return from the query""" diff --git a/pilot/scene/chat_normal/prompt.py b/pilot/scene/chat_normal/prompt.py index 1dc455255..9c4f39a2d 100644 --- a/pilot/scene/chat_normal/prompt.py +++ b/pilot/scene/chat_normal/prompt.py @@ -17,7 +17,7 @@ PROMPT_SEP = SeparatorStyle.SINGLE.value PROMPT_NEED_NEED_STREAM_OUT = True prompt = PromptTemplate( - template_scene=ChatScene.ChatNormal.value, + template_scene=ChatScene.ChatNormal.value(), input_variables=["input"], response_format=None, template_define=PROMPT_SCENE_DEFINE,