WEB API independent

This commit is contained in:
tuyang.yhj 2023-07-04 11:01:25 +08:00
parent 575ed36534
commit 5712b71c0f
25 changed files with 128 additions and 61 deletions

View File

@ -0,0 +1,44 @@
from enum import Enum
from typing import List
class Test(Enum):
    """Demo enum whose members carry a ``(code, v, flag)`` payload tuple.

    ``flag`` defaults to ``False`` when a member supplies only two fields.
    """

    XXX = ("x", "1", True)
    YYY = ("Y", "2", False)
    ZZZ = ("Z", "3")

    def __init__(self, code, v, flag=False):
        # Enum calls __init__ once per member, unpacking the value tuple
        # into per-member attributes.
        self.code, self.v, self.flag = code, v, flag
class Scene:
    """Metadata describing one chat scene.

    Attributes:
        code: machine-readable scene identifier.
        name: human-readable display name.
        describe: short description shown to users.
        param_types: titles of the selection parameters the scene requires.
        is_inner: True for scenes not exposed to end users.
    """

    def __init__(self, code, name, describe, param_types: List = None, is_inner: bool = False):
        self.code = code
        self.name = name
        self.describe = describe
        # BUG FIX: the original used a mutable default (`param_types: List = []`),
        # so every Scene created without the argument shared one list object.
        # Use None as the sentinel and allocate a fresh list per instance.
        self.param_types = param_types if param_types is not None else []
        self.is_inner = is_inner
class ChatScene(Enum):
    """Enumeration of every supported chat scene.

    Each member's value is a Scene instance carrying the scene's code,
    display name, description and required parameter types.
    """

    ChatWithDbExecute = Scene(
        "chat_with_db_execute",
        "Chat Data",
        "Dialogue with your private data through natural language.",
        ["DB Select"],
    )
    ChatWithDbQA = Scene(
        "chat_with_db_qa",
        "Chat Meta Data",
        "Have a Professional Conversation with Metadata.",
        ["DB Select"],
    )
    ChatExecution = Scene(
        "chat_execution",
        "Chat Plugin",
        "Use tools through dialogue to accomplish your goals.",
        ["Plugin Select"],
    )
    ChatDefaultKnowledge = Scene(
        "chat_default_knowledge",
        "Chat Default Knowledge",
        "Dialogue through natural language and private documents and knowledge bases.",
    )
    ChatNewKnowledge = Scene(
        "chat_new_knowledge",
        "Chat New Knowledge",
        "Dialogue through natural language and private documents and knowledge bases.",
        ["Knowledge Select"],
    )
    ChatUrlKnowledge = Scene(
        "chat_url_knowledge",
        "Chat URL",
        "Dialogue through natural language and private documents and knowledge bases.",
        ["Url Input"],
    )
    # BUG FIX: the original passed True positionally, which bound it to
    # param_types (leaving is_inner False); pass it by keyword so this
    # scene is actually marked internal.
    InnerChatDBSummary = Scene(
        "inner_chat_db_summary", "DB Summary", "Db Summary.", is_inner=True
    )
    ChatNormal = Scene(
        "chat_normal", "Chat Normal", "Native LLM large model AI dialogue."
    )
    ChatDashboard = Scene(
        "chat_dashboard",
        "Chat Dashboard",
        "Provide you with professional analysis reports through natural language.",
        ["DB Select"],
    )
    ChatKnowledge = Scene(
        "chat_knowledge",
        "Chat Knowledge",
        "Dialogue through natural language and private documents and knowledge bases.",
        ["Knowledge Space Select"],
    )

    def scene_value(self):
        """Return the scene's code string (e.g. "chat_normal")."""
        # Removed stray trailing semicolon; `self.value` is the Scene payload.
        return self.value.code

    def scene_name(self):
        """Return the scene's human-readable display name."""
        # Use `self.value` for consistency with scene_value (it is the
        # public accessor for `self._value_`).
        return self.value.name
if __name__ == "__main__":
    # Smoke test: print the code string of the data-chat scene.
    demo_scene = ChatScene.ChatWithDbExecute
    print(demo_scene.scene_value())
    # print(demo_scene.value.describe)

View File

@ -66,7 +66,7 @@ def __get_conv_user_message(conversations: dict):
def __new_conversation(chat_mode, user_id) -> ConversationVo:
unique_id = uuid.uuid1()
history_mem = DuckdbHistoryMemory(str(unique_id))
# history_mem = DuckdbHistoryMemory(str(unique_id))
return ConversationVo(conv_uid=str(unique_id), chat_mode=chat_mode)
@ -132,38 +132,36 @@ async def dialogue_scenes():
ChatScene.ChatExecution,
]
for scene in new_modes:
if not scene.value in [
ChatScene.ChatNormal.value,
ChatScene.InnerChatDBSummary.value,
]:
scene_vo = ChatSceneVo(
chat_scene=scene.value,
scene_name=scene.name,
param_title="Selection Param",
)
scene_vos.append(scene_vo)
scene_vo = ChatSceneVo(
chat_scene=scene.value(),
scene_name=scene.scene_name(),
scene_describe= scene.describe(),
param_title=",".join(scene.param_types()),
)
scene_vos.append(scene_vo)
return Result.succ(scene_vos)
@router.post("/v1/chat/dialogue/new", response_model=Result[ConversationVo])
async def dialogue_new(
chat_mode: str = ChatScene.ChatNormal.value, user_id: str = None
chat_mode: str = ChatScene.ChatNormal.value(), user_id: str = None
):
conv_vo = __new_conversation(chat_mode, user_id)
return Result.succ(conv_vo)
@router.post("/v1/chat/mode/params/list", response_model=Result[dict])
async def params_list(chat_mode: str = ChatScene.ChatNormal.value):
if ChatScene.ChatWithDbQA.value == chat_mode:
async def params_list(chat_mode: str = ChatScene.ChatNormal.value()):
if ChatScene.ChatWithDbQA.value() == chat_mode:
return Result.succ(get_db_list())
elif ChatScene.ChatWithDbExecute.value == chat_mode:
elif ChatScene.ChatWithDbExecute.value() == chat_mode:
return Result.succ(get_db_list())
elif ChatScene.ChatDashboard.value == chat_mode:
elif ChatScene.ChatDashboard.value() == chat_mode:
return Result.succ(get_db_list())
elif ChatScene.ChatExecution.value == chat_mode:
elif ChatScene.ChatExecution.value() == chat_mode:
return Result.succ(plugins_select_info())
elif ChatScene.ChatKnowledge.value == chat_mode:
elif ChatScene.ChatKnowledge.value() == chat_mode:
return Result.succ(knowledge_list())
else:
return Result.succ(None)
@ -196,7 +194,7 @@ async def dialogue_history_messages(con_uid: str):
async def chat_completions(dialogue: ConversationVo = Body()):
print(f"chat_completions:{dialogue.chat_mode},{dialogue.select_param}")
if not dialogue.chat_mode:
dialogue.chat_mode = ChatScene.ChatNormal.value
dialogue.chat_mode = ChatScene.ChatNormal.value()
if not dialogue.conv_uid:
conv_vo = __new_conversation(dialogue.chat_mode, dialogue.user_name)
dialogue.conv_uid = conv_vo.conv_uid
@ -217,17 +215,17 @@ async def chat_completions(dialogue: ConversationVo = Body()):
"user_input": dialogue.user_input,
}
if ChatScene.ChatWithDbQA.value == dialogue.chat_mode:
if ChatScene.ChatWithDbQA.value() == dialogue.chat_mode:
chat_param.update({"db_name": dialogue.select_param})
elif ChatScene.ChatWithDbExecute.value == dialogue.chat_mode:
elif ChatScene.ChatWithDbExecute.value() == dialogue.chat_mode:
chat_param.update({"db_name": dialogue.select_param})
elif ChatScene.ChatDashboard.value == dialogue.chat_mode:
elif ChatScene.ChatDashboard.value() == dialogue.chat_mode:
chat_param.update({"db_name": dialogue.select_param})
## DEFAULT
chat_param.update({"report_name": "sales_report"})
elif ChatScene.ChatExecution.value == dialogue.chat_mode:
elif ChatScene.ChatExecution.value() == dialogue.chat_mode:
chat_param.update({"plugin_selector": dialogue.select_param})
elif ChatScene.ChatKnowledge.value == dialogue.chat_mode:
elif ChatScene.ChatKnowledge.value() == dialogue.chat_mode:
chat_param.update({"knowledge_space": dialogue.select_param})
chat: BaseChat = CHAT_FACTORY.get_implementation(dialogue.chat_mode, **chat_param)

View File

@ -26,7 +26,8 @@ class Result(Generic[T], BaseModel):
class ChatSceneVo(BaseModel):
chat_scene: str = Field(..., description="chat_scene")
scene_name: str = Field(..., description="chat_scene name show for user")
param_title: str = Field(..., description="chat_scene required parameter title")
scene_describe: str = Field("", description="chat_scene describe ")
param_title: str = Field("", description="chat_scene required parameter title")
class ConversationVo(BaseModel):

View File

@ -1,28 +1,52 @@
from enum import Enum
from typing import List
class Scene:
def __init__(self, code, describe, is_inner):
def __init__(self, code, name, describe, param_types: List = [], is_inner: bool = False):
self.code = code
self.name = name
self.describe = describe
self.param_types = param_types
self.is_inner = is_inner
class ChatScene(Enum):
ChatWithDbExecute = "chat_with_db_execute"
ChatWithDbQA = "chat_with_db_qa"
ChatExecution = "chat_execution"
ChatDefaultKnowledge = "chat_default_knowledge"
ChatNewKnowledge = "chat_new_knowledge"
ChatUrlKnowledge = "chat_url_knowledge"
InnerChatDBSummary = "inner_chat_db_summary"
ChatWithDbExecute = Scene("chat_with_db_execute", "Chat Data",
"Dialogue with your private data through natural language.", ["DB Select"])
ChatWithDbQA = Scene("chat_with_db_qa", "Chat Meta Data", "Have a Professional Conversation with Metadata.",
["DB Select"])
ChatExecution = Scene("chat_execution", "Chat Plugin", "Use tools through dialogue to accomplish your goals.",
["Plugin Select"])
ChatDefaultKnowledge = Scene("chat_default_knowledge", "Chat Default Knowledge",
"Dialogue through natural language and private documents and knowledge bases.")
ChatNewKnowledge = Scene("chat_new_knowledge", "Chat New Knowledge",
"Dialogue through natural language and private documents and knowledge bases.",
["Knowledge Select"])
ChatUrlKnowledge = Scene("chat_url_knowledge", "Chat URL",
"Dialogue through natural language and private documents and knowledge bases.",
["Url Input"])
InnerChatDBSummary = Scene("inner_chat_db_summary", "DB Summary", "Db Summary.", True)
ChatNormal = "chat_normal"
ChatDashboard = "chat_dashboard"
ChatKnowledge = "chat_knowledge"
# ChatDb = "chat_db"
# ChatData= "chat_data"
ChatNormal = Scene("chat_normal", "Chat Normal", "Native LLM large model AI dialogue.")
ChatDashboard = Scene("chat_dashboard", "Chat Dashboard",
"Provide you with professional analysis reports through natural language.", ["DB Select"])
ChatKnowledge = Scene("chat_knowledge", "Chat Knowledge",
"Dialogue through natural language and private documents and knowledge bases.",
["Knowledge Space Select"])
@staticmethod
def is_valid_mode(mode):
return any(mode == item.value for item in ChatScene)
return any(mode == item.value() for item in ChatScene)
def value(self):
return self._value_.code;
def scene_name(self):
return self._value_.name;
def describe(self):
return self._value_.describe;
def param_types(self):
return self._value_.param_types

View File

@ -73,10 +73,10 @@ class BaseChat(ABC):
### load prompt template
self.prompt_template: PromptTemplate = CFG.prompt_templates[
self.chat_mode.value
self.chat_mode.value()
]
self.history_message: List[OnceConversation] = self.memory.messages()
self.current_message: OnceConversation = OnceConversation(chat_mode.value)
self.current_message: OnceConversation = OnceConversation(chat_mode.value())
self.current_tokens_used: int = 0
class Config:

View File

@ -23,7 +23,7 @@ CFG = Config()
class ChatDashboard(BaseChat):
chat_scene: str = ChatScene.ChatDashboard.value
chat_scene: str = ChatScene.ChatDashboard.value()
report_name: str
"""Number of results to return from the query"""

View File

@ -40,7 +40,7 @@ PROMPT_SEP = SeparatorStyle.SINGLE.value
PROMPT_NEED_NEED_STREAM_OUT = False
prompt = PromptTemplate(
template_scene=ChatScene.ChatDashboard.value,
template_scene=ChatScene.ChatDashboard.value(),
input_variables=["input", "table_info", "dialect", "supported_chat_type"],
response_format=json.dumps(RESPONSE_FORMAT, indent=4),
template_define=PROMPT_SCENE_DEFINE,

View File

@ -17,7 +17,7 @@ CFG = Config()
class ChatWithDbAutoExecute(BaseChat):
chat_scene: str = ChatScene.ChatWithDbExecute.value
chat_scene: str = ChatScene.ChatWithDbExecute.value()
"""Number of results to return from the query"""

View File

@ -37,7 +37,7 @@ PROMPT_SEP = SeparatorStyle.SINGLE.value
PROMPT_NEED_NEED_STREAM_OUT = False
prompt = PromptTemplate(
template_scene=ChatScene.ChatWithDbExecute.value,
template_scene=ChatScene.ChatWithDbExecute.value(),
input_variables=["input", "table_info", "dialect", "top_k", "response"],
response_format=json.dumps(RESPONSE_FORMAT_SIMPLE, indent=4),
template_define=PROMPT_SCENE_DEFINE,

View File

@ -15,7 +15,7 @@ CFG = Config()
class ChatWithDbQA(BaseChat):
chat_scene: str = ChatScene.ChatWithDbQA.value
chat_scene: str = ChatScene.ChatWithDbQA.value()
"""Number of results to return from the query"""

View File

@ -59,7 +59,7 @@ PROMPT_SEP = SeparatorStyle.SINGLE.value
PROMPT_NEED_NEED_STREAM_OUT = True
prompt = PromptTemplate(
template_scene=ChatScene.ChatWithDbQA.value,
template_scene=ChatScene.ChatWithDbQA.value(),
input_variables=["input", "table_info"],
response_format=None,
template_define=PROMPT_SCENE_DEFINE,

View File

@ -16,7 +16,7 @@ CFG = Config()
class ChatWithPlugin(BaseChat):
chat_scene: str = ChatScene.ChatExecution.value
chat_scene: str = ChatScene.ChatExecution.value()
plugins_prompt_generator: PluginPromptGenerator
select_plugin: str = None

View File

@ -40,7 +40,7 @@ PROMPT_SEP = SeparatorStyle.SINGLE.value
PROMPT_NEED_STREAM_OUT = False
prompt = PromptTemplate(
template_scene=ChatScene.ChatExecution.value,
template_scene=ChatScene.ChatExecution.value(),
input_variables=["input", "constraints", "commands_infos", "response"],
response_format=json.dumps(RESPONSE_FORMAT, indent=4),
template_define=PROMPT_SCENE_DEFINE,

View File

@ -23,7 +23,7 @@ CFG = Config()
class ChatNewKnowledge(BaseChat):
chat_scene: str = ChatScene.ChatNewKnowledge.value
chat_scene: str = ChatScene.ChatNewKnowledge.value()
"""Number of results to return from the query"""

View File

@ -38,7 +38,7 @@ PROMPT_SEP = SeparatorStyle.SINGLE.value
PROMPT_NEED_NEED_STREAM_OUT = True
prompt = PromptTemplate(
template_scene=ChatScene.ChatNewKnowledge.value,
template_scene=ChatScene.ChatNewKnowledge.value(),
input_variables=["context", "question"],
response_format=None,
template_define=PROMPT_SCENE_DEFINE,

View File

@ -25,7 +25,7 @@ CFG = Config()
class ChatDefaultKnowledge(BaseChat):
chat_scene: str = ChatScene.ChatDefaultKnowledge.value
chat_scene: str = ChatScene.ChatDefaultKnowledge.value()
"""Number of results to return from the query"""

View File

@ -39,7 +39,7 @@ PROMPT_SEP = SeparatorStyle.SINGLE.value
PROMPT_NEED_NEED_STREAM_OUT = True
prompt = PromptTemplate(
template_scene=ChatScene.ChatDefaultKnowledge.value,
template_scene=ChatScene.ChatDefaultKnowledge.value(),
input_variables=["context", "question"],
response_format=None,
template_define=PROMPT_SCENE_DEFINE,

View File

@ -8,7 +8,7 @@ CFG = Config()
class InnerChatDBSummary(BaseChat):
chat_scene: str = ChatScene.InnerChatDBSummary.value
chat_scene: str = ChatScene.InnerChatDBSummary.value()
"""Number of results to return from the query"""

View File

@ -38,7 +38,7 @@ PROMPT_SEP = SeparatorStyle.SINGLE.value
PROMPT_NEED_NEED_STREAM_OUT = False
prompt = PromptTemplate(
template_scene=ChatScene.InnerChatDBSummary.value,
template_scene=ChatScene.InnerChatDBSummary.value(),
input_variables=["db_profile_summary", "db_input", "response"],
response_format=json.dumps(RESPONSE_FORMAT, indent=4),
template_define=PROMPT_SCENE_DEFINE,

View File

@ -24,7 +24,7 @@ CFG = Config()
class ChatUrlKnowledge(BaseChat):
chat_scene: str = ChatScene.ChatUrlKnowledge.value
chat_scene: str = ChatScene.ChatUrlKnowledge.value()
"""Number of results to return from the query"""

View File

@ -38,7 +38,7 @@ PROMPT_SEP = SeparatorStyle.SINGLE.value
PROMPT_NEED_NEED_STREAM_OUT = True
prompt = PromptTemplate(
template_scene=ChatScene.ChatUrlKnowledge.value,
template_scene=ChatScene.ChatUrlKnowledge.value(),
input_variables=["context", "question"],
response_format=None,
template_define=PROMPT_SCENE_DEFINE,

View File

@ -61,4 +61,4 @@ class ChatKnowledge(BaseChat):
@property
def chat_type(self) -> str:
return ChatScene.ChatKnowledge.value
return ChatScene.ChatKnowledge.value()

View File

@ -39,7 +39,7 @@ PROMPT_SEP = SeparatorStyle.SINGLE.value
PROMPT_NEED_NEED_STREAM_OUT = True
prompt = PromptTemplate(
template_scene=ChatScene.ChatKnowledge.value,
template_scene=ChatScene.ChatKnowledge.value(),
input_variables=["context", "question"],
response_format=None,
template_define=PROMPT_SCENE_DEFINE,

View File

@ -14,7 +14,7 @@ CFG = Config()
class ChatNormal(BaseChat):
chat_scene: str = ChatScene.ChatNormal.value
chat_scene: str = ChatScene.ChatNormal.value()
"""Number of results to return from the query"""

View File

@ -17,7 +17,7 @@ PROMPT_SEP = SeparatorStyle.SINGLE.value
PROMPT_NEED_NEED_STREAM_OUT = True
prompt = PromptTemplate(
template_scene=ChatScene.ChatNormal.value,
template_scene=ChatScene.ChatNormal.value(),
input_variables=["input"],
response_format=None,
template_define=PROMPT_SCENE_DEFINE,