mirror of
https://github.com/csunny/DB-GPT.git
synced 2025-08-26 20:09:45 +00:00
WEB API independent
This commit is contained in:
parent
575ed36534
commit
5712b71c0f
44
pilot/connections/rdbms/py_study/study_enum.py
Normal file
44
pilot/connections/rdbms/py_study/study_enum.py
Normal file
@ -0,0 +1,44 @@
|
|||||||
|
from enum import Enum
from typing import List, Optional
|
||||||
|
|
||||||
|
class Test(Enum):
    """Study example: an Enum whose members carry a (code, v, flag) payload.

    Enum calls ``__init__`` once per member with the member's tuple value
    unpacked as arguments, so e.g. ``Test.XXX.code == "x"``.
    """

    XXX = ("x", "1", True)
    YYY = ("Y", "2", False)
    ZZZ = ("Z", "3")  # two-element tuple: flag falls back to the default False

    def __init__(self, code: str, v: str, flag: bool = False):
        self.code = code
        self.v = v
        self.flag = flag
|
||||||
|
|
||||||
|
class Scene:
    """Value object describing a chat scene.

    Attributes:
        code: unique string identifier of the scene.
        name: human-readable display name.
        describe: one-line description shown to the user.
        param_types: titles of the selection parameters the scene requires.
        is_inner: True for internal-only scenes hidden from end users.
    """

    def __init__(
        self,
        code: str,
        name: str,
        describe: str,
        param_types: Optional[List] = None,
        is_inner: bool = False,
    ):
        self.code = code
        self.name = name
        self.describe = describe
        # `param_types=[]` as a literal default would be shared across every
        # Scene created without an explicit list (mutable-default pitfall);
        # use a None sentinel and build a fresh list per instance instead.
        self.param_types = param_types if param_types is not None else []
        self.is_inner = is_inner
|
||||||
|
|
||||||
|
|
||||||
|
class ChatScene(Enum):
    """Registry of every chat scene; each member's value is a Scene object."""

    ChatWithDbExecute = Scene(
        "chat_with_db_execute",
        "Chat Data",
        "Dialogue with your private data through natural language.",
        ["DB Select"],
    )
    ChatWithDbQA = Scene(
        "chat_with_db_qa",
        "Chat Meta Data",
        "Have a Professional Conversation with Metadata.",
        ["DB Select"],
    )
    ChatExecution = Scene(
        "chat_execution",
        "Chat Plugin",
        "Use tools through dialogue to accomplish your goals.",
        ["Plugin Select"],
    )
    ChatDefaultKnowledge = Scene(
        "chat_default_knowledge",
        "Chat Default Knowledge",
        "Dialogue through natural language and private documents and knowledge bases.",
    )
    ChatNewKnowledge = Scene(
        "chat_new_knowledge",
        "Chat New Knowledge",
        "Dialogue through natural language and private documents and knowledge bases.",
        ["Knowledge Select"],
    )
    ChatUrlKnowledge = Scene(
        "chat_url_knowledge",
        "Chat URL",
        "Dialogue through natural language and private documents and knowledge bases.",
        ["Url Input"],
    )
    # BUG FIX: the original passed True positionally, which landed in
    # param_types instead of is_inner; pass it by keyword so this scene is
    # actually marked internal (the API layer filters on is_inner scenes).
    InnerChatDBSummary = Scene(
        "inner_chat_db_summary", "DB Summary", "Db Summary.", is_inner=True
    )

    ChatNormal = Scene(
        "chat_normal", "Chat Normal", "Native LLM large model AI dialogue."
    )
    ChatDashboard = Scene(
        "chat_dashboard",
        "Chat Dashboard",
        "Provide you with professional analysis reports through natural language.",
        ["DB Select"],
    )
    ChatKnowledge = Scene(
        "chat_knowledge",
        "Chat Knowledge",
        "Dialogue through natural language and private documents and knowledge bases.",
        ["Knowledge Space Select"],
    )

    def scene_value(self) -> str:
        """Return the scene's unique string code."""
        return self.value.code

    def scene_name(self) -> str:
        """Return the scene's human-readable display name."""
        # Use self.value consistently (the original mixed .value and ._value_).
        return self.value.name
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    # Manual smoke check: show the string code of one registered scene.
    demo_code = ChatScene.ChatWithDbExecute.scene_value()
    print(demo_code)
|
@ -66,7 +66,7 @@ def __get_conv_user_message(conversations: dict):
|
|||||||
|
|
||||||
def __new_conversation(chat_mode, user_id) -> ConversationVo:
|
def __new_conversation(chat_mode, user_id) -> ConversationVo:
|
||||||
unique_id = uuid.uuid1()
|
unique_id = uuid.uuid1()
|
||||||
history_mem = DuckdbHistoryMemory(str(unique_id))
|
# history_mem = DuckdbHistoryMemory(str(unique_id))
|
||||||
return ConversationVo(conv_uid=str(unique_id), chat_mode=chat_mode)
|
return ConversationVo(conv_uid=str(unique_id), chat_mode=chat_mode)
|
||||||
|
|
||||||
|
|
||||||
@ -132,14 +132,12 @@ async def dialogue_scenes():
|
|||||||
ChatScene.ChatExecution,
|
ChatScene.ChatExecution,
|
||||||
]
|
]
|
||||||
for scene in new_modes:
|
for scene in new_modes:
|
||||||
if not scene.value in [
|
|
||||||
ChatScene.ChatNormal.value,
|
|
||||||
ChatScene.InnerChatDBSummary.value,
|
|
||||||
]:
|
|
||||||
scene_vo = ChatSceneVo(
|
scene_vo = ChatSceneVo(
|
||||||
chat_scene=scene.value,
|
chat_scene=scene.value(),
|
||||||
scene_name=scene.name,
|
scene_name=scene.scene_name(),
|
||||||
param_title="Selection Param",
|
scene_describe= scene.describe(),
|
||||||
|
param_title=",".join(scene.param_types()),
|
||||||
)
|
)
|
||||||
scene_vos.append(scene_vo)
|
scene_vos.append(scene_vo)
|
||||||
return Result.succ(scene_vos)
|
return Result.succ(scene_vos)
|
||||||
@ -147,23 +145,23 @@ async def dialogue_scenes():
|
|||||||
|
|
||||||
@router.post("/v1/chat/dialogue/new", response_model=Result[ConversationVo])
|
@router.post("/v1/chat/dialogue/new", response_model=Result[ConversationVo])
|
||||||
async def dialogue_new(
|
async def dialogue_new(
|
||||||
chat_mode: str = ChatScene.ChatNormal.value, user_id: str = None
|
chat_mode: str = ChatScene.ChatNormal.value(), user_id: str = None
|
||||||
):
|
):
|
||||||
conv_vo = __new_conversation(chat_mode, user_id)
|
conv_vo = __new_conversation(chat_mode, user_id)
|
||||||
return Result.succ(conv_vo)
|
return Result.succ(conv_vo)
|
||||||
|
|
||||||
|
|
||||||
@router.post("/v1/chat/mode/params/list", response_model=Result[dict])
|
@router.post("/v1/chat/mode/params/list", response_model=Result[dict])
|
||||||
async def params_list(chat_mode: str = ChatScene.ChatNormal.value):
|
async def params_list(chat_mode: str = ChatScene.ChatNormal.value()):
|
||||||
if ChatScene.ChatWithDbQA.value == chat_mode:
|
if ChatScene.ChatWithDbQA.value() == chat_mode:
|
||||||
return Result.succ(get_db_list())
|
return Result.succ(get_db_list())
|
||||||
elif ChatScene.ChatWithDbExecute.value == chat_mode:
|
elif ChatScene.ChatWithDbExecute.value() == chat_mode:
|
||||||
return Result.succ(get_db_list())
|
return Result.succ(get_db_list())
|
||||||
elif ChatScene.ChatDashboard.value == chat_mode:
|
elif ChatScene.ChatDashboard.value() == chat_mode:
|
||||||
return Result.succ(get_db_list())
|
return Result.succ(get_db_list())
|
||||||
elif ChatScene.ChatExecution.value == chat_mode:
|
elif ChatScene.ChatExecution.value() == chat_mode:
|
||||||
return Result.succ(plugins_select_info())
|
return Result.succ(plugins_select_info())
|
||||||
elif ChatScene.ChatKnowledge.value == chat_mode:
|
elif ChatScene.ChatKnowledge.value() == chat_mode:
|
||||||
return Result.succ(knowledge_list())
|
return Result.succ(knowledge_list())
|
||||||
else:
|
else:
|
||||||
return Result.succ(None)
|
return Result.succ(None)
|
||||||
@ -196,7 +194,7 @@ async def dialogue_history_messages(con_uid: str):
|
|||||||
async def chat_completions(dialogue: ConversationVo = Body()):
|
async def chat_completions(dialogue: ConversationVo = Body()):
|
||||||
print(f"chat_completions:{dialogue.chat_mode},{dialogue.select_param}")
|
print(f"chat_completions:{dialogue.chat_mode},{dialogue.select_param}")
|
||||||
if not dialogue.chat_mode:
|
if not dialogue.chat_mode:
|
||||||
dialogue.chat_mode = ChatScene.ChatNormal.value
|
dialogue.chat_mode = ChatScene.ChatNormal.value()
|
||||||
if not dialogue.conv_uid:
|
if not dialogue.conv_uid:
|
||||||
conv_vo = __new_conversation(dialogue.chat_mode, dialogue.user_name)
|
conv_vo = __new_conversation(dialogue.chat_mode, dialogue.user_name)
|
||||||
dialogue.conv_uid = conv_vo.conv_uid
|
dialogue.conv_uid = conv_vo.conv_uid
|
||||||
@ -217,17 +215,17 @@ async def chat_completions(dialogue: ConversationVo = Body()):
|
|||||||
"user_input": dialogue.user_input,
|
"user_input": dialogue.user_input,
|
||||||
}
|
}
|
||||||
|
|
||||||
if ChatScene.ChatWithDbQA.value == dialogue.chat_mode:
|
if ChatScene.ChatWithDbQA.value() == dialogue.chat_mode:
|
||||||
chat_param.update({"db_name": dialogue.select_param})
|
chat_param.update({"db_name": dialogue.select_param})
|
||||||
elif ChatScene.ChatWithDbExecute.value == dialogue.chat_mode:
|
elif ChatScene.ChatWithDbExecute.value() == dialogue.chat_mode:
|
||||||
chat_param.update({"db_name": dialogue.select_param})
|
chat_param.update({"db_name": dialogue.select_param})
|
||||||
elif ChatScene.ChatDashboard.value == dialogue.chat_mode:
|
elif ChatScene.ChatDashboard.value() == dialogue.chat_mode:
|
||||||
chat_param.update({"db_name": dialogue.select_param})
|
chat_param.update({"db_name": dialogue.select_param})
|
||||||
## DEFAULT
|
## DEFAULT
|
||||||
chat_param.update({"report_name": "sales_report"})
|
chat_param.update({"report_name": "sales_report"})
|
||||||
elif ChatScene.ChatExecution.value == dialogue.chat_mode:
|
elif ChatScene.ChatExecution.value() == dialogue.chat_mode:
|
||||||
chat_param.update({"plugin_selector": dialogue.select_param})
|
chat_param.update({"plugin_selector": dialogue.select_param})
|
||||||
elif ChatScene.ChatKnowledge.value == dialogue.chat_mode:
|
elif ChatScene.ChatKnowledge.value() == dialogue.chat_mode:
|
||||||
chat_param.update({"knowledge_space": dialogue.select_param})
|
chat_param.update({"knowledge_space": dialogue.select_param})
|
||||||
|
|
||||||
chat: BaseChat = CHAT_FACTORY.get_implementation(dialogue.chat_mode, **chat_param)
|
chat: BaseChat = CHAT_FACTORY.get_implementation(dialogue.chat_mode, **chat_param)
|
||||||
|
@ -26,7 +26,8 @@ class Result(Generic[T], BaseModel):
|
|||||||
class ChatSceneVo(BaseModel):
|
class ChatSceneVo(BaseModel):
|
||||||
chat_scene: str = Field(..., description="chat_scene")
|
chat_scene: str = Field(..., description="chat_scene")
|
||||||
scene_name: str = Field(..., description="chat_scene name show for user")
|
scene_name: str = Field(..., description="chat_scene name show for user")
|
||||||
param_title: str = Field(..., description="chat_scene required parameter title")
|
scene_describe: str = Field("", description="chat_scene describe ")
|
||||||
|
param_title: str = Field("", description="chat_scene required parameter title")
|
||||||
|
|
||||||
|
|
||||||
class ConversationVo(BaseModel):
|
class ConversationVo(BaseModel):
|
||||||
|
@ -1,28 +1,52 @@
|
|||||||
from enum import Enum
|
from enum import Enum
|
||||||
|
from typing import List
|
||||||
|
|
||||||
|
|
||||||
class Scene:
|
class Scene:
|
||||||
def __init__(self, code, describe, is_inner):
|
def __init__(self, code, name, describe, param_types: List = [], is_inner: bool = False):
|
||||||
self.code = code
|
self.code = code
|
||||||
|
self.name = name
|
||||||
self.describe = describe
|
self.describe = describe
|
||||||
|
self.param_types = param_types
|
||||||
self.is_inner = is_inner
|
self.is_inner = is_inner
|
||||||
|
|
||||||
|
|
||||||
class ChatScene(Enum):
|
class ChatScene(Enum):
|
||||||
ChatWithDbExecute = "chat_with_db_execute"
|
ChatWithDbExecute = Scene("chat_with_db_execute", "Chat Data",
|
||||||
ChatWithDbQA = "chat_with_db_qa"
|
"Dialogue with your private data through natural language.", ["DB Select"])
|
||||||
ChatExecution = "chat_execution"
|
ChatWithDbQA = Scene("chat_with_db_qa", "Chat Meta Data", "Have a Professional Conversation with Metadata.",
|
||||||
ChatDefaultKnowledge = "chat_default_knowledge"
|
["DB Select"])
|
||||||
ChatNewKnowledge = "chat_new_knowledge"
|
ChatExecution = Scene("chat_execution", "Chat Plugin", "Use tools through dialogue to accomplish your goals.",
|
||||||
ChatUrlKnowledge = "chat_url_knowledge"
|
["Plugin Select"])
|
||||||
InnerChatDBSummary = "inner_chat_db_summary"
|
ChatDefaultKnowledge = Scene("chat_default_knowledge", "Chat Default Knowledge",
|
||||||
|
"Dialogue through natural language and private documents and knowledge bases.")
|
||||||
|
ChatNewKnowledge = Scene("chat_new_knowledge", "Chat New Knowledge",
|
||||||
|
"Dialogue through natural language and private documents and knowledge bases.",
|
||||||
|
["Knowledge Select"])
|
||||||
|
ChatUrlKnowledge = Scene("chat_url_knowledge", "Chat URL",
|
||||||
|
"Dialogue through natural language and private documents and knowledge bases.",
|
||||||
|
["Url Input"])
|
||||||
|
InnerChatDBSummary = Scene("inner_chat_db_summary", "DB Summary", "Db Summary.", True)
|
||||||
|
|
||||||
ChatNormal = "chat_normal"
|
ChatNormal = Scene("chat_normal", "Chat Normal", "Native LLM large model AI dialogue.")
|
||||||
ChatDashboard = "chat_dashboard"
|
ChatDashboard = Scene("chat_dashboard", "Chat Dashboard",
|
||||||
ChatKnowledge = "chat_knowledge"
|
"Provide you with professional analysis reports through natural language.", ["DB Select"])
|
||||||
# ChatDb = "chat_db"
|
ChatKnowledge = Scene("chat_knowledge", "Chat Knowledge",
|
||||||
# ChatData= "chat_data"
|
"Dialogue through natural language and private documents and knowledge bases.",
|
||||||
|
["Knowledge Space Select"])
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def is_valid_mode(mode):
|
def is_valid_mode(mode):
|
||||||
return any(mode == item.value for item in ChatScene)
|
return any(mode == item.value() for item in ChatScene)
|
||||||
|
|
||||||
|
def value(self):
|
||||||
|
return self._value_.code;
|
||||||
|
|
||||||
|
def scene_name(self):
|
||||||
|
return self._value_.name;
|
||||||
|
|
||||||
|
def describe(self):
|
||||||
|
return self._value_.describe;
|
||||||
|
|
||||||
|
def param_types(self):
|
||||||
|
return self._value_.param_types
|
||||||
|
@ -73,10 +73,10 @@ class BaseChat(ABC):
|
|||||||
|
|
||||||
### load prompt template
|
### load prompt template
|
||||||
self.prompt_template: PromptTemplate = CFG.prompt_templates[
|
self.prompt_template: PromptTemplate = CFG.prompt_templates[
|
||||||
self.chat_mode.value
|
self.chat_mode.value()
|
||||||
]
|
]
|
||||||
self.history_message: List[OnceConversation] = self.memory.messages()
|
self.history_message: List[OnceConversation] = self.memory.messages()
|
||||||
self.current_message: OnceConversation = OnceConversation(chat_mode.value)
|
self.current_message: OnceConversation = OnceConversation(chat_mode.value())
|
||||||
self.current_tokens_used: int = 0
|
self.current_tokens_used: int = 0
|
||||||
|
|
||||||
class Config:
|
class Config:
|
||||||
|
@ -23,7 +23,7 @@ CFG = Config()
|
|||||||
|
|
||||||
|
|
||||||
class ChatDashboard(BaseChat):
|
class ChatDashboard(BaseChat):
|
||||||
chat_scene: str = ChatScene.ChatDashboard.value
|
chat_scene: str = ChatScene.ChatDashboard.value()
|
||||||
report_name: str
|
report_name: str
|
||||||
"""Number of results to return from the query"""
|
"""Number of results to return from the query"""
|
||||||
|
|
||||||
|
@ -40,7 +40,7 @@ PROMPT_SEP = SeparatorStyle.SINGLE.value
|
|||||||
PROMPT_NEED_NEED_STREAM_OUT = False
|
PROMPT_NEED_NEED_STREAM_OUT = False
|
||||||
|
|
||||||
prompt = PromptTemplate(
|
prompt = PromptTemplate(
|
||||||
template_scene=ChatScene.ChatDashboard.value,
|
template_scene=ChatScene.ChatDashboard.value(),
|
||||||
input_variables=["input", "table_info", "dialect", "supported_chat_type"],
|
input_variables=["input", "table_info", "dialect", "supported_chat_type"],
|
||||||
response_format=json.dumps(RESPONSE_FORMAT, indent=4),
|
response_format=json.dumps(RESPONSE_FORMAT, indent=4),
|
||||||
template_define=PROMPT_SCENE_DEFINE,
|
template_define=PROMPT_SCENE_DEFINE,
|
||||||
|
@ -17,7 +17,7 @@ CFG = Config()
|
|||||||
|
|
||||||
|
|
||||||
class ChatWithDbAutoExecute(BaseChat):
|
class ChatWithDbAutoExecute(BaseChat):
|
||||||
chat_scene: str = ChatScene.ChatWithDbExecute.value
|
chat_scene: str = ChatScene.ChatWithDbExecute.value()
|
||||||
|
|
||||||
"""Number of results to return from the query"""
|
"""Number of results to return from the query"""
|
||||||
|
|
||||||
|
@ -37,7 +37,7 @@ PROMPT_SEP = SeparatorStyle.SINGLE.value
|
|||||||
PROMPT_NEED_NEED_STREAM_OUT = False
|
PROMPT_NEED_NEED_STREAM_OUT = False
|
||||||
|
|
||||||
prompt = PromptTemplate(
|
prompt = PromptTemplate(
|
||||||
template_scene=ChatScene.ChatWithDbExecute.value,
|
template_scene=ChatScene.ChatWithDbExecute.value(),
|
||||||
input_variables=["input", "table_info", "dialect", "top_k", "response"],
|
input_variables=["input", "table_info", "dialect", "top_k", "response"],
|
||||||
response_format=json.dumps(RESPONSE_FORMAT_SIMPLE, indent=4),
|
response_format=json.dumps(RESPONSE_FORMAT_SIMPLE, indent=4),
|
||||||
template_define=PROMPT_SCENE_DEFINE,
|
template_define=PROMPT_SCENE_DEFINE,
|
||||||
|
@ -15,7 +15,7 @@ CFG = Config()
|
|||||||
|
|
||||||
|
|
||||||
class ChatWithDbQA(BaseChat):
|
class ChatWithDbQA(BaseChat):
|
||||||
chat_scene: str = ChatScene.ChatWithDbQA.value
|
chat_scene: str = ChatScene.ChatWithDbQA.value()
|
||||||
|
|
||||||
"""Number of results to return from the query"""
|
"""Number of results to return from the query"""
|
||||||
|
|
||||||
|
@ -59,7 +59,7 @@ PROMPT_SEP = SeparatorStyle.SINGLE.value
|
|||||||
PROMPT_NEED_NEED_STREAM_OUT = True
|
PROMPT_NEED_NEED_STREAM_OUT = True
|
||||||
|
|
||||||
prompt = PromptTemplate(
|
prompt = PromptTemplate(
|
||||||
template_scene=ChatScene.ChatWithDbQA.value,
|
template_scene=ChatScene.ChatWithDbQA.value(),
|
||||||
input_variables=["input", "table_info"],
|
input_variables=["input", "table_info"],
|
||||||
response_format=None,
|
response_format=None,
|
||||||
template_define=PROMPT_SCENE_DEFINE,
|
template_define=PROMPT_SCENE_DEFINE,
|
||||||
|
@ -16,7 +16,7 @@ CFG = Config()
|
|||||||
|
|
||||||
|
|
||||||
class ChatWithPlugin(BaseChat):
|
class ChatWithPlugin(BaseChat):
|
||||||
chat_scene: str = ChatScene.ChatExecution.value
|
chat_scene: str = ChatScene.ChatExecution.value()
|
||||||
plugins_prompt_generator: PluginPromptGenerator
|
plugins_prompt_generator: PluginPromptGenerator
|
||||||
select_plugin: str = None
|
select_plugin: str = None
|
||||||
|
|
||||||
|
@ -40,7 +40,7 @@ PROMPT_SEP = SeparatorStyle.SINGLE.value
|
|||||||
PROMPT_NEED_STREAM_OUT = False
|
PROMPT_NEED_STREAM_OUT = False
|
||||||
|
|
||||||
prompt = PromptTemplate(
|
prompt = PromptTemplate(
|
||||||
template_scene=ChatScene.ChatExecution.value,
|
template_scene=ChatScene.ChatExecution.value(),
|
||||||
input_variables=["input", "constraints", "commands_infos", "response"],
|
input_variables=["input", "constraints", "commands_infos", "response"],
|
||||||
response_format=json.dumps(RESPONSE_FORMAT, indent=4),
|
response_format=json.dumps(RESPONSE_FORMAT, indent=4),
|
||||||
template_define=PROMPT_SCENE_DEFINE,
|
template_define=PROMPT_SCENE_DEFINE,
|
||||||
|
@ -23,7 +23,7 @@ CFG = Config()
|
|||||||
|
|
||||||
|
|
||||||
class ChatNewKnowledge(BaseChat):
|
class ChatNewKnowledge(BaseChat):
|
||||||
chat_scene: str = ChatScene.ChatNewKnowledge.value
|
chat_scene: str = ChatScene.ChatNewKnowledge.value()
|
||||||
|
|
||||||
"""Number of results to return from the query"""
|
"""Number of results to return from the query"""
|
||||||
|
|
||||||
|
@ -38,7 +38,7 @@ PROMPT_SEP = SeparatorStyle.SINGLE.value
|
|||||||
PROMPT_NEED_NEED_STREAM_OUT = True
|
PROMPT_NEED_NEED_STREAM_OUT = True
|
||||||
|
|
||||||
prompt = PromptTemplate(
|
prompt = PromptTemplate(
|
||||||
template_scene=ChatScene.ChatNewKnowledge.value,
|
template_scene=ChatScene.ChatNewKnowledge.value(),
|
||||||
input_variables=["context", "question"],
|
input_variables=["context", "question"],
|
||||||
response_format=None,
|
response_format=None,
|
||||||
template_define=PROMPT_SCENE_DEFINE,
|
template_define=PROMPT_SCENE_DEFINE,
|
||||||
|
@ -25,7 +25,7 @@ CFG = Config()
|
|||||||
|
|
||||||
|
|
||||||
class ChatDefaultKnowledge(BaseChat):
|
class ChatDefaultKnowledge(BaseChat):
|
||||||
chat_scene: str = ChatScene.ChatDefaultKnowledge.value
|
chat_scene: str = ChatScene.ChatDefaultKnowledge.value()
|
||||||
|
|
||||||
"""Number of results to return from the query"""
|
"""Number of results to return from the query"""
|
||||||
|
|
||||||
|
@ -39,7 +39,7 @@ PROMPT_SEP = SeparatorStyle.SINGLE.value
|
|||||||
PROMPT_NEED_NEED_STREAM_OUT = True
|
PROMPT_NEED_NEED_STREAM_OUT = True
|
||||||
|
|
||||||
prompt = PromptTemplate(
|
prompt = PromptTemplate(
|
||||||
template_scene=ChatScene.ChatDefaultKnowledge.value,
|
template_scene=ChatScene.ChatDefaultKnowledge.value(),
|
||||||
input_variables=["context", "question"],
|
input_variables=["context", "question"],
|
||||||
response_format=None,
|
response_format=None,
|
||||||
template_define=PROMPT_SCENE_DEFINE,
|
template_define=PROMPT_SCENE_DEFINE,
|
||||||
|
@ -8,7 +8,7 @@ CFG = Config()
|
|||||||
|
|
||||||
|
|
||||||
class InnerChatDBSummary(BaseChat):
|
class InnerChatDBSummary(BaseChat):
|
||||||
chat_scene: str = ChatScene.InnerChatDBSummary.value
|
chat_scene: str = ChatScene.InnerChatDBSummary.value()
|
||||||
|
|
||||||
"""Number of results to return from the query"""
|
"""Number of results to return from the query"""
|
||||||
|
|
||||||
|
@ -38,7 +38,7 @@ PROMPT_SEP = SeparatorStyle.SINGLE.value
|
|||||||
PROMPT_NEED_NEED_STREAM_OUT = False
|
PROMPT_NEED_NEED_STREAM_OUT = False
|
||||||
|
|
||||||
prompt = PromptTemplate(
|
prompt = PromptTemplate(
|
||||||
template_scene=ChatScene.InnerChatDBSummary.value,
|
template_scene=ChatScene.InnerChatDBSummary.value(),
|
||||||
input_variables=["db_profile_summary", "db_input", "response"],
|
input_variables=["db_profile_summary", "db_input", "response"],
|
||||||
response_format=json.dumps(RESPONSE_FORMAT, indent=4),
|
response_format=json.dumps(RESPONSE_FORMAT, indent=4),
|
||||||
template_define=PROMPT_SCENE_DEFINE,
|
template_define=PROMPT_SCENE_DEFINE,
|
||||||
|
@ -24,7 +24,7 @@ CFG = Config()
|
|||||||
|
|
||||||
|
|
||||||
class ChatUrlKnowledge(BaseChat):
|
class ChatUrlKnowledge(BaseChat):
|
||||||
chat_scene: str = ChatScene.ChatUrlKnowledge.value
|
chat_scene: str = ChatScene.ChatUrlKnowledge.value()
|
||||||
|
|
||||||
"""Number of results to return from the query"""
|
"""Number of results to return from the query"""
|
||||||
|
|
||||||
|
@ -38,7 +38,7 @@ PROMPT_SEP = SeparatorStyle.SINGLE.value
|
|||||||
PROMPT_NEED_NEED_STREAM_OUT = True
|
PROMPT_NEED_NEED_STREAM_OUT = True
|
||||||
|
|
||||||
prompt = PromptTemplate(
|
prompt = PromptTemplate(
|
||||||
template_scene=ChatScene.ChatUrlKnowledge.value,
|
template_scene=ChatScene.ChatUrlKnowledge.value(),
|
||||||
input_variables=["context", "question"],
|
input_variables=["context", "question"],
|
||||||
response_format=None,
|
response_format=None,
|
||||||
template_define=PROMPT_SCENE_DEFINE,
|
template_define=PROMPT_SCENE_DEFINE,
|
||||||
|
@ -61,4 +61,4 @@ class ChatKnowledge(BaseChat):
|
|||||||
|
|
||||||
@property
|
@property
|
||||||
def chat_type(self) -> str:
|
def chat_type(self) -> str:
|
||||||
return ChatScene.ChatKnowledge.value
|
return ChatScene.ChatKnowledge.value()
|
||||||
|
@ -39,7 +39,7 @@ PROMPT_SEP = SeparatorStyle.SINGLE.value
|
|||||||
PROMPT_NEED_NEED_STREAM_OUT = True
|
PROMPT_NEED_NEED_STREAM_OUT = True
|
||||||
|
|
||||||
prompt = PromptTemplate(
|
prompt = PromptTemplate(
|
||||||
template_scene=ChatScene.ChatKnowledge.value,
|
template_scene=ChatScene.ChatKnowledge.value(),
|
||||||
input_variables=["context", "question"],
|
input_variables=["context", "question"],
|
||||||
response_format=None,
|
response_format=None,
|
||||||
template_define=PROMPT_SCENE_DEFINE,
|
template_define=PROMPT_SCENE_DEFINE,
|
||||||
|
@ -14,7 +14,7 @@ CFG = Config()
|
|||||||
|
|
||||||
|
|
||||||
class ChatNormal(BaseChat):
|
class ChatNormal(BaseChat):
|
||||||
chat_scene: str = ChatScene.ChatNormal.value
|
chat_scene: str = ChatScene.ChatNormal.value()
|
||||||
|
|
||||||
"""Number of results to return from the query"""
|
"""Number of results to return from the query"""
|
||||||
|
|
||||||
|
@ -17,7 +17,7 @@ PROMPT_SEP = SeparatorStyle.SINGLE.value
|
|||||||
PROMPT_NEED_NEED_STREAM_OUT = True
|
PROMPT_NEED_NEED_STREAM_OUT = True
|
||||||
|
|
||||||
prompt = PromptTemplate(
|
prompt = PromptTemplate(
|
||||||
template_scene=ChatScene.ChatNormal.value,
|
template_scene=ChatScene.ChatNormal.value(),
|
||||||
input_variables=["input"],
|
input_variables=["input"],
|
||||||
response_format=None,
|
response_format=None,
|
||||||
template_define=PROMPT_SCENE_DEFINE,
|
template_define=PROMPT_SCENE_DEFINE,
|
||||||
|
Loading…
Reference in New Issue
Block a user