Mirror of https://github.com/csunny/DB-GPT.git (synced 2025-08-19 16:57:21 +00:00)

Merge remote-tracking branch 'origin/dev_ty_06_end' into llm_framework

This commit is contained in: commit d2e82d575e

pilot/connections/rdbms/py_study/study_enum.py (new file, 44 lines)

@@ -0,0 +1,44 @@
+from enum import Enum
+from typing import List
+
+
+class Test(Enum):
+    XXX = ("x", "1", True)
+    YYY = ("Y", "2", False)
+    ZZZ = ("Z", "3")
+
+    def __init__(self, code, v, flag=False):
+        self.code = code
+        self.v = v
+        self.flag = flag
+
+
+class Scene:
+    def __init__(self, code, name, describe, param_types: List = [], is_inner: bool = False):
+        self.code = code
+        self.name = name
+        self.describe = describe
+        self.param_types = param_types
+        self.is_inner = is_inner
+
+
+class ChatScene(Enum):
+    ChatWithDbExecute = Scene("chat_with_db_execute", "Chat Data", "Dialogue with your private data through natural language.", ["DB Select"])
+    ChatWithDbQA = Scene("chat_with_db_qa", "Chat Meta Data", "Have a Professional Conversation with Metadata.", ["DB Select"])
+    ChatExecution = Scene("chat_execution", "Chat Plugin", "Use tools through dialogue to accomplish your goals.", ["Plugin Select"])
+    ChatDefaultKnowledge = Scene("chat_default_knowledge", "Chat Default Knowledge", "Dialogue through natural language and private documents and knowledge bases.")
+    ChatNewKnowledge = Scene("chat_new_knowledge", "Chat New Knowledge", "Dialogue through natural language and private documents and knowledge bases.", ["Knowledge Select"])
+    ChatUrlKnowledge = Scene("chat_url_knowledge", "Chat URL", "Dialogue through natural language and private documents and knowledge bases.", ["Url Input"])
+    InnerChatDBSummary = Scene("inner_chat_db_summary", "DB Summary", "Db Summary.", is_inner=True)
+
+    ChatNormal = Scene("chat_normal", "Chat Normal", "Native LLM large model AI dialogue.")
+    ChatDashboard = Scene("chat_dashboard", "Chat Dashboard", "Provide you with professional analysis reports through natural language.", ["DB Select"])
+    ChatKnowledge = Scene("chat_knowledge", "Chat Knowledge", "Dialogue through natural language and private documents and knowledge bases.", ["Knowledge Space Select"])
+
+    def scene_value(self):
+        return self.value.code
+
+    def scene_name(self):
+        return self._value_.name
+
+
+if __name__ == "__main__":
+    print(ChatScene.ChatWithDbExecute.scene_value())
+    # print(ChatScene.ChatWithDbExecute.value.describe)
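The tuple-valued members above rely on a stock Enum behavior: a tuple assigned to a member is unpacked into the member's __init__, so each member carries extra attributes. A minimal, self-contained sketch of that mechanism (names invented for illustration):

from enum import Enum

class Color(Enum):
    # Each tuple is unpacked into __init__, giving every member .code and .flag.
    RED = ("r", True)
    BLUE = ("b",)

    def __init__(self, code, flag=False):
        self.code = code
        self.flag = flag

assert Color.RED.code == "r" and Color.RED.flag is True
assert Color.BLUE.flag is False        # omitted tuple slots fall back to the default
assert Color.RED.value == ("r", True)  # .value still holds the whole tuple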
@@ -36,8 +36,9 @@ class DuckdbHistoryMemory(BaseChatHistoryMemory):
         if not result:
             # Create the table if it does not exist
             self.connect.execute(
-                "CREATE TABLE chat_history (conv_uid VARCHAR(100) PRIMARY KEY, chat_mode VARCHAR(50), summary VARCHAR(255), user_name VARCHAR(100), messages TEXT)"
+                "CREATE TABLE chat_history (id integer primary key, conv_uid VARCHAR(100) UNIQUE, chat_mode VARCHAR(50), summary VARCHAR(255), user_name VARCHAR(100), messages TEXT)"
             )
+            self.connect.execute("CREATE SEQUENCE seq_id START 1;")

     def __get_messages_by_conv_uid(self, conv_uid: str):
         cursor = self.connect.cursor()
@@ -59,7 +60,7 @@ class DuckdbHistoryMemory(BaseChatHistoryMemory):
         try:
             cursor = self.connect.cursor()
             cursor.execute(
-                "INSERT INTO chat_history(conv_uid, chat_mode, summary, user_name, messages)VALUES(?,?,?,?,?)",
+                "INSERT INTO chat_history(id, conv_uid, chat_mode, summary, user_name, messages)VALUES(nextval('seq_id'),?,?,?,?,?)",
                 [self.chat_seesion_id, chat_mode, summary, user_name, ""],
             )
             cursor.commit()
@@ -81,7 +82,7 @@ class DuckdbHistoryMemory(BaseChatHistoryMemory):
             )
         else:
             cursor.execute(
-                "INSERT INTO chat_history(conv_uid, chat_mode, summary, user_name, messages)VALUES(?,?,?,?,?)",
+                "INSERT INTO chat_history(id, conv_uid, chat_mode, summary, user_name, messages)VALUES(nextval('seq_id'),?,?,?,?,?)",
                 [
                     self.chat_seesion_id,
                     once_message.chat_mode,
@@ -115,10 +116,10 @@ class DuckdbHistoryMemory(BaseChatHistoryMemory):
         cursor = duckdb.connect(duckdb_path).cursor()
         if user_name:
             cursor.execute(
-                "SELECT * FROM chat_history where user_name=? limit 20", [user_name]
+                "SELECT * FROM chat_history where user_name=? order by id desc limit 20", [user_name]
             )
         else:
-            cursor.execute("SELECT * FROM chat_history limit 20")
+            cursor.execute("SELECT * FROM chat_history order by id desc limit 20")
         # Get the field names from the query result
         fields = [field[0] for field in cursor.description]
         data = []
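The schema change works because DuckDB has no AUTO_INCREMENT column attribute; the commit emulates one with a sequence feeding the new id primary key, which in turn lets "order by id desc limit 20" return the most recent conversations first. A runnable sketch against an in-memory database (assuming the duckdb Python package; table trimmed for illustration):

import duckdb

con = duckdb.connect()  # in-memory DuckDB
con.execute("CREATE TABLE chat_history (id INTEGER PRIMARY KEY, conv_uid VARCHAR UNIQUE, summary VARCHAR)")
con.execute("CREATE SEQUENCE seq_id START 1;")
for uid in ("uid-1", "uid-2", "uid-3"):
    # nextval('seq_id') hands out 1, 2, 3, ... so id doubles as insertion order
    con.execute(
        "INSERT INTO chat_history(id, conv_uid, summary) VALUES (nextval('seq_id'), ?, ?)",
        [uid, "hello"],
    )
print(con.execute("SELECT * FROM chat_history ORDER BY id DESC LIMIT 20").fetchall())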
@@ -66,7 +66,7 @@ def __get_conv_user_message(conversations: dict):


 def __new_conversation(chat_mode, user_id) -> ConversationVo:
     unique_id = uuid.uuid1()
-    history_mem = DuckdbHistoryMemory(str(unique_id))
+    # history_mem = DuckdbHistoryMemory(str(unique_id))
     return ConversationVo(conv_uid=str(unique_id), chat_mode=chat_mode)
@@ -102,12 +102,7 @@ async def read_main():


 @router.get("/v1/chat/dialogue/list", response_model=Result[ConversationVo])
-async def dialogue_list(response: Response, user_id: str = None):
-    # Set the CORS headers
-    response.headers["Access-Control-Allow-Origin"] = "*"
-    response.headers["Access-Control-Allow-Methods"] = "GET"
-    response.headers["Access-Control-Request-Headers"] = "content-type"
-
+async def dialogue_list(user_id: str = None):
     dialogues: List = []
     datas = DuckdbHistoryMemory.conv_list(user_id)
@@ -132,19 +127,17 @@ async def dialogue_scenes():
     new_modes: List[ChatScene] = [
         ChatScene.ChatWithDbExecute,
         ChatScene.ChatWithDbQA,
-        ChatScene.ChatDashboard,
         ChatScene.ChatKnowledge,
+        ChatScene.ChatDashboard,
         ChatScene.ChatExecution,
     ]
     for scene in new_modes:
-        if not scene.value in [
-            ChatScene.ChatNormal.value,
-            ChatScene.InnerChatDBSummary.value,
-        ]:
         scene_vo = ChatSceneVo(
-            chat_scene=scene.value,
-            scene_name=scene.name,
-            param_title="Selection Param",
+            chat_scene=scene.value(),
+            scene_name=scene.scene_name(),
+            scene_describe=scene.describe(),
+            param_title=",".join(scene.param_types()),
         )
         scene_vos.append(scene_vo)
     return Result.succ(scene_vos)
@@ -152,23 +145,23 @@ async def dialogue_scenes():


 @router.post("/v1/chat/dialogue/new", response_model=Result[ConversationVo])
 async def dialogue_new(
-    chat_mode: str = ChatScene.ChatNormal.value, user_id: str = None
+    chat_mode: str = ChatScene.ChatNormal.value(), user_id: str = None
 ):
     conv_vo = __new_conversation(chat_mode, user_id)
     return Result.succ(conv_vo)


 @router.post("/v1/chat/mode/params/list", response_model=Result[dict])
-async def params_list(chat_mode: str = ChatScene.ChatNormal.value):
-    if ChatScene.ChatWithDbQA.value == chat_mode:
+async def params_list(chat_mode: str = ChatScene.ChatNormal.value()):
+    if ChatScene.ChatWithDbQA.value() == chat_mode:
         return Result.succ(get_db_list())
-    elif ChatScene.ChatWithDbExecute.value == chat_mode:
+    elif ChatScene.ChatWithDbExecute.value() == chat_mode:
         return Result.succ(get_db_list())
-    elif ChatScene.ChatDashboard.value == chat_mode:
+    elif ChatScene.ChatDashboard.value() == chat_mode:
         return Result.succ(get_db_list())
-    elif ChatScene.ChatExecution.value == chat_mode:
+    elif ChatScene.ChatExecution.value() == chat_mode:
         return Result.succ(plugins_select_info())
-    elif ChatScene.ChatKnowledge.value == chat_mode:
+    elif ChatScene.ChatKnowledge.value() == chat_mode:
         return Result.succ(knowledge_list())
     else:
         return Result.succ(None)
@@ -201,7 +194,7 @@ async def dialogue_history_messages(con_uid: str):
 async def chat_completions(dialogue: ConversationVo = Body()):
     print(f"chat_completions:{dialogue.chat_mode},{dialogue.select_param}")
     if not dialogue.chat_mode:
-        dialogue.chat_mode = ChatScene.ChatNormal.value
+        dialogue.chat_mode = ChatScene.ChatNormal.value()
     if not dialogue.conv_uid:
         conv_vo = __new_conversation(dialogue.chat_mode, dialogue.user_name)
         dialogue.conv_uid = conv_vo.conv_uid
@@ -222,17 +215,17 @@ async def chat_completions(dialogue: ConversationVo = Body()):
         "user_input": dialogue.user_input,
     }

-    if ChatScene.ChatWithDbQA.value == dialogue.chat_mode:
+    if ChatScene.ChatWithDbQA.value() == dialogue.chat_mode:
         chat_param.update({"db_name": dialogue.select_param})
-    elif ChatScene.ChatWithDbExecute.value == dialogue.chat_mode:
+    elif ChatScene.ChatWithDbExecute.value() == dialogue.chat_mode:
         chat_param.update({"db_name": dialogue.select_param})
-    elif ChatScene.ChatDashboard.value == dialogue.chat_mode:
+    elif ChatScene.ChatDashboard.value() == dialogue.chat_mode:
         chat_param.update({"db_name": dialogue.select_param})
         ## DEFAULT
         chat_param.update({"report_name": "sales_report"})
-    elif ChatScene.ChatExecution.value == dialogue.chat_mode:
+    elif ChatScene.ChatExecution.value() == dialogue.chat_mode:
         chat_param.update({"plugin_selector": dialogue.select_param})
-    elif ChatScene.ChatKnowledge.value == dialogue.chat_mode:
+    elif ChatScene.ChatKnowledge.value() == dialogue.chat_mode:
         chat_param.update({"knowledge_space": dialogue.select_param})

     chat: BaseChat = CHAT_FACTORY.get_implementation(dialogue.chat_mode, **chat_param)
@@ -26,7 +26,8 @@ class Result(Generic[T], BaseModel):
 class ChatSceneVo(BaseModel):
     chat_scene: str = Field(..., description="chat_scene")
     scene_name: str = Field(..., description="chat_scene name show for user")
-    param_title: str = Field(..., description="chat_scene required parameter title")
+    scene_describe: str = Field("", description="chat_scene describe")
+    param_title: str = Field("", description="chat_scene required parameter title")


 class ConversationVo(BaseModel):
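Switching a field's first Field argument from ... (Ellipsis, meaning required) to "" is what makes the new fields optional with an empty-string default; a minimal pydantic sketch (model trimmed for illustration):

from pydantic import BaseModel, Field

class ChatSceneVo(BaseModel):
    chat_scene: str = Field(..., description="chat_scene")                           # required
    scene_describe: str = Field("", description="chat_scene describe")               # optional
    param_title: str = Field("", description="chat_scene required parameter title")  # optional

vo = ChatSceneVo(chat_scene="chat_normal")  # no validation error: omitted fields default to ""
print(vo.param_title)  # ""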
@@ -1,28 +1,52 @@
 from enum import Enum
+from typing import List


 class Scene:
-    def __init__(self, code, describe, is_inner):
+    def __init__(self, code, name, describe, param_types: List = [], is_inner: bool = False):
         self.code = code
+        self.name = name
         self.describe = describe
+        self.param_types = param_types
         self.is_inner = is_inner


 class ChatScene(Enum):
-    ChatWithDbExecute = "chat_with_db_execute"
-    ChatWithDbQA = "chat_with_db_qa"
-    ChatExecution = "chat_execution"
-    ChatDefaultKnowledge = "chat_default_knowledge"
-    ChatNewKnowledge = "chat_new_knowledge"
-    ChatUrlKnowledge = "chat_url_knowledge"
-    InnerChatDBSummary = "inner_chat_db_summary"
-
-    ChatNormal = "chat_normal"
-    ChatDashboard = "chat_dashboard"
-    ChatKnowledge = "chat_knowledge"
-    # ChatDb = "chat_db"
-    # ChatData= "chat_data"
+    ChatWithDbExecute = Scene("chat_with_db_execute", "Chat Data",
+                              "Dialogue with your private data through natural language.", ["DB Select"])
+    ChatWithDbQA = Scene("chat_with_db_qa", "Chat Meta Data", "Have a Professional Conversation with Metadata.",
+                         ["DB Select"])
+    ChatExecution = Scene("chat_execution", "Plugin", "Use tools through dialogue to accomplish your goals.",
+                          ["Plugin Select"])
+    ChatDefaultKnowledge = Scene("chat_default_knowledge", "Chat Default Knowledge",
+                                 "Dialogue through natural language and private documents and knowledge bases.")
+    ChatNewKnowledge = Scene("chat_new_knowledge", "Chat New Knowledge",
+                             "Dialogue through natural language and private documents and knowledge bases.",
+                             ["Knowledge Select"])
+    ChatUrlKnowledge = Scene("chat_url_knowledge", "Chat URL",
+                             "Dialogue through natural language and private documents and knowledge bases.",
+                             ["Url Input"])
+    InnerChatDBSummary = Scene("inner_chat_db_summary", "DB Summary", "Db Summary.", is_inner=True)
+
+    ChatNormal = Scene("chat_normal", "Chat Normal", "Native LLM large model AI dialogue.")
+    ChatDashboard = Scene("chat_dashboard", "Dashboard",
+                          "Provide you with professional analysis reports through natural language.", ["DB Select"])
+    ChatKnowledge = Scene("chat_knowledge", "Chat Knowledge",
+                          "Dialogue through natural language and private documents and knowledge bases.",
+                          ["Knowledge Space Select"])

     @staticmethod
     def is_valid_mode(mode):
-        return any(mode == item.value for item in ChatScene)
+        return any(mode == item.value() for item in ChatScene)
+
+    def value(self):
+        return self._value_.code
+
+    def scene_name(self):
+        return self._value_.name
+
+    def describe(self):
+        return self._value_.describe
+
+    def param_types(self):
+        return self._value_.param_types
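This rewrite is why every call site below changes from .value to .value(): defining a plain method named value in the Enum body shadows Enum's built-in value property on members, while _value_ still holds the underlying Scene object. A self-contained sketch of the trade-off:

from enum import Enum

class Scene:
    def __init__(self, code, name):
        self.code = code
        self.name = name

class ChatScene(Enum):
    ChatNormal = Scene("chat_normal", "Chat Normal")

    def value(self):
        # Shadows Enum.value; callers must now write .value() instead of .value.
        return self._value_.code

assert ChatScene.ChatNormal.value() == "chat_normal"
assert ChatScene.ChatNormal._value_.name == "Chat Normal"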
@@ -73,10 +73,10 @@ class BaseChat(ABC):

         ### load prompt template
         self.prompt_template: PromptTemplate = CFG.prompt_templates[
-            self.chat_mode.value
+            self.chat_mode.value()
         ]
         self.history_message: List[OnceConversation] = self.memory.messages()
-        self.current_message: OnceConversation = OnceConversation(chat_mode.value)
+        self.current_message: OnceConversation = OnceConversation(chat_mode.value())
         self.current_tokens_used: int = 0

     class Config:
@@ -23,7 +23,7 @@ CFG = Config()


 class ChatDashboard(BaseChat):
-    chat_scene: str = ChatScene.ChatDashboard.value
+    chat_scene: str = ChatScene.ChatDashboard.value()
     report_name: str
     """Number of results to return from the query"""
@@ -17,7 +17,7 @@ Provide professional data analysis, use as few dimensions as possible, but no le
 Used to support goal: {input}

 Pay attention to the length of the output content of the analysis result, do not exceed 4000 tokens
-According to the characteristics of the analyzed data, choose the best one from the charts provided below to display, chart types:
+According to the characteristics of the analyzed data, choose the best one from the charts provided below to display, use different types of charts as much as possible, chart types:
 {supported_chat_type}

 Give {dialect} data analysis SQL, analysis title, display method and analytical thinking, respond in the following json format:
@@ -40,7 +40,7 @@ PROMPT_SEP = SeparatorStyle.SINGLE.value
 PROMPT_NEED_NEED_STREAM_OUT = False

 prompt = PromptTemplate(
-    template_scene=ChatScene.ChatDashboard.value,
+    template_scene=ChatScene.ChatDashboard.value(),
     input_variables=["input", "table_info", "dialect", "supported_chat_type"],
     response_format=json.dumps(RESPONSE_FORMAT, indent=4),
     template_define=PROMPT_SCENE_DEFINE,
@@ -3,7 +3,7 @@
     "name": "sale_report",
     "introduce": "",
     "layout": "TODO",
-    "supported_chart_type":["HeatMap","sheet", "LineChart", "PieChart", "BarChart"],
+    "supported_chart_type":["HeatMap","sheet", "LineChart", "PieChart", "BarChart", "Scatterplot", "IndicatorValue", "Table"],
     "key_metrics":[],
     "trends": []
 }
@@ -17,7 +17,7 @@ CFG = Config()


 class ChatWithDbAutoExecute(BaseChat):
-    chat_scene: str = ChatScene.ChatWithDbExecute.value
+    chat_scene: str = ChatScene.ChatWithDbExecute.value()

     """Number of results to return from the query"""
@@ -35,15 +35,15 @@ class DbChatOutputParser(BaseOutputParser):
         if len(data) <= 1:
             data.insert(0, ["result"])
         df = pd.DataFrame(data[1:], columns=data[0])
-        if CFG.NEW_SERVER_MODE:
-            html = df.to_html(index=False, escape=False, sparsify=False)
-            html = "".join(html.split())
-        else:
+        if not CFG.NEW_SERVER_MODE:
             table_style = """<style>
                 table{border-collapse:collapse;width:100%;height:80%;margin:0 auto;float:center;border: 1px solid #007bff; background-color:#333; color:#fff}th,td{border:1px solid #ddd;padding:3px;text-align:center}th{background-color:#C9C3C7;color: #fff;font-weight: bold;}tr:nth-child(even){background-color:#444}tr:hover{background-color:#444}
             </style>"""
             html_table = df.to_html(index=False, escape=False)
             html = f"<html><head>{table_style}</head><body>{html_table}</body></html>"
+        else:
+            html = df.to_html(index=False, escape=False, sparsify=False)
+            html = "".join(html.split())

         view_text = f"##### {str(speak)}" + "\n" + html.replace("\n", " ")
         return view_text
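Both branches render the DataFrame with pandas; only the packaging differs. A small sketch of the compact path (note that "".join(html.split()) strips every whitespace run, newlines included, so the markup collapses onto a single line):

import pandas as pd

df = pd.DataFrame([[1, "a"], [2, "b"]], columns=["id", "name"])
html = df.to_html(index=False, escape=False, sparsify=False)
compact = "".join(html.split())  # one line, no whitespace at all
print("\n" in compact)  # False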
@@ -37,7 +37,7 @@ PROMPT_SEP = SeparatorStyle.SINGLE.value
 PROMPT_NEED_NEED_STREAM_OUT = False

 prompt = PromptTemplate(
-    template_scene=ChatScene.ChatWithDbExecute.value,
+    template_scene=ChatScene.ChatWithDbExecute.value(),
     input_variables=["input", "table_info", "dialect", "top_k", "response"],
     response_format=json.dumps(RESPONSE_FORMAT_SIMPLE, indent=4),
     template_define=PROMPT_SCENE_DEFINE,
@@ -15,7 +15,7 @@ CFG = Config()


 class ChatWithDbQA(BaseChat):
-    chat_scene: str = ChatScene.ChatWithDbQA.value
+    chat_scene: str = ChatScene.ChatWithDbQA.value()

     """Number of results to return from the query"""

@@ -59,7 +59,7 @@ PROMPT_SEP = SeparatorStyle.SINGLE.value
 PROMPT_NEED_NEED_STREAM_OUT = True

 prompt = PromptTemplate(
-    template_scene=ChatScene.ChatWithDbQA.value,
+    template_scene=ChatScene.ChatWithDbQA.value(),
     input_variables=["input", "table_info"],
     response_format=None,
     template_define=PROMPT_SCENE_DEFINE,
@@ -16,7 +16,7 @@ CFG = Config()


 class ChatWithPlugin(BaseChat):
-    chat_scene: str = ChatScene.ChatExecution.value
+    chat_scene: str = ChatScene.ChatExecution.value()
     plugins_prompt_generator: PluginPromptGenerator
     select_plugin: str = None

@@ -40,7 +40,7 @@ PROMPT_SEP = SeparatorStyle.SINGLE.value
 PROMPT_NEED_STREAM_OUT = False

 prompt = PromptTemplate(
-    template_scene=ChatScene.ChatExecution.value,
+    template_scene=ChatScene.ChatExecution.value(),
     input_variables=["input", "constraints", "commands_infos", "response"],
     response_format=json.dumps(RESPONSE_FORMAT, indent=4),
     template_define=PROMPT_SCENE_DEFINE,
@@ -23,7 +23,7 @@ CFG = Config()


 class ChatNewKnowledge(BaseChat):
-    chat_scene: str = ChatScene.ChatNewKnowledge.value
+    chat_scene: str = ChatScene.ChatNewKnowledge.value()

     """Number of results to return from the query"""

@@ -38,7 +38,7 @@ PROMPT_SEP = SeparatorStyle.SINGLE.value
 PROMPT_NEED_NEED_STREAM_OUT = True

 prompt = PromptTemplate(
-    template_scene=ChatScene.ChatNewKnowledge.value,
+    template_scene=ChatScene.ChatNewKnowledge.value(),
     input_variables=["context", "question"],
     response_format=None,
     template_define=PROMPT_SCENE_DEFINE,
@@ -25,7 +25,7 @@ CFG = Config()


 class ChatDefaultKnowledge(BaseChat):
-    chat_scene: str = ChatScene.ChatDefaultKnowledge.value
+    chat_scene: str = ChatScene.ChatDefaultKnowledge.value()

     """Number of results to return from the query"""

@@ -39,7 +39,7 @@ PROMPT_SEP = SeparatorStyle.SINGLE.value
 PROMPT_NEED_NEED_STREAM_OUT = True

 prompt = PromptTemplate(
-    template_scene=ChatScene.ChatDefaultKnowledge.value,
+    template_scene=ChatScene.ChatDefaultKnowledge.value(),
     input_variables=["context", "question"],
     response_format=None,
     template_define=PROMPT_SCENE_DEFINE,
@@ -8,7 +8,7 @@ CFG = Config()


 class InnerChatDBSummary(BaseChat):
-    chat_scene: str = ChatScene.InnerChatDBSummary.value
+    chat_scene: str = ChatScene.InnerChatDBSummary.value()

     """Number of results to return from the query"""

@@ -38,7 +38,7 @@ PROMPT_SEP = SeparatorStyle.SINGLE.value
 PROMPT_NEED_NEED_STREAM_OUT = False

 prompt = PromptTemplate(
-    template_scene=ChatScene.InnerChatDBSummary.value,
+    template_scene=ChatScene.InnerChatDBSummary.value(),
     input_variables=["db_profile_summary", "db_input", "response"],
     response_format=json.dumps(RESPONSE_FORMAT, indent=4),
     template_define=PROMPT_SCENE_DEFINE,
@@ -24,7 +24,7 @@ CFG = Config()


 class ChatUrlKnowledge(BaseChat):
-    chat_scene: str = ChatScene.ChatUrlKnowledge.value
+    chat_scene: str = ChatScene.ChatUrlKnowledge.value()

     """Number of results to return from the query"""

@@ -38,7 +38,7 @@ PROMPT_SEP = SeparatorStyle.SINGLE.value
 PROMPT_NEED_NEED_STREAM_OUT = True

 prompt = PromptTemplate(
-    template_scene=ChatScene.ChatUrlKnowledge.value,
+    template_scene=ChatScene.ChatUrlKnowledge.value(),
     input_variables=["context", "question"],
     response_format=None,
     template_define=PROMPT_SCENE_DEFINE,
@@ -61,4 +61,4 @@ class ChatKnowledge(BaseChat):

     @property
     def chat_type(self) -> str:
-        return ChatScene.ChatKnowledge.value
+        return ChatScene.ChatKnowledge.value()

@@ -39,7 +39,7 @@ PROMPT_SEP = SeparatorStyle.SINGLE.value
 PROMPT_NEED_NEED_STREAM_OUT = True

 prompt = PromptTemplate(
-    template_scene=ChatScene.ChatKnowledge.value,
+    template_scene=ChatScene.ChatKnowledge.value(),
     input_variables=["context", "question"],
     response_format=None,
     template_define=PROMPT_SCENE_DEFINE,
@@ -14,7 +14,7 @@ CFG = Config()


 class ChatNormal(BaseChat):
-    chat_scene: str = ChatScene.ChatNormal.value
+    chat_scene: str = ChatScene.ChatNormal.value()

     """Number of results to return from the query"""

@@ -17,7 +17,7 @@ PROMPT_SEP = SeparatorStyle.SINGLE.value
 PROMPT_NEED_NEED_STREAM_OUT = True

 prompt = PromptTemplate(
-    template_scene=ChatScene.ChatNormal.value,
+    template_scene=ChatScene.ChatNormal.value(),
     input_variables=["input"],
     response_format=None,
     template_define=PROMPT_SCENE_DEFINE,
@@ -91,15 +91,19 @@ if __name__ == "__main__":
     parser.add_argument("--port", type=int, default=5000)
     parser.add_argument("--concurrency-count", type=int, default=10)
     parser.add_argument("--share", default=False, action="store_true")
+    parser.add_argument("-light", "--light", default=False, action="store_true", help="enable light mode")
     signal.signal(signal.SIGINT, signal_handler)

     # init server config
     args = parser.parse_args()
+    server_init(args)

-    from pilot.server.llmserver import worker
-    worker.start_check()
-    server_init(args)
-    CFG.NEW_SERVER_MODE = True
+    if not args.light:
+        print("Model Unified Deployment Mode!")
+        from pilot.server.llmserver import worker
+        worker.start_check()
+        CFG.NEW_SERVER_MODE = True
+
     import uvicorn
     uvicorn.run(app, host="0.0.0.0", port=args.port)
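A minimal sketch of the new startup switch (argparse accepts either -light or --light and stores True when the flag is passed; the attribute name comes from the long option):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument("-light", "--light", default=False, action="store_true", help="enable light mode")

args = parser.parse_args(["-light"])
assert args.light is True  # in this case the in-process model worker is skipped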
@@ -690,6 +690,9 @@ if __name__ == "__main__":
     parser.add_argument(
         "--model_list_mode", type=str, default="once", choices=["once", "reload"]
     )
+    parser.add_argument(
+        "-new", "--new", action="store_true", help="enable new http mode"
+    )

     # old version server config
     parser.add_argument("--host", type=str, default="0.0.0.0")