Mirror of https://github.com/csunny/DB-GPT.git (synced 2025-08-08 03:44:14 +00:00)
WEB API independent
parent 7208dd6c88
commit fe662cec5e
@@ -252,7 +252,6 @@ async def stream_generator(chat):
    for chunk in model_response.iter_lines(decode_unicode=False, delimiter=b"\0"):
        if chunk:
            msg = chat.prompt_template.output_parser.parse_model_stream_resp_ex(chunk, chat.skip_echo_len)
            chat.current_message.add_ai_message(msg)
            msg = msg.replace("\n", "\\n")
            yield f"data:{msg}\n\n"
            await asyncio.sleep(0.1)
@@ -260,11 +259,13 @@ async def stream_generator(chat):
    for chunk in model_response:
        if chunk:
            msg = chat.prompt_template.output_parser.parse_model_stream_resp_ex(chunk, chat.skip_echo_len)
            chat.current_message.add_ai_message(msg)

            msg = msg.replace("\n", "\\n")
            yield f"data:{msg}\n\n"
            await asyncio.sleep(0.1)

    chat.current_message.add_ai_message(msg)
    chat.current_message.add_view_message(msg)
    chat.memory.append(chat.current_message)
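For orientation only: a minimal sketch of how an async SSE generator like stream_generator is typically exposed through FastAPI. The app object, route path, and the demo generator below are illustrative assumptions, not code from this commit.

import asyncio

from fastapi import FastAPI
from fastapi.responses import StreamingResponse

app = FastAPI()


async def demo_stream_generator():
    # Stand-in for stream_generator(chat): emit "data:" framed SSE chunks.
    for msg in ["hello", "hello world"]:
        yield f"data:{msg}\n\n"
        await asyncio.sleep(0.1)


@app.post("/v1/chat/demo")
async def chat_demo():
    # text/event-stream lets the client consume chunks as they arrive.
    return StreamingResponse(demo_stream_generator(), media_type="text/event-stream")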
@@ -54,4 +54,5 @@ class ChatWithDbAutoExecute(BaseChat):
        return input_values

    def do_action(self, prompt_response):
        print(f"do_action:{prompt_response}")
        return self.database.run(self.db_connect, prompt_response.sql)
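To make the data flow concrete: the output parser turns the model's JSON into a SqlAction-style object (defined further down in this diff), and do_action runs its .sql against the connected database. A small runnable sketch, where FakeDatabase is a hypothetical stand-in for the real connector:

from typing import Dict, NamedTuple


class SqlAction(NamedTuple):
    sql: str
    thoughts: Dict


class FakeDatabase:
    # Hypothetical stand-in for the real database wrapper's run() method.
    def run(self, db_connect, sql: str):
        print(f"executing: {sql}")
        return [["city"], ["beijing"]]  # header row + one result row


prompt_response = SqlAction(sql="SELECT city FROM users where user_name='test1'", thoughts={})
print(FakeDatabase().run(None, prompt_response.sql))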
@@ -4,14 +4,13 @@ from pilot.common.schema import ExampleType
EXAMPLES = [
    {
        "messages": [
-           {"type": "human", "data": {"content": "查询xxx", "example": True}},
+           {"type": "human", "data": {"content": "查询用户test1所在的城市", "example": True}},
            {
                "type": "ai",
                "data": {
                    "content": """{
                        \"thoughts\": \"thought text\",
                        \"speak\": \"thoughts summary to say to user\",
                        \"command\": {\"name\": \"command name\", \"args\": {\"arg name\": \"value\"}},
                        \"sql\": \"SELECT city FROM users where user_name='test1'\",
                    }""",
                    "example": True,
                },
@@ -20,14 +19,13 @@ EXAMPLES = [
    },
    {
        "messages": [
-           {"type": "human", "data": {"content": "查询xxx", "example": True}},
+           {"type": "human", "data": {"content": "查询成都的用户的订单信息", "example": True}},
            {
                "type": "ai",
                "data": {
                    "content": """{
                        \"thoughts\": \"thought text\",
                        \"speak\": \"thoughts summary to say to user\",
                        \"command\": {\"name\": \"command name\", \"args\": {\"arg name\": \"value\"}},
                        \"sql\": \"SELECT b.* FROM users a LEFT JOIN tran_order b ON a.user_name=b.user_name where a.city='成都'\",
                    }""",
                    "example": True,
                },
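The two human prompts above ask, in English, for the city where user test1 lives and for the order records of users in Chengdu. As a side note, a minimal sketch of how such message dicts could be flattened into few-shot prompt text; the render_examples helper below is hypothetical and not part of this commit.

from typing import Dict, List


def render_examples(examples: List[Dict]) -> str:
    # Flatten {"messages": [{"type": ..., "data": {"content": ...}}, ...]}
    # entries into simple "role: content" lines for a few-shot prompt.
    lines = []
    for example in examples:
        for message in example["messages"]:
            role = message["type"]                 # "human" or "ai"
            content = message["data"]["content"]
            lines.append(f"{role}: {content}")
    return "\n".join(lines)


EXAMPLES = [
    {
        "messages": [
            {"type": "human", "data": {"content": "查询用户test1所在的城市", "example": True}},
            {"type": "ai", "data": {"content": "SELECT city FROM users where user_name='test1'", "example": True}},
        ]
    }
]

print(render_examples(EXAMPLES))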
@@ -6,8 +6,9 @@ import pandas as pd
from pilot.utils import build_logger
from pilot.out_parser.base import BaseOutputParser, T
from pilot.configs.model_config import LOGDIR
from pilot.configs.config import Config


CFG = Config()
class SqlAction(NamedTuple):
    sql: str
    thoughts: Dict
@@ -32,11 +33,16 @@ class DbChatOutputParser(BaseOutputParser):
        if len(data) <= 1:
            data.insert(0, ["result"])
        df = pd.DataFrame(data[1:], columns=data[0])
        if CFG.NEW_SERVER_MODE:
            html = df.to_html(index=False, escape=False, sparsify=False)
            html = ''.join(html.split())
        else:
            table_style = """<style>
                table{border-collapse:collapse;width:100%;height:80%;margin:0 auto;float:center;border: 1px solid #007bff; background-color:#333; color:#fff}th,td{border:1px solid #ddd;padding:3px;text-align:center}th{background-color:#C9C3C7;color: #fff;font-weight: bold;}tr:nth-child(even){background-color:#444}tr:hover{background-color:#444}
                </style>"""
            html_table = df.to_html(index=False, escape=False)
            html = f"<html><head>{table_style}</head><body>{html_table}</body></html>"

        view_text = f"##### {str(speak)}" + "\n" + html.replace("\n", " ")
        return view_text
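To see what the NEW_SERVER_MODE branch produces, a tiny self-contained sketch: the first row of data becomes the header, the remaining rows become the table body, and all whitespace is squeezed out of the generated HTML so the table fits on a single SSE data line. The sample data and speak text are made up for illustration.

import pandas as pd

data = [["city"], ["beijing"]]                  # header row + one result row
df = pd.DataFrame(data[1:], columns=data[0])

html = df.to_html(index=False, escape=False, sparsify=False)
html = "".join(html.split())                    # collapse whitespace, as in NEW_SERVER_MODE

speak = "user test1 lives in beijing"           # illustrative summary text
view_text = f"##### {str(speak)}" + "\n" + html.replace("\n", " ")
print(view_text)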
@@ -63,6 +63,7 @@ class ChatWithPlugin(BaseChat):
        return input_values

    def do_action(self, prompt_response):
        print(f"do_action:{prompt_response}")
        ## plugin command run
        return execute_command(
            str(prompt_response.command.get("name")),
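For orientation, a self-contained sketch of the command-dispatch pattern that do_action relies on: a name-to-callable registry where execute_command looks up a command by name and invokes it with the parsed arguments. The registry and the echo command below are hypothetical stand-ins, not the plugin machinery of this repository.

from typing import Any, Callable, Dict

COMMANDS: Dict[str, Callable[..., Any]] = {}


def register_command(name: str):
    # Decorator that registers a callable under a command name.
    def wrapper(func: Callable[..., Any]):
        COMMANDS[name] = func
        return func
    return wrapper


@register_command("echo")
def echo(text: str) -> str:
    return text


def execute_command(name: str, **kwargs) -> Any:
    # Look up the command by name and invoke it with the parsed args.
    return COMMANDS[name](**kwargs)


print(execute_command("echo", text="hello"))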
@@ -12,6 +12,7 @@ logger = build_logger("webserver", LOGDIR + "DbChatOutputParser.log")


class NormalChatOutputParser(BaseOutputParser):

    def parse_prompt_response(self, model_out_text) -> T:
        return model_out_text