mirror of https://github.com/csunny/DB-GPT.git
feat(editor): ChatExcel

ChatExcel develop part 5

This commit is contained in:
parent b34338aa0c
commit f71f770e68
@@ -34,7 +34,6 @@ def response_line_chart(speak: str, df: DataFrame) -> str:
     plt.title("")

     buf = io.BytesIO()
-    ax.set_facecolor("lightgray")
     plt.savefig(buf, format="png", dpi=100)
     buf.seek(0)
     data = base64.b64encode(buf.getvalue()).decode("ascii")
@@ -63,7 +62,6 @@ def response_bar_chart(speak: str, df: DataFrame) -> str:
     plt.title("")

     buf = io.BytesIO()
-    ax.set_facecolor("lightgray")
     plt.savefig(buf, format="png", dpi=100)
     buf.seek(0)
     data = base64.b64encode(buf.getvalue()).decode("ascii")
@@ -95,7 +93,6 @@ def response_pie_chart(speak: str, df: DataFrame) -> str:
     # plt.title(columns[0])

     buf = io.BytesIO()
-    ax.set_facecolor("lightgray")
     plt.savefig(buf, format="png", dpi=100)
     buf.seek(0)
     data = base64.b64encode(buf.getvalue()).decode("ascii")
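Each of the three hunks above removes the ax.set_facecolor("lightgray") call between creating the buffer and saving the figure. For orientation, a minimal sketch of the pattern these response_*_chart helpers share after the change: plot the DataFrame with matplotlib, write the figure into an in-memory buffer, and return the PNG bytes base64-encoded. The helper name, figure setup, and return shape below are illustrative assumptions, not the repository's exact code.

import base64
import io

import matplotlib

matplotlib.use("Agg")  # assumption: render headlessly on the server
import matplotlib.pyplot as plt
from pandas import DataFrame


def render_chart_base64(df: DataFrame) -> str:
    """Hypothetical helper mirroring the shared tail of the chart functions."""
    fig, ax = plt.subplots(figsize=(8, 5), dpi=100)
    df.plot(ax=ax)  # line chart by default; bar/pie variants would pass kind=...
    plt.title("")

    buf = io.BytesIO()
    plt.savefig(buf, format="png", dpi=100)  # current figure -> PNG bytes in memory
    buf.seek(0)
    data = base64.b64encode(buf.getvalue()).decode("ascii")
    plt.close(fig)
    return data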
@@ -84,7 +84,9 @@ async def get_editor_sql(con_uid: str, round: int):
             if int(once["chat_order"]) == round:
                 for element in once["messages"]:
                     if element["type"] == "ai":
-                        return Result.succ(json.loads(element["data"]["content"]))
+                        logger.info(f'history ai json resp:{element["data"]["content"]}')
+                        context = element["data"]["content"].replace("\\n", " ").replace("\n", " ")
+                        return Result.succ(json.loads(context))
     return Result.faild(msg="not have sql!")


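The editor hunk above stops calling json.loads directly on the stored AI message: it now logs the raw content and flattens newline escapes before parsing. A minimal standalone sketch of that parse step, with the message structure taken from the hunk and the helper name invented for illustration:

import json
from typing import Optional


def extract_round_payload(messages: list) -> Optional[dict]:
    """Hypothetical helper: pull the AI reply out of one stored chat round."""
    for element in messages:
        if element["type"] == "ai":
            raw = element["data"]["content"]
            # Flatten escaped ("\n" as two characters) and real newlines to
            # spaces before parsing, mirroring the change in the hunk above.
            context = raw.replace("\\n", " ").replace("\n", " ")
            return json.loads(context)
    return None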
@@ -119,10 +119,16 @@ class BaseOutputParser(ABC):
             ai_response = ai_response.replace("assistant:", "")
             ai_response = ai_response.replace("Assistant:", "")
             ai_response = ai_response.replace("ASSISTANT:", "")
-            ai_response = ai_response.replace("\n", " ")
             ai_response = ai_response.replace("\_", "_")
             ai_response = ai_response.replace("\*", "*")
             ai_response = ai_response.replace("\t", "")
+
+            ai_response = (
+                ai_response.strip()
+                .replace("\\n", " ")
+                .replace("\n", " ")
+                .replace("\\", " ")
+            )
             print("un_stream ai response:", ai_response)
             return ai_response
         else:
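After this hunk the scattered replace calls collapse into one chained expression on the stripped response, and the bare "\n" handling moves into that chain alongside the source-level "\\n" sequence and stray backslashes. A standalone sketch of the resulting normalization, written as a free function for illustration (the surrounding BaseOutputParser plumbing is omitted):

def normalize_ai_response(ai_response: str) -> str:
    """Illustrative mirror of the cleanup chain above."""
    # Strip the role prefixes the model may echo back.
    for prefix in ("assistant:", "Assistant:", "ASSISTANT:"):
        ai_response = ai_response.replace(prefix, "")
    # Undo markdown-style escaping and drop tabs.
    ai_response = ai_response.replace("\\_", "_").replace("\\*", "*").replace("\t", "")
    # Collapse escaped and real newlines, then any leftover backslashes.
    return (
        ai_response.strip()
        .replace("\\n", " ")
        .replace("\n", " ")
        .replace("\\", " ")
    )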
@@ -204,8 +210,8 @@ class BaseOutputParser(ABC):
         cleaned_output = self.__extract_json(cleaned_output)
         cleaned_output = (
             cleaned_output.strip()
-            .replace("\n", " ")
             .replace("\\n", " ")
+            .replace("\n", " ")
             .replace("\\", " ")
         )
         cleaned_output = self.__illegal_json_ends(cleaned_output)
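This last hunk only reorders the chain so the source-level "\\n" replacement comes before the plain "\n" one, matching the order used for ai_response above; .replace("\\", " ") stays last in both versions. For context, a simplified end-to-end sketch of the prepare-then-parse path around it. The brace-slicing stand-in below is an assumption, since the real __extract_json helper is not shown in this diff, and __illegal_json_ends, which runs afterwards in the real code, is omitted here:

import json


def parse_model_json(raw_output: str) -> dict:
    """Illustrative stand-in for the parse path around the hunk above."""
    # Assumed stand-in for __extract_json: keep only the outermost {...} span.
    start, end = raw_output.find("{"), raw_output.rfind("}")
    if start != -1 and end != -1:
        raw_output = raw_output[start : end + 1]
    # Same cleanup chain as the hunk above, applied before json.loads.
    cleaned_output = (
        raw_output.strip()
        .replace("\\n", " ")
        .replace("\n", " ")
        .replace("\\", " ")
    )
    return json.loads(cleaned_output)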