feat: Support retry for 'Chat Data' (#1419)

Fangyin Cheng
2024-04-16 00:03:18 +08:00
committed by GitHub
parent 2e2e120ace
commit 461607e421
6 changed files with 159 additions and 59 deletions


@@ -21,8 +21,11 @@ from dbgpt.model.cluster import WorkerManagerFactory
 from dbgpt.serve.conversation.serve import Serve as ConversationServe
 from dbgpt.util import get_or_create_event_loop
 from dbgpt.util.executor_utils import ExecutorFactory, blocking_func_to_async
+from dbgpt.util.retry import async_retry
 from dbgpt.util.tracer import root_tracer, trace
 
+from .exceptions import BaseAppException
+
 logger = logging.getLogger(__name__)
 
 CFG = Config()
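
The two imports added above wire the new behavior into BaseChat: async_retry drives re-execution of the non-streaming call, and BaseAppException lets lower layers hand a ready-to-render error view up to the caller. The retry budget comes from two new Config knobs used by the decorator in the next hunk. A minimal sketch of how such knobs are typically loaded; the getenv mapping and the defaults of 1 are assumptions, not values documented in this diff:

import os

class AppSceneRetryConfig:
    # Hypothetical stand-in for the corresponding Config attributes.
    def __init__(self):
        # Retry rounds for non-streaming app-scene calls (assumed default: 1).
        self.DBGPT_APP_SCENE_NON_STREAMING_RETRIES_BASE = int(
            os.getenv("DBGPT_APP_SCENE_NON_STREAMING_RETRIES_BASE", "1")
        )
        # Parallel attempts launched per round (assumed default: 1).
        self.DBGPT_APP_SCENE_NON_STREAMING_PARALLELISM_BASE = int(
            os.getenv("DBGPT_APP_SCENE_NON_STREAMING_PARALLELISM_BASE", "1")
        )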
@@ -321,70 +324,67 @@ class BaseChat(ABC):
"BaseChat.nostream_call", metadata=payload.to_dict()
)
logger.info(f"Request: \n{payload}")
ai_response_text = ""
payload.span_id = span.span_id
try:
with root_tracer.start_span("BaseChat.invoke_worker_manager.generate"):
model_output = await self.call_llm_operator(payload)
### output parse
ai_response_text = (
self.prompt_template.output_parser.parse_model_nostream_resp(
model_output, self.prompt_template.sep
)
ai_response_text, view_message = await self._no_streaming_call_with_retry(
payload
)
### model result deal
self.current_message.add_ai_message(ai_response_text)
prompt_define_response = (
self.prompt_template.output_parser.parse_prompt_response(
ai_response_text
)
)
metadata = {
"model_output": model_output.to_dict(),
"ai_response_text": ai_response_text,
"prompt_define_response": self._parse_prompt_define_response(
prompt_define_response
),
}
with root_tracer.start_span("BaseChat.do_action", metadata=metadata):
### run
result = await blocking_func_to_async(
self._executor, self.do_action, prompt_define_response
)
### llm speaker
speak_to_user = self.get_llm_speak(prompt_define_response)
# view_message = self.prompt_template.output_parser.parse_view_response(
# speak_to_user, result
# )
view_message = await blocking_func_to_async(
self._executor,
self.prompt_template.output_parser.parse_view_response,
speak_to_user,
result,
prompt_define_response,
)
view_message = view_message.replace("\n", "\\n")
self.current_message.add_view_message(view_message)
self.message_adjust()
span.end()
except Exception as e:
print(traceback.format_exc())
logger.error("model response parase faild" + str(e))
self.current_message.add_view_message(
f"""<span style=\"color:red\">ERROR!</span>{str(e)}\n {ai_response_text} """
)
except BaseAppException as e:
self.current_message.add_view_message(e.view)
span.end(metadata={"error": str(e)})
### store dialogue
except Exception as e:
view_message = f"<span style='color:red'>ERROR!</span> {str(e)}"
self.current_message.add_view_message(view_message)
span.end(metadata={"error": str(e)})
# Store current conversation
await blocking_func_to_async(
self._executor, self.current_message.end_current_round
)
return self.current_ai_response()
@async_retry(
retries=CFG.DBGPT_APP_SCENE_NON_STREAMING_RETRIES_BASE,
parallel_executions=CFG.DBGPT_APP_SCENE_NON_STREAMING_RETRIES_BASE,
catch_exceptions=(Exception, BaseAppException),
)
async def _no_streaming_call_with_retry(self, payload):
with root_tracer.start_span("BaseChat.invoke_worker_manager.generate"):
model_output = await self.call_llm_operator(payload)
ai_response_text = self.prompt_template.output_parser.parse_model_nostream_resp(
model_output, self.prompt_template.sep
)
prompt_define_response = (
self.prompt_template.output_parser.parse_prompt_response(ai_response_text)
)
metadata = {
"model_output": model_output.to_dict(),
"ai_response_text": ai_response_text,
"prompt_define_response": self._parse_prompt_define_response(
prompt_define_response
),
}
with root_tracer.start_span("BaseChat.do_action", metadata=metadata):
result = await blocking_func_to_async(
self._executor, self.do_action, prompt_define_response
)
speak_to_user = self.get_llm_speak(prompt_define_response)
view_message = await blocking_func_to_async(
self._executor,
self.prompt_template.output_parser.parse_view_response,
speak_to_user,
result,
prompt_define_response,
)
return ai_response_text, view_message.replace("\n", "\\n")
async def get_llm_response(self):
payload = await self._build_model_request()
logger.info(f"Request: \n{payload}")


@@ -9,6 +9,8 @@ from dbgpt._private.config import Config
 from dbgpt.core.interface.output_parser import BaseOutputParser
 from dbgpt.util.json_utils import serialize
 
+from ...exceptions import AppActionException
+
 CFG = Config()
@@ -66,9 +68,10 @@ class DbChatOutputParser(BaseOutputParser):
         param = {}
         api_call_element = ET.Element("chart-view")
         err_msg = None
+        success = False
         try:
             if not prompt_response.sql or len(prompt_response.sql) <= 0:
-                return f"""{speak}"""
+                raise AppActionException("Can not find sql in response", speak)
             df = data(prompt_response.sql)
             param["type"] = prompt_response.display
@@ -77,20 +80,26 @@ class DbChatOutputParser(BaseOutputParser):
                 df.to_json(orient="records", date_format="iso", date_unit="s")
             )
             view_json_str = json.dumps(param, default=serialize, ensure_ascii=False)
+            success = True
         except Exception as e:
             logger.error("parse_view_response error!" + str(e))
-            err_param = {}
-            err_param["sql"] = f"{prompt_response.sql}"
-            err_param["type"] = "response_table"
+            err_param = {
+                "sql": f"{prompt_response.sql}",
+                "type": "response_table",
+                "data": [],
+            }
-            # err_param["err_msg"] = str(e)
-            err_param["data"] = []
             err_msg = str(e)
             view_json_str = json.dumps(err_param, default=serialize, ensure_ascii=False)
 
         # api_call_element.text = view_json_str
         api_call_element.set("content", view_json_str)
         result = ET.tostring(api_call_element, encoding="utf-8")
-        if err_msg:
-            return f"""{speak} \\n <span style=\"color:red\">ERROR!</span>{err_msg} \n {result.decode("utf-8")}"""
+        if not success:
+            view_content = (
+                f'{speak} \\n <span style="color:red">ERROR!</span>'
+                f"{err_msg} \n {result.decode('utf-8')}"
+            )
+            raise AppActionException("Generate view content failed", view_content)
         else:
             return speak + "\n" + result.decode("utf-8")
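
With this change parse_view_response has a single success exit: it returns the speak text plus a <chart-view> element whose content attribute carries the JSON payload. Every failure path now raises AppActionException instead of returning an error string, which is what lets the retry wrapper in base_chat.py re-run the whole LLM call rather than persisting a half-rendered error. A self-contained sketch of the success-path payload; the rows and the display type here are illustrative, not taken from the diff:

import json
import xml.etree.ElementTree as ET

# Illustrative stand-ins for prompt_response.display / .sql and the DataFrame rows.
param = {
    "type": "response_table",
    "sql": "SELECT city, COUNT(*) AS n FROM users GROUP BY city",
    "data": [{"city": "Beijing", "n": 42}, {"city": "Berlin", "n": 17}],
}

api_call_element = ET.Element("chart-view")
api_call_element.set("content", json.dumps(param, ensure_ascii=False))
result = ET.tostring(api_call_element, encoding="utf-8")
print(result.decode("utf-8"))
# Prints a <chart-view content="..." /> tag; the double quotes inside the
# JSON are XML-escaped as &quot; by ElementTree.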


@@ -0,0 +1,22 @@
"""Exceptions for Application."""
import logging
logger = logging.getLogger(__name__)
class BaseAppException(Exception):
"""Base Exception for App"""
def __init__(self, message: str, view: str):
"""Base Exception for App"""
super().__init__(message)
self.message = message
self.view = view
class AppActionException(BaseAppException):
"""Exception for App Action."""
def __init__(self, message: str, view: str):
"""Exception for App Action"""
super().__init__(message, view)
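
Taken together, the contract is: anything below the retry boundary reports a user-visible failure by raising BaseAppException (or a subclass) with a pre-rendered view, the @async_retry wrapper re-runs the call, and nostream_call shows e.view only once retries are exhausted. A toy end-to-end sketch of that flow; flaky_call and the explicit retry loop are hypothetical simplifications of the decorator shown earlier:

import asyncio


class BaseAppException(Exception):
    """Base Exception for App"""

    def __init__(self, message: str, view: str):
        super().__init__(message)
        self.message = message
        self.view = view


attempts = {"count": 0}


async def flaky_call():
    # Hypothetical stand-in for _no_streaming_call_with_retry:
    # the first attempt fails with a renderable view, the second succeeds.
    attempts["count"] += 1
    if attempts["count"] < 2:
        raise BaseAppException(
            "Can not find sql in response",
            view="<span style='color:red'>ERROR!</span> Can not find sql",
        )
    return "ai response text", "view message"


async def main():
    retries = 2  # what DBGPT_APP_SCENE_NON_STREAMING_RETRIES_BASE would control
    last_error = None
    for _ in range(retries):
        try:
            ai_text, view_message = await flaky_call()
            print("add_view_message:", view_message)  # success on the retry
            return
        except BaseAppException as e:
            last_error = e
    # Retries exhausted: render the exception's own view, as nostream_call does.
    print("add_view_message:", last_error.view)


asyncio.run(main())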