typos fix

This commit is contained in:
JohnSaxon 2023-10-17 10:33:35 +08:00
parent 4806e16d5f
commit c4ae0fd981
21 changed files with 78 additions and 78 deletions

View File

@ -111,7 +111,7 @@ def load_native_plugins(cfg: Config):
print("save file")
cfg.set_plugins(scan_plugins(cfg.debug_mode))
else:
print("get file faildresponse code", response.status_code)
print("get file failed, response code", response.status_code)
except Exception as e:
print("load plugin from git exception!" + str(e))

View File

@ -106,13 +106,13 @@ class ClickhouseConnect(RDBMSDatabase):
return [
(table_comment[0], table_comment[1]) for table_comment in table_comments
]
def table_simple_info(self):
# group_concat() not supported in clickhouse, use arrayStringConcat+groupArray instead; and quotes need to be escaped
_sql = f"""
select concat(TABLE_NAME, \'(\' , arrayStringConcat(groupArray(column_name),\'-\'), \')\') as schema_info
from information_schema.COLUMNS where table_schema=\'{self.get_current_db_name()}\' group by TABLE_NAME; """
from information_schema.COLUMNS where table_schema=\'{self.get_current_db_name()}\' group by TABLE_NAME; """
cursor = self.session.execute(text(_sql))
results = cursor.fetchall()
return results

View File

@ -172,7 +172,7 @@ async def test_connect(db_config: DBConfig = Body()):
CFG.LOCAL_DB_MANAGE.test_connect(db_config)
return Result.succ(True)
except Exception as e:
return Result.faild(code="E1001", msg=str(e))
return Result.failed(code="E1001", msg=str(e))
@router.post("/v1/chat/db/summary", response_model=Result[bool])
@ -305,7 +305,7 @@ async def params_load(
return Result.succ(get_hist_messages(conv_uid))
except Exception as e:
logger.error("excel load error!", e)
return Result.faild(code="E000X", msg=f"File Load Error {e}")
return Result.failed(code="E000X", msg=f"File Load Error {e}")
@router.post("/v1/chat/dialogue/delete")
@ -352,7 +352,7 @@ async def get_chat_instance(dialogue: ConversationVo = Body()) -> BaseChat:
if not ChatScene.is_valid_mode(dialogue.chat_mode):
raise StopAsyncIteration(
Result.faild("Unsupported Chat Mode," + dialogue.chat_mode + "!")
Result.failed("Unsupported Chat Mode," + dialogue.chat_mode + "!")
)
chat_param = {
@ -430,7 +430,7 @@ async def model_types(controller: BaseModelController = Depends(get_model_contro
return Result.succ(list(types))
except Exception as e:
return Result.faild(code="E000X", msg=f"controller model types error {e}")
return Result.failed(code="E000X", msg=f"controller model types error {e}")
@router.get("/v1/model/supports")
@ -440,7 +440,7 @@ async def model_supports(worker_manager: WorkerManager = Depends(get_worker_mana
models = await worker_manager.supported_models()
return Result.succ(FlatSupportedModel.from_supports(models))
except Exception as e:
return Result.faild(code="E000X", msg=f"Fetch supportd models error {e}")
return Result.failed(code="E000X", msg=f"Fetch supported models error {e}")
async def no_stream_generator(chat):

View File

@ -107,7 +107,7 @@ async def get_editor_sql(con_uid: str, round: int):
.replace("\n", " ")
)
return Result.succ(json.loads(context))
return Result.faild(msg="not have sql!")
return Result.failed(msg="not have sql!")
@router.post("/v1/editor/sql/run", response_model=Result[SqlRunData])
@ -116,7 +116,7 @@ async def editor_sql_run(run_param: dict = Body()):
db_name = run_param["db_name"]
sql = run_param["sql"]
if not db_name and not sql:
return Result.faild("SQL run param error")
return Result.failed("SQL run param error")
conn = CFG.LOCAL_DB_MANAGE.get_connect(db_name)
try:
@ -169,7 +169,7 @@ async def sql_editor_submit(sql_edit_context: ChatSqlEditContext = Body()):
)
history_mem.update(history_messages)
return Result.succ(None)
return Result.faild(msg="Edit Faild!")
return Result.failed(msg="Edit Failed!")
@router.get("/v1/editor/chart/list", response_model=Result[ChartList])
@ -191,7 +191,7 @@ async def get_editor_chart_list(con_uid: str):
charts=json.loads(element["data"]["content"]),
)
return Result.succ(chart_list)
return Result.faild(msg="Not have charts!")
return Result.failed(msg="Not have charts!")
@router.post("/v1/editor/chart/info", response_model=Result[ChartDetail])
@ -210,7 +210,7 @@ async def get_editor_chart_info(param: dict = Body()):
logger.error(
"this dashboard dialogue version too old, can't support editor!"
)
return Result.faild(
return Result.failed(
msg="this dashboard dialogue version too old, can't support editor!"
)
for element in last_round["messages"]:
@ -234,7 +234,7 @@ async def get_editor_chart_info(param: dict = Body()):
)
return Result.succ(detail)
return Result.faild(msg="Can't Find Chart Detail Info!")
return Result.failed(msg="Can't Find Chart Detail Info!")
@router.post("/v1/editor/chart/run", response_model=Result[ChartRunData])
@ -244,7 +244,7 @@ async def editor_chart_run(run_param: dict = Body()):
sql = run_param["sql"]
chart_type = run_param["chart_type"]
if not db_name and not sql:
return Result.faild("SQL run param error")
return Result.failed("SQL run param error")
try:
dashboard_data_loader: DashboardDataLoader = DashboardDataLoader()
db_conn = CFG.LOCAL_DB_MANAGE.get_connect(db_name)
@ -334,7 +334,7 @@ async def chart_editor_submit(chart_edit_context: ChatChartEditContext = Body())
)
except Exception as e:
logger.error(f"edit chart exception!{str(e)}", e)
return Result.faild(msg=f"Edit chart exception!{str(e)}")
return Result.failed(msg=f"Edit chart exception!{str(e)}")
history_mem.update(history_messages)
return Result.succ(None)
return Result.faild(msg="Edit Faild!")
return Result.failed(msg="Edit Failed!")

View File

@ -17,11 +17,11 @@ class Result(Generic[T], BaseModel):
return Result(success=True, err_code=None, err_msg=None, data=data)
@classmethod
def faild(cls, msg):
def failed(cls, msg):
return Result(success=False, err_code="E000X", err_msg=msg, data=None)
@classmethod
def faild(cls, code, msg):
def failed(cls, code, msg):
return Result(success=False, err_code=code, err_msg=msg, data=None)

View File

@ -7,4 +7,4 @@ async def validation_exception_handler(request: Request, exc: RequestValidationE
message = ""
for error in exc.errors():
message += ".".join(error.get("loc")) + ":" + error.get("msg") + ";"
return Result.faild(code="E0001", msg=message)
return Result.failed(code="E0001", msg=message)

View File

@ -223,7 +223,7 @@ class BaseChat(ABC):
span.end()
except Exception as e:
print(traceback.format_exc())
logger.error("model response parase faild" + str(e))
logger.error("model response parse failed" + str(e))
self.current_message.add_view_message(
f"""<span style=\"color:red\">ERROR!</span>{str(e)}\n {ai_response_text} """
)

View File

@ -52,8 +52,8 @@ class DashboardDataLoader:
values.append(value_item)
return field_names, values
except Exception as e:
logger.debug("Prepare Chart Data Faild!" + str(e))
raise ValueError("Prepare Chart Data Faild!")
logger.debug("Prepare Chart Data Failed!" + str(e))
raise ValueError("Prepare Chart Data Failed!")
def get_chart_values_by_db(self, db_name: str, chart_sql: str):
logger.info(f"get_chart_values_by_db:{db_name},{chart_sql}")

View File

@ -42,7 +42,7 @@ RESPONSE_FORMAT = [
PROMPT_SEP = SeparatorStyle.SINGLE.value
PROMPT_NEED_NEED_STREAM_OUT = False
PROMPT_NEED_STREAM_OUT = False
prompt = PromptTemplate(
template_scene=ChatScene.ChatDashboard.value(),
@ -50,9 +50,9 @@ prompt = PromptTemplate(
response_format=json.dumps(RESPONSE_FORMAT, indent=4),
template_define=PROMPT_SCENE_DEFINE,
template=_DEFAULT_TEMPLATE,
stream_out=PROMPT_NEED_NEED_STREAM_OUT,
stream_out=PROMPT_NEED_STREAM_OUT,
output_parser=ChatDashboardOutputParser(
sep=PROMPT_SEP, is_stream_out=PROMPT_NEED_NEED_STREAM_OUT
sep=PROMPT_SEP, is_stream_out=PROMPT_NEED_STREAM_OUT
),
)
CFG.prompt_template_registry.register(prompt, is_default=True)

View File

@ -51,7 +51,7 @@ _PROMPT_SCENE_DEFINE = (
PROMPT_SEP = SeparatorStyle.SINGLE.value
PROMPT_NEED_NEED_STREAM_OUT = True
PROMPT_NEED_STREAM_OUT = True
# Temperature is a configuration hyperparameter that controls the randomness of language model output.
# A high temperature produces more unpredictable and creative results, while a low temperature produces more common and conservative output.
@ -63,9 +63,9 @@ prompt = PromptTemplate(
input_variables=["user_input", "table_name", "disply_type"],
template_define=_PROMPT_SCENE_DEFINE,
template=_DEFAULT_TEMPLATE,
stream_out=PROMPT_NEED_NEED_STREAM_OUT,
stream_out=PROMPT_NEED_STREAM_OUT,
output_parser=ChatExcelOutputParser(
sep=PROMPT_SEP, is_stream_out=PROMPT_NEED_NEED_STREAM_OUT
sep=PROMPT_SEP, is_stream_out=PROMPT_NEED_STREAM_OUT
),
need_historical_messages=True,
# example_selector=sql_data_example,

View File

@ -67,7 +67,7 @@ PROMPT_SCENE_DEFINE = (
PROMPT_SEP = SeparatorStyle.SINGLE.value
PROMPT_NEED_NEED_STREAM_OUT = False
PROMPT_NEED_STREAM_OUT = False
# Temperature is a configuration hyperparameter that controls the randomness of language model output.
# A high temperature produces more unpredictable and creative results, while a low temperature produces more common and conservative output.
@ -80,9 +80,9 @@ prompt = PromptTemplate(
response_format=json.dumps(RESPONSE_FORMAT_SIMPLE, ensure_ascii=False, indent=4),
template_define=PROMPT_SCENE_DEFINE,
template=_DEFAULT_TEMPLATE,
stream_out=PROMPT_NEED_NEED_STREAM_OUT,
stream_out=PROMPT_NEED_STREAM_OUT,
output_parser=LearningExcelOutputParser(
sep=PROMPT_SEP, is_stream_out=PROMPT_NEED_NEED_STREAM_OUT
sep=PROMPT_SEP, is_stream_out=PROMPT_NEED_STREAM_OUT
),
# example_selector=sql_data_example,
temperature=PROMPT_TEMPERATURE,

View File

@ -33,7 +33,7 @@ RESPONSE_FORMAT_SIMPLE = {
PROMPT_SEP = SeparatorStyle.SINGLE.value
PROMPT_NEED_NEED_STREAM_OUT = False
PROMPT_NEED_STREAM_OUT = False
# Temperature is a configuration hyperparameter that controls the randomness of language model output.
# A high temperature produces more unpredictable and creative results, while a low temperature produces more common and conservative output.
@ -46,9 +46,9 @@ prompt = PromptTemplate(
response_format=json.dumps(RESPONSE_FORMAT_SIMPLE, ensure_ascii=False, indent=4),
template_define=PROMPT_SCENE_DEFINE,
template=_DEFAULT_TEMPLATE,
stream_out=PROMPT_NEED_NEED_STREAM_OUT,
stream_out=PROMPT_NEED_STREAM_OUT,
output_parser=DbChatOutputParser(
sep=PROMPT_SEP, is_stream_out=PROMPT_NEED_NEED_STREAM_OUT
sep=PROMPT_SEP, is_stream_out=PROMPT_NEED_STREAM_OUT
),
# example_selector=sql_data_example,
temperature=PROMPT_TEMPERATURE,

View File

@ -36,7 +36,7 @@ RESPONSE_FORMAT_SIMPLE = {
PROMPT_SEP = SeparatorStyle.SINGLE.value
PROMPT_NEED_NEED_STREAM_OUT = False
PROMPT_NEED_STREAM_OUT = False
# Temperature is a configuration hyperparameter that controls the randomness of language model output.
# A high temperature produces more unpredictable and creative results, while a low temperature produces more common and conservative output.
@ -50,9 +50,9 @@ prompt = PromptTemplate(
template_is_strict=False,
template_define=PROMPT_SCENE_DEFINE,
template=_DEFAULT_TEMPLATE,
stream_out=PROMPT_NEED_NEED_STREAM_OUT,
stream_out=PROMPT_NEED_STREAM_OUT,
output_parser=DbChatOutputParser(
sep=PROMPT_SEP, is_stream_out=PROMPT_NEED_NEED_STREAM_OUT
sep=PROMPT_SEP, is_stream_out=PROMPT_NEED_STREAM_OUT
),
# example_selector=sql_data_example,
temperature=PROMPT_TEMPERATURE,

View File

@ -54,7 +54,7 @@ _DEFAULT_TEMPLATE = (
PROMPT_SEP = SeparatorStyle.SINGLE.value
PROMPT_NEED_NEED_STREAM_OUT = True
PROMPT_NEED_STREAM_OUT = True
prompt = PromptTemplate(
template_scene=ChatScene.ChatWithDbQA.value(),
@ -62,9 +62,9 @@ prompt = PromptTemplate(
response_format=None,
template_define=PROMPT_SCENE_DEFINE,
template=_DEFAULT_TEMPLATE,
stream_out=PROMPT_NEED_NEED_STREAM_OUT,
stream_out=PROMPT_NEED_STREAM_OUT,
output_parser=NormalChatOutputParser(
sep=PROMPT_SEP, is_stream_out=PROMPT_NEED_NEED_STREAM_OUT
sep=PROMPT_SEP, is_stream_out=PROMPT_NEED_STREAM_OUT
),
)

View File

@ -33,7 +33,7 @@ RESPONSE_FORMAT = {"table": ["orders", "products"]}
PROMPT_SEP = SeparatorStyle.SINGLE.value
PROMPT_NEED_NEED_STREAM_OUT = False
PROMPT_NEED_STREAM_OUT = False
prompt = PromptTemplate(
template_scene=ChatScene.InnerChatDBSummary.value(),
@ -41,9 +41,9 @@ prompt = PromptTemplate(
response_format=json.dumps(RESPONSE_FORMAT, indent=4),
template_define=PROMPT_SCENE_DEFINE,
template=_DEFAULT_TEMPLATE + PROMPT_RESPONSE,
stream_out=PROMPT_NEED_NEED_STREAM_OUT,
stream_out=PROMPT_NEED_STREAM_OUT,
output_parser=NormalChatOutputParser(
sep=PROMPT_SEP, is_stream_out=PROMPT_NEED_NEED_STREAM_OUT
sep=PROMPT_SEP, is_stream_out=PROMPT_NEED_STREAM_OUT
),
)

View File

@ -33,7 +33,7 @@ _DEFAULT_TEMPLATE = (
PROMPT_SEP = SeparatorStyle.SINGLE.value
PROMPT_NEED_NEED_STREAM_OUT = True
PROMPT_NEED_STREAM_OUT = True
prompt = PromptTemplate(
template_scene=ChatScene.ChatKnowledge.value(),
@ -41,9 +41,9 @@ prompt = PromptTemplate(
response_format=None,
template_define=PROMPT_SCENE_DEFINE,
template=_DEFAULT_TEMPLATE,
stream_out=PROMPT_NEED_NEED_STREAM_OUT,
stream_out=PROMPT_NEED_STREAM_OUT,
output_parser=NormalChatOutputParser(
sep=PROMPT_SEP, is_stream_out=PROMPT_NEED_NEED_STREAM_OUT
sep=PROMPT_SEP, is_stream_out=PROMPT_NEED_STREAM_OUT
),
)

View File

@ -33,7 +33,7 @@ _DEFAULT_TEMPLATE = (
PROMPT_SEP = SeparatorStyle.SINGLE.value
PROMPT_NEED_NEED_STREAM_OUT = True
PROMPT_NEED_STREAM_OUT = True
prompt = PromptTemplate(
template_scene=ChatScene.ChatKnowledge.value(),
@ -41,9 +41,9 @@ prompt = PromptTemplate(
response_format=None,
template_define=None,
template=_DEFAULT_TEMPLATE,
stream_out=PROMPT_NEED_NEED_STREAM_OUT,
stream_out=PROMPT_NEED_STREAM_OUT,
output_parser=NormalChatOutputParser(
sep=PROMPT_SEP, is_stream_out=PROMPT_NEED_NEED_STREAM_OUT
sep=PROMPT_SEP, is_stream_out=PROMPT_NEED_STREAM_OUT
),
)

View File

@ -11,7 +11,7 @@ CFG = Config()
PROMPT_SEP = SeparatorStyle.SINGLE.value
PROMPT_NEED_NEED_STREAM_OUT = True
PROMPT_NEED_STREAM_OUT = True
prompt = PromptTemplate(
template_scene=ChatScene.ChatNormal.value(),
@ -19,9 +19,9 @@ prompt = PromptTemplate(
response_format=None,
template_define=PROMPT_SCENE_DEFINE,
template=None,
stream_out=PROMPT_NEED_NEED_STREAM_OUT,
stream_out=PROMPT_NEED_STREAM_OUT,
output_parser=NormalChatOutputParser(
sep=PROMPT_SEP, is_stream_out=PROMPT_NEED_NEED_STREAM_OUT
sep=PROMPT_SEP, is_stream_out=PROMPT_NEED_STREAM_OUT
),
)

View File

@ -45,7 +45,7 @@ def space_add(request: KnowledgeSpaceRequest):
knowledge_space_service.create_knowledge_space(request)
return Result.succ([])
except Exception as e:
return Result.faild(code="E000X", msg=f"space add error {e}")
return Result.failed(code="E000X", msg=f"space add error {e}")
@router.post("/knowledge/space/list")
@ -54,7 +54,7 @@ def space_list(request: KnowledgeSpaceRequest):
try:
return Result.succ(knowledge_space_service.get_knowledge_space(request))
except Exception as e:
return Result.faild(code="E000X", msg=f"space list error {e}")
return Result.failed(code="E000X", msg=f"space list error {e}")
@router.post("/knowledge/space/delete")
@ -63,7 +63,7 @@ def space_delete(request: KnowledgeSpaceRequest):
try:
return Result.succ(knowledge_space_service.delete_space(request.name))
except Exception as e:
return Result.faild(code="E000X", msg=f"space list error {e}")
return Result.failed(code="E000X", msg=f"space list error {e}")
@router.post("/knowledge/{space_name}/arguments")
@ -72,7 +72,7 @@ def arguments(space_name: str):
try:
return Result.succ(knowledge_space_service.arguments(space_name))
except Exception as e:
return Result.faild(code="E000X", msg=f"space list error {e}")
return Result.failed(code="E000X", msg=f"space list error {e}")
@router.post("/knowledge/{space_name}/argument/save")
@ -83,7 +83,7 @@ def arguments_save(space_name: str, argument_request: SpaceArgumentRequest):
knowledge_space_service.argument_save(space_name, argument_request)
)
except Exception as e:
return Result.faild(code="E000X", msg=f"space list error {e}")
return Result.failed(code="E000X", msg=f"space list error {e}")
@router.post("/knowledge/{space_name}/document/add")
@ -97,7 +97,7 @@ def document_add(space_name: str, request: KnowledgeDocumentRequest):
)
# return Result.succ([])
except Exception as e:
return Result.faild(code="E000X", msg=f"document add error {e}")
return Result.failed(code="E000X", msg=f"document add error {e}")
@router.post("/knowledge/{space_name}/document/list")
@ -108,7 +108,7 @@ def document_list(space_name: str, query_request: DocumentQueryRequest):
knowledge_space_service.get_knowledge_documents(space_name, query_request)
)
except Exception as e:
return Result.faild(code="E000X", msg=f"document list error {e}")
return Result.failed(code="E000X", msg=f"document list error {e}")
@router.post("/knowledge/{space_name}/document/delete")
@ -119,7 +119,7 @@ def document_delete(space_name: str, query_request: DocumentQueryRequest):
knowledge_space_service.delete_document(space_name, query_request.doc_name)
)
except Exception as e:
return Result.faild(code="E000X", msg=f"document list error {e}")
return Result.failed(code="E000X", msg=f"document list error {e}")
@router.post("/knowledge/{space_name}/document/upload")
@ -156,9 +156,9 @@ async def document_upload(
)
)
# return Result.succ([])
return Result.faild(code="E000X", msg=f"doc_file is None")
return Result.failed(code="E000X", msg=f"doc_file is None")
except Exception as e:
return Result.faild(code="E000X", msg=f"document add error {e}")
return Result.failed(code="E000X", msg=f"document add error {e}")
@router.post("/knowledge/{space_name}/document/sync")
@ -170,7 +170,7 @@ def document_sync(space_name: str, request: DocumentSyncRequest):
)
return Result.succ([])
except Exception as e:
return Result.faild(code="E000X", msg=f"document sync error {e}")
return Result.failed(code="E000X", msg=f"document sync error {e}")
@router.post("/knowledge/{space_name}/chunk/list")
@ -179,7 +179,7 @@ def document_list(space_name: str, query_request: ChunkQueryRequest):
try:
return Result.succ(knowledge_space_service.get_document_chunks(query_request))
except Exception as e:
return Result.faild(code="E000X", msg=f"document chunk list error {e}")
return Result.failed(code="E000X", msg=f"document chunk list error {e}")
@router.post("/knowledge/{vector_name}/query")

View File

@ -33,9 +33,9 @@ async def model_params():
params.append(model_dict)
return Result.succ(params)
if not worker_instance:
return Result.faild(code="E000X", msg=f"can not find worker manager")
return Result.failed(code="E000X", msg=f"can not find worker manager")
except Exception as e:
return Result.faild(code="E000X", msg=f"model stop failed {e}")
return Result.failed(code="E000X", msg=f"model stop failed {e}")
@router.get("/v1/worker/model/list")
@ -78,7 +78,7 @@ async def model_list():
return Result.succ(responses)
except Exception as e:
return Result.faild(code="E000X", msg=f"model list error {e}")
return Result.failed(code="E000X", msg=f"model list error {e}")
@router.post("/v1/worker/model/stop")
@ -91,11 +91,11 @@ async def model_stop(request: WorkerStartupRequest):
ComponentType.WORKER_MANAGER_FACTORY, WorkerManagerFactory
).create()
if not worker_manager:
return Result.faild(code="E000X", msg=f"can not find worker manager")
return Result.failed(code="E000X", msg=f"can not find worker manager")
request.params = {}
return Result.succ(await worker_manager.model_shutdown(request))
except Exception as e:
return Result.faild(code="E000X", msg=f"model stop failed {e}")
return Result.failed(code="E000X", msg=f"model stop failed {e}")
@router.post("/v1/worker/model/start")
@ -106,7 +106,7 @@ async def model_start(request: WorkerStartupRequest):
ComponentType.WORKER_MANAGER_FACTORY, WorkerManagerFactory
).create()
if not worker_manager:
return Result.faild(code="E000X", msg=f"can not find worker manager")
return Result.failed(code="E000X", msg=f"can not find worker manager")
return Result.succ(await worker_manager.model_startup(request))
except Exception as e:
return Result.faild(code="E000X", msg=f"model start failed {e}")
return Result.failed(code="E000X", msg=f"model start failed {e}")

View File

@ -11,12 +11,12 @@ prompt_manage_service = PromptManageService()
@router.post("/prompt/add")
def prompt_add(request: PromptManageRequest):
print(f"/space/add params: {request}")
print(f"/prompt/add params: {request}")
try:
prompt_manage_service.create_prompt(request)
return Result.succ([])
except Exception as e:
return Result.faild(code="E010X", msg=f"prompt add error {e}")
return Result.failed(code="E010X", msg=f"prompt add error {e}")
@router.post("/prompt/list")
@ -25,7 +25,7 @@ def prompt_list(request: PromptManageRequest):
try:
return Result.succ(prompt_manage_service.get_prompts(request))
except Exception as e:
return Result.faild(code="E010X", msg=f"prompt list error {e}")
return Result.failed(code="E010X", msg=f"prompt list error {e}")
@router.post("/prompt/update")
@ -34,7 +34,7 @@ def prompt_update(request: PromptManageRequest):
try:
return Result.succ(prompt_manage_service.update_prompt(request))
except Exception as e:
return Result.faild(code="E010X", msg=f"prompt update error {e}")
return Result.failed(code="E010X", msg=f"prompt update error {e}")
@router.post("/prompt/delete")
@ -43,4 +43,4 @@ def prompt_delete(request: PromptManageRequest):
try:
return Result.succ(prompt_manage_service.delete_prompt(request.prompt_name))
except Exception as e:
return Result.faild(code="E010X", msg=f"prompt delete error {e}")
return Result.failed(code="E010X", msg=f"prompt delete error {e}")