feat(db editor): db editor bug fix

yhjun1026
2024-08-12 16:17:07 +08:00
parent a3924d5260
commit 4735952536
8 changed files with 127 additions and 114 deletions

View File

@@ -1,7 +1,7 @@
import json
import logging
import time
from typing import List
from typing import Dict, List
from fastapi import APIRouter, Body, Depends
@@ -44,7 +44,7 @@ def get_edit_service() -> EditorService:
return EditorService.get_instance(CFG.SYSTEM_APP)
@router.get("/v1/editor/db/tables", response_model=Result[DbTable])
@router.get("/v1/editor/db/tables", response_model=Result[DataNode])
async def get_editor_tables(
db_name: str, page_index: int, page_size: int, search_str: str = ""
):
@@ -71,15 +71,20 @@ async def get_editor_tables(
return Result.succ(db_node)
@router.get("/v1/editor/sql/rounds", response_model=Result[ChatDbRounds])
@router.get("/v1/editor/sql/rounds", response_model=Result[List[ChatDbRounds]])
async def get_editor_sql_rounds(
con_uid: str, editor_service: EditorService = Depends(get_edit_service)
):
logger.info("get_editor_sql_rounds:{con_uid}")
return Result.succ(editor_service.get_editor_sql_rounds(con_uid))
try:
chat_rounds = editor_service.get_editor_sql_rounds(con_uid)
return Result.succ(data=chat_rounds)
except Exception as e:
logger.exception("Get editor sql rounds failed!")
return Result.failed(msg=str(e))
@router.get("/v1/editor/sql", response_model=Result[dict])
@router.get("/v1/editor/sql", response_model=Result[List[Dict]])
async def get_editor_sql(
con_uid: str, round: int, editor_service: EditorService = Depends(get_edit_service)
):
@@ -183,7 +188,7 @@ async def editor_chart_run(run_param: dict = Body()):
result_info="",
run_cost=(end_time - start_time) / 1000,
colunms=colunms,
values=sql_result,
values=[list(row) for row in sql_result],
)
return Result.succ(
ChartRunData(
@@ -191,6 +196,7 @@ async def editor_chart_run(run_param: dict = Body()):
)
)
except Exception as e:
logger.exception("Chart sql run failed!")
sql_result = SqlRunData(result_info=str(e), run_cost=0, colunms=[], values=[])
return Result.succ(
ChartRunData(sql_data=sql_result, chart_values=[], chart_type=chart_type)
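Note on the response_model fixes above: they are not cosmetic, because FastAPI validates the returned payload against the declared model, so wrapping a list in Result[ChatDbRounds] would fail validation even when the handler returns correct data. A small self-contained sketch of the generic wrapper behaviour, using plain pydantic and an illustrative round_name field (the real ChatDbRounds fields are not shown in this diff):

from typing import Generic, List, Optional, TypeVar
from pydantic import BaseModel, ValidationError

T = TypeVar("T")

class ChatDbRoundsSketch(BaseModel):
    round_name: str  # illustrative field, not taken from this diff

class Result(BaseModel, Generic[T]):
    # mirrors the Result fields changed later in this diff
    success: bool
    err_code: Optional[str] = None
    err_msg: Optional[str] = None
    data: Optional[T] = None

rounds = [{"round_name": "round-1"}, {"round_name": "round-2"}]
ok = Result[List[ChatDbRoundsSketch]](success=True, data=rounds)  # list payload validates
print(len(ok.data))  # 2

try:
    Result[ChatDbRoundsSketch](success=True, data=rounds)  # a list is not a single object
except ValidationError:
    print("wrong generic argument rejected")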

View File

@@ -70,7 +70,7 @@ class EditorService(BaseComponent):
def get_editor_sql_by_round(
self, conv_uid: str, round_index: int
) -> Optional[Dict]:
) -> Optional[List[Dict]]:
storage_conv: StorageConversation = self.get_storage_conv(conv_uid)
messages_by_round = _split_messages_by_round(storage_conv.messages)
for one_round_message in messages_by_round:
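The annotation change above aligns the service method with the Result[List[Dict]] response_model now declared on /v1/editor/sql. It also matters for static checking; a minimal sketch of how a checker such as mypy treats the old and new signatures (the bodies are illustrative, not from this diff):

from typing import Dict, List, Optional

def get_editor_sql_by_round_old(conv_uid: str, round_index: int) -> Optional[Dict]:
    entries: List[Dict] = [{"sql": "SELECT 1"}]
    return entries  # mypy: incompatible return value type

def get_editor_sql_by_round_new(conv_uid: str, round_index: int) -> Optional[List[Dict]]:
    entries: List[Dict] = [{"sql": "SELECT 1"}]
    return entries  # accepted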

View File

@@ -17,9 +17,9 @@ class DataNode(BaseModel):
class SqlRunData(BaseModel):
result_info: Optional[str]
run_cost: Optional[str]
colunms: Optional[List[str]]
values: Optional[List]
run_cost: float
colunms: Optional[List[str]] = []
values: Optional[List] = []
class ChartRunData(BaseModel):
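With run_cost now a real float and empty-list defaults on colunms/values, both the success and error paths of the chart-run endpoint above can construct the model without tripping validation. A minimal standalone sketch, using plain pydantic and a hypothetical run_sql stand-in for the real database call:

from typing import List, Optional
from pydantic import BaseModel
import time

class SqlRunData(BaseModel):
    # mirrors the fields in the diff above ("colunms" is the existing field name in the codebase)
    result_info: Optional[str]
    run_cost: float
    colunms: Optional[List[str]] = []
    values: Optional[List] = []

def run_sql(sql: str):
    # hypothetical stand-in for the real DB call; returns column names and row tuples
    return ["city", "amount"], [("Beijing", 10), ("Shanghai", 7)]

start_time = time.time() * 1000
colunms, rows = run_sql("SELECT city, amount FROM orders")
end_time = time.time() * 1000

ok = SqlRunData(
    result_info="",
    run_cost=(end_time - start_time) / 1000,  # elapsed ms / 1000, validated as a float
    colunms=colunms,
    values=[list(row) for row in rows],       # plain lists serialize cleanly to JSON
)
failed = SqlRunData(result_info="query failed", run_cost=0)  # defaults cover the error path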

View File

@@ -5,13 +5,12 @@ from enum import Enum
from typing import Any, Dict, Generic, List, Literal, Optional, TypeVar
from dbgpt._private.pydantic import BaseModel, ConfigDict, Field, model_to_dict
from dbgpt.core.awel import CommonLLMHttpResponseBody
T = TypeVar("T")
class Result(BaseModel, Generic[T]):
success: Optional[bool]
success: bool
err_code: Optional[str] = None
err_msg: Optional[str] = None
data: Optional[T] = None
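Tightening success from Optional[bool] to bool changes validation, not just documentation: assuming pydantic v2 (the ConfigDict import above suggests it), Optional[bool] without a default is still a required field but accepts None, whereas bool rejects it. A small self-contained illustration with plain pydantic:

from typing import Optional
from pydantic import BaseModel, ValidationError

class Before(BaseModel):
    success: Optional[bool]   # required, but None slips through

class After(BaseModel):
    success: bool             # required, and None is rejected

print(Before(success=None).success)   # None
try:
    After(success=None)
except ValidationError:
    print("success=None is no longer accepted")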
@@ -42,19 +41,20 @@ class ChatSceneVo(BaseModel):
class ConversationVo(BaseModel):
model_config = ConfigDict(protected_namespaces=())
"""
dialogue_uid
"""
conv_uid: Optional[str] = ""
conv_uid: str = ""
"""
user input
"""
user_input: Optional[str] = ""
user_input: str = ""
"""
user
"""
user_name: Optional[str] = None
user_name: Optional[str] = Field(None, description="user name")
"""
the scene of chat
"""
@@ -68,46 +68,49 @@ class ConversationVo(BaseModel):
"""
chat scene select param
"""
select_param: Optional[Any] = None
select_param: Optional[str] = Field(None, description="chat scene select param")
"""
llm model name
"""
model_name: Optional[str] = None
model_name: Optional[str] = Field(None, description="llm model name")
"""Used to control whether the content is returned incrementally or in full each time.
If this parameter is not provided, the default is full return.
"""
incremental: Optional[bool] = False
incremental: bool = False
sys_code: Optional[str] = None
sys_code: Optional[str] = Field(None, description="System code")
ext_info: Optional[dict] = {}
class MessageVo(BaseModel):
model_config = ConfigDict(protected_namespaces=())
"""
role that sends out the current message
"""
role: Optional[str]
role: str
"""
current message
"""
context: Optional[str]
context: str
""" message postion order """
order: Optional[int]
order: int
"""
time the current message was sent
"""
time_stamp: Optional[Any] = None
time_stamp: Optional[Any] = Field(
None, description="time the current message was sent"
)
"""
model_name
"""
model_name: Optional[str]
model_name: str
class DeltaMessage(BaseModel):
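A side note on the Field(...) changes above: the triple-quoted strings sitting between fields are bare expression statements, not docstrings, so pydantic ignores them and they never reach the generated OpenAPI schema; moving the text into Field(..., description=...) is what makes it visible. A minimal sketch, again with plain pydantic rather than the project's wrapper:

from typing import Optional
from pydantic import BaseModel, Field

class ConversationVoSketch(BaseModel):
    """dialogue_uid"""          # only this first string is kept, as the class docstring
    conv_uid: str = ""
    # the string literal below is discarded at runtime and never documents user_name
    """user"""
    user_name: Optional[str] = Field(None, description="user name")

print(ConversationVoSketch.model_fields["user_name"].description)  # -> user name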

View File

@@ -20,100 +20,104 @@ class DashboardDataLoader:
def get_chart_values_by_data(self, field_names, datas, chart_sql: str):
logger.info(f"get_chart_values_by_conn:{chart_sql}")
# try:
values: List[ValueItem] = []
data_map = {}
data_map.update(
{
f"{field_name}": [row[index] for row in datas]
for index, field_name in enumerate(field_names)
}
)
# to Check Whether there are data in it
if len(datas) != 0:
# try to find string index
string_index = next(
(
index
for index, value in enumerate(datas[0])
if isinstance(value, str)
),
-1,
try:
values: List[ValueItem] = []
data_map = {}
data_map.update(
{
f"{field_name}": [row[index] for row in datas]
for index, field_name in enumerate(field_names)
}
)
# try to find datetime index
datetime_index = next(
(
index
for index, value in enumerate(datas[0])
if isinstance(value, (datetime.date, datetime.datetime))
),
-1,
)
# on the other aspect the primary key including "id"
id_index = next(
(index for index, value in enumerate(field_names) if "id" in value), -1
)
# while there are no datetime and there are no string
if string_index == -1 and datetime_index == -1 and id_index == -1:
# ignore Null Value in the data
result = [
sum(values for values in data if values is not None)
for data in zip(*datas)
]
for index, field_name in enumerate(field_names):
value_item = ValueItem(
name=field_name,
type=f"{field_name}_amount",
value=str(result[index]),
)
values.append(value_item)
# there are string index (or/and) datetime; first choose string->datetime->id
else:
# triple judge index
primary_index = (
string_index
if string_index != -1
else (datetime_index if datetime_index != -1 else id_index)
# to Check Whether there are data in it
if len(datas) != 0:
# try to find string index
string_index = next(
(
index
for index, value in enumerate(datas[0])
if isinstance(value, str)
),
-1,
)
temp_field_name = field_names[:primary_index]
temp_field_name.extend(field_names[primary_index + 1 :])
for field_name in temp_field_name:
for data in datas:
# None Data won't be ok for the chart
if not any(item is None for item in data):
value_item = ValueItem(
name=str(data[primary_index]),
type=field_name,
value=str(data[field_names.index(field_name)])
if not isinstance(
type(data[field_names.index(field_name)]),
(datetime.datetime, datetime.date),
)
else str(
data[field_names.index(field_name)].strftime(
"%Y%m%d"
)
),
)
values.append(value_item)
# handle None Data as "0" for number and "19700101" for datetime
else:
value_item = ValueItem(
name=data[string_index],
type=field_name,
value="0"
if not isinstance(
type(data[field_names.index(field_name)]),
(datetime.datetime, datetime.date),
# try to find datetime index
datetime_index = next(
(
index
for index, value in enumerate(datas[0])
if isinstance(value, (datetime.date, datetime.datetime))
),
-1,
)
# on the other aspect the primary key including "id"
id_index = next(
(index for index, value in enumerate(field_names) if "id" in value),
-1,
)
# while there are no datetime and there are no string
if string_index == -1 and datetime_index == -1 and id_index == -1:
# ignore Null Value in the data
result = [
sum(values for values in data if values is not None)
for data in zip(*datas)
]
for index, field_name in enumerate(field_names):
value_item = ValueItem(
name=field_name,
type=f"{field_name}_amount",
value=str(result[index]),
)
values.append(value_item)
# there are string index (or/and) datetime; first choose string->datetime->id
else:
# triple judge index
primary_index = (
string_index
if string_index != -1
else (datetime_index if datetime_index != -1 else id_index)
)
temp_field_name = field_names[:primary_index]
temp_field_name.extend(field_names[primary_index + 1 :])
for field_name in temp_field_name:
for data in datas:
# None Data won't be ok for the chart
if not any(item is None for item in data):
value_item = ValueItem(
name=str(data[primary_index]),
type=field_name,
value=str(data[field_names.index(field_name)])
if not isinstance(
type(data[field_names.index(field_name)]),
(datetime.datetime, datetime.date),
)
else str(
data[field_names.index(field_name)].strftime(
"%Y%m%d"
)
),
)
else "19700101",
)
values.append(value_item)
values.append(value_item)
# handle None Data as "0" for number and "19700101" for datetime
else:
value_item = ValueItem(
name=data[string_index],
type=field_name,
value="0"
if not isinstance(
type(data[field_names.index(field_name)]),
(datetime.datetime, datetime.date),
)
else "19700101",
)
values.append(value_item)
return field_names, values
except Exception as e:
logger.exception(f"get_chart_values_by_conn failed:{str(e)}")
raise e
def get_chart_values_by_db(self, db_name: str, chart_sql: str):
logger.info(f"get_chart_values_by_db:{db_name},{chart_sql}")

View File

@@ -1051,7 +1051,7 @@ class GptsAppDao(BaseDao):
"""
chat_normal_ctx = NativeTeamContext(
chat_scene="chat_normal",
scene_name="LLM Dialogue",
scene_name="Chat Normal",
scene_describe="Native LLM dialogue",
param_title="",
show_disable=False,

Binary image file not shown (changed: 4.8 KiB before → 21 KiB after)

Binary image file not shown (changed: 21 KiB before → 4.8 KiB after)