feat(core): Upgrade pydantic to 2.x (#1428)

This commit is contained in:
Fangyin Cheng
2024-04-20 09:41:16 +08:00
committed by GitHub
parent baa1e3f9f6
commit 57be1ece18
103 changed files with 1146 additions and 534 deletions

View File

@@ -10,6 +10,7 @@ from fastapi import APIRouter, Body, Depends, File, UploadFile
from fastapi.responses import StreamingResponse
from dbgpt._private.config import Config
from dbgpt._private.pydantic import model_to_dict, model_to_json
from dbgpt.app.knowledge.request.request import KnowledgeSpaceRequest
from dbgpt.app.knowledge.service import KnowledgeService
from dbgpt.app.openapi.api_view_model import (
@@ -147,7 +148,7 @@ def get_executor() -> Executor:
).create()
@router.get("/v1/chat/db/list", response_model=Result[DBConfig])
@router.get("/v1/chat/db/list", response_model=Result[List[DBConfig]])
async def db_connect_list():
    """List all database connections known to the local DB manager.

    Returns:
        Result: success result wrapping the connection list from
        ``CFG.local_db_manager``.
    """
    db_configs = CFG.local_db_manager.get_db_list()
    return Result.succ(db_configs)
@@ -189,7 +190,7 @@ async def db_summary(db_name: str, db_type: str):
return Result.succ(True)
@router.get("/v1/chat/db/support/type", response_model=Result[DbTypeInfo])
@router.get("/v1/chat/db/support/type", response_model=Result[List[DbTypeInfo]])
async def db_support_types():
support_types = CFG.local_db_manager.get_all_completed_types()
db_type_infos = []
@@ -223,7 +224,7 @@ async def dialogue_scenes():
return Result.succ(scene_vos)
@router.post("/v1/chat/mode/params/list", response_model=Result[dict])
@router.post("/v1/chat/mode/params/list", response_model=Result[dict | list])
async def params_list(chat_mode: str = ChatScene.ChatNormal.value()):
if ChatScene.ChatWithDbQA.value() == chat_mode:
return Result.succ(get_db_list())
@@ -378,7 +379,9 @@ async def chat_completions(
)
else:
with root_tracer.start_span(
"get_chat_instance", span_type=SpanType.CHAT, metadata=dialogue.dict()
"get_chat_instance",
span_type=SpanType.CHAT,
metadata=model_to_dict(dialogue),
):
chat: BaseChat = await get_chat_instance(dialogue)
@@ -458,7 +461,10 @@ async def stream_generator(chat, incremental: bool, model_name: str):
chunk = ChatCompletionStreamResponse(
id=chat.chat_session_id, choices=[choice_data], model=model_name
)
yield f"data: {chunk.json(exclude_unset=True, ensure_ascii=False)}\n\n"
json_chunk = model_to_json(
chunk, exclude_unset=True, ensure_ascii=False
)
yield f"data: {json_chunk}\n\n"
else:
# TODO generate an openai-compatible streaming responses
msg = msg.replace("\n", "\\n")