Mirror of https://github.com/csunny/DB-GPT.git (synced 2025-09-14 21:51:25 +00:00)

feat(core): Upgrade pydantic to 2.x (#1428)
@@ -7,7 +7,7 @@ from typing import Any, Dict, List, Optional, Union
 from sqlalchemy import Column, DateTime, Integer, String, Text, UniqueConstraint
 
-from dbgpt._private.pydantic import BaseModel
+from dbgpt._private.pydantic import BaseModel, ConfigDict, Field, model_to_json
 from dbgpt.agent.plan.awel.team_awel_layout import AWELTeamContext
 from dbgpt.agent.resource.resource_api import AgentResource
 from dbgpt.serve.agent.team.base import TeamMode
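
The changed import pulls ConfigDict, Field, and model_to_json from the project's own pydantic compatibility module instead of from pydantic directly, so call sites stay the same whichever major version is installed. The real contents of dbgpt._private.pydantic are not part of this diff; a minimal, hypothetical sketch of such a shim might look like this:

    # Hypothetical compatibility shim (the actual dbgpt._private.pydantic is not
    # shown in this diff); it re-exports one set of names for pydantic 1.x and 2.x.
    from pydantic import VERSION as PYDANTIC_VERSION
    from pydantic import BaseModel, Field

    if int(PYDANTIC_VERSION.split(".")[0]) >= 2:
        from pydantic import ConfigDict

        def model_to_json(model: BaseModel, **kwargs) -> str:
            # pydantic 2.x renamed .json() to .model_dump_json()
            return model.model_dump_json(**kwargs)

    else:

        def ConfigDict(**kwargs) -> dict:
            # pydantic 1.x has no ConfigDict; a plain dict keeps call sites working
            return dict(**kwargs)

        def model_to_json(model: BaseModel, **kwargs) -> str:
            return model.json(**kwargs)
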
@@ -17,6 +17,8 @@ logger = logging.getLogger(__name__)
 
 
 class GptsAppDetail(BaseModel):
+    model_config = ConfigDict(arbitrary_types_allowed=True)
+
     app_code: Optional[str] = None
     app_name: Optional[str] = None
     agent_name: Optional[str] = None
@@ -28,11 +30,6 @@ class GptsAppDetail(BaseModel):
     created_at: datetime = datetime.now()
     updated_at: datetime = datetime.now()
 
-    class Config:
-        """Configuration for this pydantic object."""
-
-        arbitrary_types_allowed = True
-
     def to_dict(self):
         return {k: self._serialize(v) for k, v in self.__dict__.items()}
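
Taken together with the model_config line added in the previous hunk, this is the standard pydantic 1.x to 2.x configuration migration: the inner class Config is replaced by a class-level model_config attribute. A minimal illustration of the before/after (the model names here are made up, not from the diff):

    from dbgpt._private.pydantic import BaseModel, ConfigDict

    # pydantic 1.x style (removed above)
    class ExampleV1(BaseModel):
        class Config:
            arbitrary_types_allowed = True

    # pydantic 2.x style (added above)
    class ExampleV2(BaseModel):
        model_config = ConfigDict(arbitrary_types_allowed=True)
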
@@ -86,6 +83,8 @@ class GptsAppDetail(BaseModel):
 
 
 class GptsApp(BaseModel):
+    model_config = ConfigDict(arbitrary_types_allowed=True)
+
     app_code: Optional[str] = None
     app_name: Optional[str] = None
     app_describe: Optional[str] = None
@@ -100,11 +99,6 @@ class GptsApp(BaseModel):
     updated_at: datetime = datetime.now()
     details: List[GptsAppDetail] = []
 
-    class Config:
-        """Configuration for this pydantic object."""
-
-        arbitrary_types_allowed = True
-
    def to_dict(self):
         return {k: self._serialize(v) for k, v in self.__dict__.items()}
@@ -146,7 +140,9 @@ class GptsAppResponse(BaseModel):
     total_count: Optional[int] = 0
     total_page: Optional[int] = 0
     current_page: Optional[int] = 0
-    app_list: Optional[List[GptsApp]] = []
+    app_list: Optional[List[GptsApp]] = Field(
+        default_factory=list, description="app list"
+    )
 
 
 class GptsAppCollection(BaseModel):
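
Replacing the bare `= []` default with Field(default_factory=list, ...) makes the per-instance empty list explicit and attaches a description that shows up in generated schemas and API docs. A small sketch of the behaviour, using a made-up model name:

    from typing import List, Optional

    from dbgpt._private.pydantic import BaseModel, Field

    class PageResult(BaseModel):
        items: Optional[List[str]] = Field(default_factory=list, description="item list")

    # Every instance gets its own fresh list from the factory.
    a, b = PageResult(), PageResult()
    assert a.items == [] and a.items is not b.items
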
@@ -207,7 +203,8 @@ class GptsAppEntity(Model):
     team_context = Column(
         Text,
         nullable=True,
-        comment="The execution logic and team member content that teams with different working modes rely on",
+        comment="The execution logic and team member content that teams with different "
+        "working modes rely on",
     )
 
     user_code = Column(String(255), nullable=True, comment="user code")
@@ -565,7 +562,7 @@ def _parse_team_context(team_context: Optional[Union[str, AWELTeamContext]] = No
     parse team_context to str
     """
     if isinstance(team_context, AWELTeamContext):
-        return team_context.json()
+        return model_to_json(team_context)
     return team_context
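
pydantic 2.x deprecates Model.json() in favor of Model.model_dump_json(); routing the call through model_to_json keeps _parse_team_context free of version checks. Assuming the helper simply delegates to the installed pydantic's serializer, a quick illustrative check (the model below is made up, not from the diff):

    from dbgpt._private.pydantic import BaseModel, model_to_json

    class TinyContext(BaseModel):
        name: str = "demo"
        max_round: int = 5

    ctx = TinyContext()
    # Under pydantic 2.x this is expected to match ctx.model_dump_json().
    print(model_to_json(ctx))
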
@@ -1,9 +1,12 @@
 from datetime import datetime
+from typing import List
 
 from sqlalchemy import Column, DateTime, Integer, String, UniqueConstraint, func
 
 from dbgpt.storage.metadata import BaseDao, Model
 
+from ..model import MyPluginVO
+
 
 class MyPluginEntity(Model):
     __tablename__ = "my_plugin"
@@ -27,6 +30,28 @@ class MyPluginEntity(Model):
     )
     UniqueConstraint("user_code", "name", name="uk_name")
 
+    @classmethod
+    def to_vo(cls, entities: List["MyPluginEntity"]) -> List[MyPluginVO]:
+        results = []
+        for entity in entities:
+            results.append(
+                MyPluginVO(
+                    id=entity.id,
+                    tenant=entity.tenant,
+                    user_code=entity.user_code,
+                    user_name=entity.user_name,
+                    sys_code=entity.sys_code,
+                    name=entity.name,
+                    file_name=entity.file_name,
+                    type=entity.type,
+                    version=entity.version,
+                    use_count=entity.use_count,
+                    succ_count=entity.succ_count,
+                    gmt_created=entity.gmt_created.strftime("%Y-%m-%d %H:%M:%S"),
+                )
+            )
+        return results
+
 
 class MyPluginDao(BaseDao):
     def add(self, engity: MyPluginEntity):
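
The new to_vo classmethod converts ORM rows into MyPluginVO view objects in one place instead of at every call site. A hypothetical caller (the query helper below is an assumption for illustration and relies on MyPluginEntity and MyPluginVO from the file above; it is not part of the diff):

    from typing import List

    from sqlalchemy.orm import Session

    def list_user_plugins(session: Session, user_code: str) -> List[MyPluginVO]:
        # Fetch the ORM entities, then hand plain view objects back to the API layer.
        entities = (
            session.query(MyPluginEntity)
            .filter(MyPluginEntity.user_code == user_code)
            .all()
        )
        return MyPluginEntity.to_vo(entities)
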
@@ -1,4 +1,5 @@
 from datetime import datetime
+from typing import List
 
 import pytz
 from sqlalchemy import (
@@ -14,6 +15,8 @@ from sqlalchemy import (
 
 from dbgpt.storage.metadata import BaseDao, Model
 
+from ..model import PluginHubVO
+
 # TODO We should consider that the production environment does not have permission to execute the DDL
 char_set_sql = DDL("ALTER TABLE plugin_hub CONVERT TO CHARACTER SET utf8mb4")
@@ -40,6 +43,27 @@ class PluginHubEntity(Model):
     UniqueConstraint("name", name="uk_name")
     Index("idx_q_type", "type")
 
+    @classmethod
+    def to_vo(cls, entities: List["PluginHubEntity"]) -> List[PluginHubVO]:
+        results = []
+        for entity in entities:
+            vo = PluginHubVO(
+                id=entity.id,
+                name=entity.name,
+                description=entity.description,
+                author=entity.author,
+                email=entity.email,
+                type=entity.type,
+                version=entity.version,
+                storage_channel=entity.storage_channel,
+                storage_url=entity.storage_url,
+                download_param=entity.download_param,
+                installed=entity.installed,
+                gmt_created=entity.gmt_created.strftime("%Y-%m-%d %H:%M:%S"),
+            )
+            results.append(vo)
+        return results
+
 
 class PluginHubDao(BaseDao):
     def add(self, engity: PluginHubEntity):
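
PluginHubEntity.to_vo follows the same pattern as MyPluginEntity.to_vo above, and gmt_created is rendered as a formatted string during conversion. A hypothetical consumer that needs a datetime again can parse it back with the same format (PluginHubVO is the view object imported in the file above):

    from datetime import datetime

    def vo_created_at(vo: PluginHubVO) -> datetime:
        # Format string matches the strftime call in to_vo above.
        return datetime.strptime(vo.gmt_created, "%Y-%m-%d %H:%M:%S")
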