Mirror of https://github.com/csunny/DB-GPT.git (synced 2025-09-09 04:49:26 +00:00)
chore:v0.6.1 version (#2083)
@@ -1 +1 @@
-version = "0.6.0"
+version = "0.6.1"
@@ -6,11 +6,9 @@ from fastapi import APIRouter, Depends, HTTPException
 from fastapi.security.http import HTTPAuthorizationCredentials, HTTPBearer

 from dbgpt.component import ComponentType, SystemApp
-from dbgpt.core.interface.evaluation import metric_manage
 from dbgpt.model.cluster import BaseModelController, WorkerManager, WorkerManagerFactory
-from dbgpt.rag.evaluation.answer import AnswerRelevancyMetric
 from dbgpt.serve.core import Result
-from dbgpt.serve.evaluate.api.schemas import EvaluateServeRequest, EvaluateServeResponse
+from dbgpt.serve.evaluate.api.schemas import EvaluateServeRequest
 from dbgpt.serve.evaluate.config import SERVE_SERVICE_COMPONENT_NAME
 from dbgpt.serve.evaluate.service.service import Service

@@ -1,5 +1,5 @@
 from enum import Enum
-from typing import Any, Dict, List, Optional
+from typing import List, Optional

 from dbgpt._private.pydantic import BaseModel, Field

@@ -11,11 +11,6 @@ class EvaluationScene(Enum):
     APP = "app"


-class DatasetStorageType(Enum):
-    OSS = "oss"
-    DB = "db"
-
-
 class EvaluateServeRequest(BaseModel):
     evaluate_code: Optional[str] = Field(None, description="evaluation code")
     scene_key: Optional[str] = Field(None, description="evaluation scene key")
@@ -42,22 +37,3 @@ class EvaluateServeRequest(BaseModel):
 class EvaluateServeResponse(EvaluateServeRequest):
     class Config:
         title = f"EvaluateServeResponse for {SERVE_APP_NAME_HUMP}"
-
-
-class DatasetServeRequest(BaseModel):
-    code: Optional[str] = Field(None, description="dataset code")
-    name: Optional[str] = Field(None, description="dataset name")
-    file_type: Optional[str] = Field(None, description="dataset file type")
-    storage_type: Optional[str] = Field(None, comment="datasets storage type")
-    storage_position: Optional[str] = Field(None, comment="datasets storage position")
-    datasets_count: Optional[int] = Field(None, comment="datasets row count")
-    have_answer: Optional[bool] = Field(None, comment="datasets have answer")
-    members: Optional[str] = Field(None, comment="datasets manager members")
-    user_name: Optional[str] = Field(None, description="user name")
-    user_id: Optional[str] = Field(None, description="user id")
-    sys_code: Optional[str] = Field(None, description="system code")
-
-
-class DatasetServeResponse(DatasetServeRequest):
-    gmt_create: Optional[str] = Field(None, description="create time")
-    gmt_modified: Optional[str] = Field(None, description="create time")
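A note on the trimmed schema: after this hunk the evaluate serve API keeps only the EvaluateServeRequest/EvaluateServeResponse pair, and the dataset schemas above are gone. A minimal usage sketch of the remaining request model follows, assuming DB-GPT is installed so the import resolves; the field values are illustrative only.

    # Minimal sketch; field names come from the diff above, values are made up.
    from dbgpt.serve.evaluate.api.schemas import EvaluateServeRequest

    request = EvaluateServeRequest(
        evaluate_code="eval-001",   # hypothetical evaluation code
        scene_key="app",            # matches EvaluationScene.APP
    )
    # .dict() mirrors how the DAO layer flattens requests elsewhere in this diff.
    print(request.dict())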
@@ -6,13 +6,13 @@ import uuid
 from datetime import datetime
 from typing import Any, Dict, Union

-from sqlalchemy import Column, DateTime, Index, Integer, String, Text, UniqueConstraint
+from sqlalchemy import Column, DateTime, Integer, String, Text, UniqueConstraint

 from dbgpt.agent.core.schema import Status
-from dbgpt.storage.metadata import BaseDao, Model, db
+from dbgpt.storage.metadata import BaseDao, Model

 from ..api.schemas import EvaluateServeRequest, EvaluateServeResponse
-from ..config import SERVER_APP_TABLE_NAME, ServeConfig
+from ..config import ServeConfig


 class ServeEntity(Model):
@@ -1,122 +0,0 @@
-from datetime import datetime
-from typing import Any, Dict, Union
-
-from sqlalchemy import Column, DateTime, Index, Integer, String, Text, UniqueConstraint
-
-from dbgpt.storage.metadata import BaseDao, Model, db
-
-from ..api.schemas import DatasetServeRequest, DatasetServeResponse
-from ..config import SERVER_APP_TABLE_NAME, ServeConfig
-
-
-class DatasetServeEntity(Model):
-    __tablename__ = "evaluate_datasets"
-    __table_args__ = (
-        UniqueConstraint(
-            "code",
-            name="uk_dataset",
-        ),
-        UniqueConstraint(
-            "name",
-            name="uk_dataset_name",
-        ),
-    )
-    id = Column(Integer, primary_key=True, comment="Auto increment id")
-    code = Column(String(256), comment="evaluate datasets Code")
-    name = Column(String(1000), comment="evaluate datasets Name")
-    file_type = Column(String(256), comment="datasets file type")
-    storage_type = Column(String(256), comment="datasets storage type")
-    storage_position = Column(Text, comment="datasets storage position")
-    datasets_count = Column(Integer, comment="datasets row count")
-    have_answer = Column(String(10), comment="datasets have answer")
-    members = Column(String(1000), comment="evaluate datasets members")
-    user_id = Column(String(100), index=True, nullable=True, comment="User id")
-    user_name = Column(String(128), index=True, nullable=True, comment="User name")
-    sys_code = Column(String(128), index=True, nullable=True, comment="System code")
-    gmt_create = Column(DateTime, default=datetime.now, comment="Record creation time")
-    gmt_modified = Column(
-        DateTime,
-        default=datetime.now,
-        onupdate=datetime.now,
-        comment="Record update time",
-    )
-
-    def __repr__(self):
-        return f"ServeEntity(id={self.id}, code='{self.code}', name='{self.name}', file_type='{self.file_type}', storage_type='{self.storage_type}', storage_position='{self.storage_position}', datasets_count='{self.datasets_count}', user_id='{self.user_id}', user_name='{self.user_name}', sys_code='{self.sys_code}', gmt_create='{self.gmt_create}', gmt_modified='{self.gmt_modified}')"
-
-
-class DatasetServeDao(
-    BaseDao[DatasetServeEntity, DatasetServeRequest, DatasetServeResponse]
-):
-    """The DAO class for Prompt"""
-
-    def __init__(self, serve_config: ServeConfig):
-        super().__init__()
-        self._serve_config = serve_config
-
-    def from_request(
-        self, request: Union[DatasetServeRequest, Dict[str, Any]]
-    ) -> DatasetServeEntity:
-        """Convert the request to an entity
-
-        Args:
-            request (Union[DatasetServeRequest, Dict[str, Any]]): The request
-
-        Returns:
-            T: The entity
-        """
-        request_dict = (
-            request.dict() if isinstance(request, DatasetServeRequest) else request
-        )
-        entity = DatasetServeEntity(**request_dict)
-        return entity
-
-    def to_request(self, entity: DatasetServeEntity) -> DatasetServeRequest:
-        """Convert the entity to a request
-
-        Args:
-            entity (T): The entity
-
-        Returns:
-            REQ: The request
-        """
-        return DatasetServeRequest(
-            code=entity.code,
-            name=entity.name,
-            file_type=entity.file_type,
-            storage_type=entity.storage_type,
-            storage_position=entity.storage_position,
-            datasets_count=entity.datasets_count,
-            have_answer=entity.have_answer,
-            members=entity.members,
-            user_name=entity.user_name,
-            user_id=entity.user_id,
-            sys_code=entity.sys_code,
-        )
-
-    def to_response(self, entity: DatasetServeEntity) -> DatasetServeResponse:
-        """Convert the entity to a response
-
-        Args:
-            entity (T): The entity
-
-        Returns:
-            RES: The response
-        """
-        gmt_created_str = entity.gmt_create.strftime("%Y-%m-%d %H:%M:%S")
-        gmt_modified_str = entity.gmt_modified.strftime("%Y-%m-%d %H:%M:%S")
-        return DatasetServeResponse(
-            code=entity.code,
-            name=entity.name,
-            file_type=entity.file_type,
-            storage_type=entity.storage_type,
-            storage_position=entity.storage_position,
-            datasets_count=entity.datasets_count,
-            have_answer=entity.have_answer,
-            members=entity.members,
-            user_name=entity.user_name,
-            user_id=entity.user_id,
-            sys_code=entity.sys_code,
-            gmt_create=gmt_created_str,
-            gmt_modified=gmt_modified_str,
-        )
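For context on what is dropped here: the deleted DatasetServeDao only supplies the conversion hooks (from_request, to_request, to_response) on top of the shared BaseDao generic, which, from what is visible in this file, owns persistence and session handling. Below is a toy, framework-free sketch of that conversion contract; every name in it is an illustrative stand-in, not DB-GPT's actual API.

    # Toy illustration of the BaseDao conversion contract; not DB-GPT's real base class.
    from dataclasses import dataclass
    from typing import Any, Dict, Generic, TypeVar, Union

    ENT = TypeVar("ENT")
    REQ = TypeVar("REQ")
    RES = TypeVar("RES")


    class ToyBaseDao(Generic[ENT, REQ, RES]):
        def from_request(self, request: Union[REQ, Dict[str, Any]]) -> ENT:
            raise NotImplementedError  # subclass maps request fields onto an entity

        def to_response(self, entity: ENT) -> RES:
            raise NotImplementedError  # subclass maps entity fields back to a response


    @dataclass
    class DatasetRow:  # stand-in for the deleted DatasetServeEntity
        code: str
        name: str


    class ToyDatasetDao(ToyBaseDao[DatasetRow, Dict[str, Any], Dict[str, Any]]):
        def from_request(self, request):
            return DatasetRow(code=request["code"], name=request["name"])

        def to_response(self, entity):
            return {"code": entity.code, "name": entity.name}


    dao = ToyDatasetDao()
    row = dao.from_request({"code": "ds-001", "name": "demo set"})
    print(dao.to_response(row))  # -> {'code': 'ds-001', 'name': 'demo set'}

The real DAO differs only in that the entity is a SQLAlchemy model and the response is a pydantic schema, as the deleted file above shows.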
@@ -1,6 +1,3 @@
-import asyncio
-import io
-import json
 import logging
 from concurrent.futures import ThreadPoolExecutor
 from typing import List, Optional
@@ -19,7 +16,6 @@ from dbgpt.rag.embedding.embedding_factory import EmbeddingFactory
 from dbgpt.rag.evaluation import RetrieverEvaluator
 from dbgpt.rag.evaluation.answer import AnswerRelevancyMetric
 from dbgpt.rag.evaluation.retriever import RetrieverSimilarityMetric
-from dbgpt.serve.core import BaseService
 from dbgpt.serve.rag.operators.knowledge_space import SpaceRetrieverOperator
 from dbgpt.storage.metadata import BaseDao
 from dbgpt.storage.vector_store.base import VectorStoreConfig
@@ -27,6 +23,7 @@ from dbgpt.storage.vector_store.base import VectorStoreConfig
 from ...agent.agents.controller import multi_agents
 from ...agent.evaluation.evaluation import AgentEvaluator, AgentOutputOperator
 from ...agent.evaluation.evaluation_metric import IntentMetric
+from ...core import BaseService
 from ...prompt.service.service import Service as PromptService
 from ...rag.connector import VectorStoreConnector
 from ...rag.service.service import Service as RagService
@@ -70,7 +67,6 @@ class Service(BaseService[ServeEntity, EvaluateServeRequest, EvaluateServeRespon
         self._serve_config = ServeConfig.from_app_config(
             system_app.config, SERVE_CONFIG_KEY_PREFIX
         )
-        self._dao = self._dao or ServeDao(self._serve_config)
         self._system_app = system_app

     @property
setup.py
@@ -20,7 +20,7 @@ with open("README.md", mode="r", encoding="utf-8") as fh:
 IS_DEV_MODE = os.getenv("IS_DEV_MODE", "true").lower() == "true"
 # If you modify the version, please modify the version in the following files:
 # dbgpt/_version.py
-DB_GPT_VERSION = os.getenv("DB_GPT_VERSION", "0.6.0")
+DB_GPT_VERSION = os.getenv("DB_GPT_VERSION", "0.6.1")

 BUILD_NO_CACHE = os.getenv("BUILD_NO_CACHE", "true").lower() == "true"
 LLAMA_CPP_GPU_ACCELERATION = (
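The setup.py change only touches the fallback literal; an explicitly exported DB_GPT_VERSION environment variable still takes precedence at build time. A quick standalone check of that precedence (the override value is made up):

    # Standalone check of the os.getenv fallback behaviour used in setup.py.
    import os

    os.environ.pop("DB_GPT_VERSION", None)
    print(os.getenv("DB_GPT_VERSION", "0.6.1"))    # -> 0.6.1 (new default)

    os.environ["DB_GPT_VERSION"] = "9.9.9"          # hypothetical override
    print(os.getenv("DB_GPT_VERSION", "0.6.1"))    # -> 9.9.9 (env var wins)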