refactor: The first refactored version for sdk release (#907)

Co-authored-by: chengfangyin2 <chengfangyin3@jd.com>
This commit is contained in:
FangYin Cheng
2023-12-08 14:45:59 +08:00
committed by GitHub
parent e7e4aff667
commit cd725db1fb
573 changed files with 2094 additions and 3571 deletions

4
.gitignore vendored
View File

@@ -26,9 +26,9 @@ sdist/
var/
wheels/
models/
/models/
# Soft link
models
/models
plugins/
pip-wheel-metadata/

17
dbgpt/__init__.py Normal file
View File

@@ -0,0 +1,17 @@
"""Top-level ``dbgpt`` package: re-export core components and lazy-load sub-packages."""
from dbgpt.component import SystemApp, BaseComponent

# Public API of the package. PEP 8 spells this ``__all__`` (lowercase);
# the original ``__ALL__`` was a typo with no effect on ``import *``.
__all__ = ["SystemApp", "BaseComponent"]

# Sub-packages that may be imported lazily via attribute access.
_CORE_LIBS = ["core", "rag", "model", "agent", "datasource", "vis", "storage", "train"]
_SERVE_LIBS = ["serve"]
_LIBS = _CORE_LIBS + _SERVE_LIBS


def __getattr__(name: str):
    """Lazily import known sub-packages on first attribute access (PEP 562).

    Args:
        name: Attribute requested on the ``dbgpt`` module.

    Returns:
        The imported sub-module when ``name`` is one of the known libs.

    Raises:
        AttributeError: If ``name`` is not a known sub-package.
    """
    # Import inside the function so module import stays cheap.
    import importlib

    if name in _LIBS:
        return importlib.import_module("." + name, __name__)
    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")

View File

@@ -0,0 +1,4 @@
"""This is a private module.
You should not import anything from this module.
"""

View File

@@ -1,10 +1,8 @@
import asyncio
from typing import Coroutine, List, Any
from starlette.responses import StreamingResponse
from pilot.scene.base_chat import BaseChat
from pilot.scene.chat_factory import ChatFactory
from dbgpt.app.scene import BaseChat, ChatFactory
chat_factory = ChatFactory()

View File

@@ -5,11 +5,11 @@ from __future__ import annotations
import os
from typing import List, Optional, TYPE_CHECKING
from pilot.singleton import Singleton
from dbgpt.util.singleton import Singleton
if TYPE_CHECKING:
from auto_gpt_plugin_template import AutoGPTPluginTemplate
from pilot.component import SystemApp
from dbgpt.component import SystemApp
class Config(metaclass=Singleton):
@@ -120,7 +120,7 @@ class Config(metaclass=Singleton):
)
self.speak_mode = False
from pilot.prompts.prompt_registry import PromptTemplateRegistry
from dbgpt.core._private.prompt_registry import PromptTemplateRegistry
self.prompt_template_registry = PromptTemplateRegistry()
### Related configuration of built-in commands

View File

@@ -1,4 +1,4 @@
from pydantic import Field, BaseModel
from dbgpt._private.pydantic import Field, BaseModel
DEFAULT_CONTEXT_WINDOW = 3900
DEFAULT_NUM_OUTPUTS = 256

View File

@@ -0,0 +1,33 @@
# Compatibility shim: expose one consistent set of pydantic v1-style names
# regardless of whether pydantic 1.x or 2.x is installed.
import pydantic

if pydantic.VERSION.startswith("1."):
    # Native pydantic 1.x: import the public names directly.
    PYDANTIC_VERSION = 1  # integer major version, for callers that need to branch
    from pydantic import (
        BaseModel,
        Extra,
        Field,
        NonNegativeFloat,
        NonNegativeInt,
        PositiveFloat,
        PositiveInt,
        ValidationError,
        root_validator,
        validator,
        PrivateAttr,
    )
else:
    PYDANTIC_VERSION = 2
    # pydantic 2.x
    # pydantic 2.x ships the 1.x API under the ``pydantic.v1`` namespace;
    # importing from there keeps downstream code on v1 validator semantics.
    # NOTE(review): a future pydantic 3.x would also take this branch — confirm
    # the version check if/when that ships.
    from pydantic.v1 import (
        BaseModel,
        Extra,
        Field,
        NonNegativeFloat,
        NonNegativeInt,
        PositiveFloat,
        PositiveInt,
        ValidationError,
        root_validator,
        validator,
        PrivateAttr,
    )

View File

@@ -3,8 +3,8 @@ import json
import requests
from pilot.base_modules.agent.commands.command_mange import command
from pilot.configs.config import Config
from dbgpt.agent.commands.command_mange import command
from dbgpt._private.config import Config
CFG = Config()

View File

@@ -7,8 +7,8 @@ import logging
import requests
from PIL import Image
from pilot.base_modules.agent.commands.command_mange import command
from pilot.configs.config import Config
from dbgpt.agent.commands.command_mange import command
from dbgpt._private.config import Config
logger = logging.getLogger(__name__)
CFG = Config()

View File

@@ -7,7 +7,7 @@ from typing import Dict
from .exception_not_commands import NotCommands
from .generator import PluginPromptGenerator
from pilot.configs.config import Config
from dbgpt._private.config import Config
def _resolve_pathlike_command_args(command_args):
@@ -36,7 +36,7 @@ def execute_ai_response_json(
Returns:
"""
from pilot.speech.say import say_text
from dbgpt.util.speech.say import say_text
cfg = Config()

View File

@@ -1,20 +1,17 @@
import functools
import importlib
import inspect
import time
import json
import logging
import xml.etree.ElementTree as ET
import pandas as pd
from pilot.common.json_utils import serialize
from dbgpt.util.json_utils import serialize
from datetime import datetime
from typing import Any, Callable, Optional, List
from pydantic import BaseModel
from pilot.base_modules.agent.common.schema import Status, ApiTagType
from pilot.base_modules.agent.commands.command import execute_command
from pilot.base_modules.agent.commands.generator import PluginPromptGenerator
from pilot.common.string_utils import extract_content_open_ending, extract_content
from dbgpt._private.pydantic import BaseModel
from dbgpt.agent.common.schema import Status
from dbgpt.agent.commands.command import execute_command
from dbgpt.util.string_utils import extract_content_open_ending, extract_content
# Unique identifier for auto-gpt commands
AUTO_GPT_COMMAND_IDENTIFIER = "auto_gpt_command"

View File

@@ -1,6 +1,6 @@
from pandas import DataFrame
from pilot.base_modules.agent.commands.command_mange import command
from dbgpt.agent.commands.command_mange import command
import pandas as pd
import uuid
import os
@@ -11,14 +11,15 @@ matplotlib.use("Agg")
import matplotlib.pyplot as plt
import matplotlib.ticker as mtick
from matplotlib.font_manager import FontManager
from pilot.common.string_utils import is_scientific_notation
from dbgpt.util.string_utils import is_scientific_notation
from dbgpt.configs.model_config import PILOT_PATH
import logging
logger = logging.getLogger(__name__)
static_message_img_path = os.path.join(os.getcwd(), "message/img")
static_message_img_path = os.path.join(PILOT_PATH, "message/img")
def data_pre_classification(df: DataFrame):

View File

@@ -1,6 +1,6 @@
from pandas import DataFrame
from pilot.base_modules.agent.commands.command_mange import command
from dbgpt.agent.commands.command_mange import command
import logging

View File

@@ -1,6 +1,6 @@
from pandas import DataFrame
from pilot.base_modules.agent.commands.command_mange import command
from dbgpt.agent.commands.command_mange import command
import logging

View File

@@ -1,5 +1,4 @@
""" A module for generating custom prompt strings."""
import json
from typing import Any, Callable, Dict, List, Optional

View File

@@ -1,5 +1,3 @@
import json
import time
import logging
from fastapi import (
APIRouter,
@@ -7,12 +5,10 @@ from fastapi import (
UploadFile,
File,
)
from abc import ABC, abstractmethod
from abc import ABC
from typing import List
from pilot.configs.model_config import LOGDIR
from pilot.openapi.api_view_model import (
from dbgpt.app.openapi.api_view_model import (
Result,
)
@@ -21,15 +17,14 @@ from .model import (
PagenationFilter,
PagenationResult,
PluginHubFilter,
MyPluginFilter,
)
from .hub.agent_hub import AgentHub
from .db.plugin_hub_db import PluginHubEntity
from .plugins_util import scan_plugins
from .commands.generator import PluginPromptGenerator
from pilot.configs.model_config import PLUGINS_DIR
from pilot.component import BaseComponent, ComponentType, SystemApp
from dbgpt.configs.model_config import PLUGINS_DIR
from dbgpt.component import BaseComponent, ComponentType, SystemApp
router = APIRouter()
logger = logging.getLogger(__name__)
@@ -79,6 +74,7 @@ async def agent_hub_update(update_param: PluginHubParam = Body()):
if update_param.branch is not None and len(update_param.branch) > 0
else None
)
# TODO change it to async
agent_hub.refresh_hub_from_git(update_param.url, branch, authorization)
return Result.succ(None)
except Exception as e:

View File

@@ -1,10 +1,9 @@
from datetime import datetime
from typing import List
from sqlalchemy import Column, Integer, String, Index, DateTime, func
from sqlalchemy import Column, Integer, String, DateTime, func
from sqlalchemy import UniqueConstraint
from pilot.base_modules.meta_data.base_dao import BaseDao
from pilot.base_modules.meta_data.meta_data import (
from dbgpt.storage.metadata import BaseDao
from dbgpt.storage.metadata.meta_data import (
Base,
engine,
session,

View File

@@ -1,12 +1,10 @@
from datetime import datetime
import pytz
from typing import List
from sqlalchemy import Column, Integer, String, Index, DateTime, func, Boolean, DDL
from sqlalchemy import Column, Integer, String, Index, DateTime, func, DDL
from sqlalchemy import UniqueConstraint
from pilot.base_modules.meta_data.meta_data import Base
from pilot.base_modules.meta_data.base_dao import BaseDao
from pilot.base_modules.meta_data.meta_data import (
from dbgpt.storage.metadata import BaseDao
from dbgpt.storage.metadata.meta_data import (
Base,
engine,
session,

View File

@@ -1,7 +1,7 @@
from typing import TypedDict, Optional, Dict, List
from dataclasses import dataclass
from pydantic import BaseModel, Field
from typing import TypeVar, Generic, Any
from dbgpt._private.pydantic import BaseModel, Field
T = TypeVar("T")

View File

@@ -4,22 +4,19 @@ import json
import os
import glob
import zipfile
import fnmatch
import requests
import git
import threading
import datetime
import logging
from pathlib import Path
from typing import List
from urllib.parse import urlparse
from zipimport import zipimporter
import requests
from auto_gpt_plugin_template import AutoGPTPluginTemplate
from pilot.configs.config import Config
from pilot.configs.model_config import PLUGINS_DIR
from dbgpt._private.config import Config
from dbgpt.configs.model_config import PLUGINS_DIR
logger = logging.getLogger(__name__)

View File

@@ -1,10 +1,12 @@
"""The app package.
This package will not be uploaded to PyPI. So, you can't import it if some other package depends on it.
"""
import os
import random
import sys
from dotenv import load_dotenv
from pilot.base_modules.agent import PluginPromptGenerator
if "pytest" in sys.argv or "pytest" in sys.modules or os.getenv("CI"):
print("Setting random seed to 42")

View File

@@ -1,22 +1,22 @@
import click
import os
from pilot.server.base import WebWerverParameters
from pilot.configs.model_config import LOGDIR
from pilot.utils.parameter_utils import EnvArgumentParser
from pilot.utils.command_utils import _run_current_with_daemon, _stop_service
from dbgpt.app.base import WebServerParameters
from dbgpt.configs.model_config import LOGDIR
from dbgpt.util.parameter_utils import EnvArgumentParser
from dbgpt.util.command_utils import _run_current_with_daemon, _stop_service
@click.command(name="webserver")
@EnvArgumentParser.create_click_option(WebWerverParameters)
@EnvArgumentParser.create_click_option(WebServerParameters)
def start_webserver(**kwargs):
"""Start webserver(dbgpt_server.py)"""
if kwargs["daemon"]:
log_file = os.path.join(LOGDIR, "webserver_uvicorn.log")
_run_current_with_daemon("WebServer", log_file)
else:
from pilot.server.dbgpt_server import run_webserver
from dbgpt.app.dbgpt_server import run_webserver
run_webserver(WebWerverParameters(**kwargs))
run_webserver(WebServerParameters(**kwargs))
@click.command(name="webserver")

View File

@@ -2,13 +2,13 @@ import signal
import os
import threading
import sys
from typing import Optional, Any
from typing import Optional
from dataclasses import dataclass, field
from pilot.configs.config import Config
from pilot.component import SystemApp
from pilot.utils.parameter_utils import BaseParameters
from pilot.base_modules.meta_data.meta_data import ddl_init_and_upgrade
from dbgpt._private.config import Config
from dbgpt.component import SystemApp
from dbgpt.util.parameter_utils import BaseParameters
from dbgpt.storage.metadata.meta_data import ddl_init_and_upgrade
ROOT_PATH = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
sys.path.append(ROOT_PATH)
@@ -21,15 +21,15 @@ def signal_handler(sig, frame):
def async_db_summary(system_app: SystemApp):
"""async db schema into vector db"""
from pilot.summary.db_summary_client import DBSummaryClient
from dbgpt.rag.summary.db_summary_client import DBSummaryClient
client = DBSummaryClient(system_app=system_app)
thread = threading.Thread(target=client.init_db_summary)
thread.start()
def server_init(param: "WebWerverParameters", system_app: SystemApp):
from pilot.base_modules.agent.commands.command_mange import CommandRegistry
def server_init(param: "WebServerParameters", system_app: SystemApp):
from dbgpt.agent.commands.command_mange import CommandRegistry
# logger.info(f"args: {args}")
@@ -44,8 +44,8 @@ def server_init(param: "WebWerverParameters", system_app: SystemApp):
# Loader plugins and commands
command_categories = [
"pilot.base_modules.agent.commands.built_in.audio_text",
"pilot.base_modules.agent.commands.built_in.image_gen",
"dbgpt.agent.commands.built_in.audio_text",
"dbgpt.agent.commands.built_in.image_gen",
]
# exclude commands
command_categories = [
@@ -58,9 +58,9 @@ def server_init(param: "WebWerverParameters", system_app: SystemApp):
cfg.command_registry = command_registry
command_disply_commands = [
"pilot.base_modules.agent.commands.disply_type.show_chart_gen",
"pilot.base_modules.agent.commands.disply_type.show_table_gen",
"pilot.base_modules.agent.commands.disply_type.show_text_gen",
"dbgpt.agent.commands.disply_type.show_chart_gen",
"dbgpt.agent.commands.disply_type.show_table_gen",
"dbgpt.agent.commands.disply_type.show_text_gen",
]
command_disply_registry = CommandRegistry()
for command in command_disply_commands:
@@ -69,7 +69,7 @@ def server_init(param: "WebWerverParameters", system_app: SystemApp):
def _create_model_start_listener(system_app: SystemApp):
from pilot.connections.manages.connection_manager import ConnectManager
from dbgpt.datasource.manages.connection_manager import ConnectManager
cfg = Config()
@@ -84,7 +84,7 @@ def _create_model_start_listener(system_app: SystemApp):
@dataclass
class WebWerverParameters(BaseParameters):
class WebServerParameters(BaseParameters):
host: Optional[str] = field(
default="0.0.0.0", metadata={"help": "Webserver deploy host"}
)

View File

@@ -1,14 +1,14 @@
"""
This code file will be deprecated in the future.
We have integrated fastchat. For details, see: pilot/model/model_adapter.py
We have integrated fastchat. For details, see: dbgpt/model/model_adapter.py
"""
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from functools import cache
from typing import List, Dict, Tuple
from pilot.model.conversation import Conversation, get_conv_template
from pilot.scene.base_message import ModelMessage, ModelMessageRoleType
from dbgpt.model.conversation import Conversation, get_conv_template
from dbgpt.core.interface.message import ModelMessage, ModelMessageRoleType
class BaseChatAdpter:
@@ -20,7 +20,7 @@ class BaseChatAdpter:
def get_generate_stream_func(self, model_path: str):
"""Return the generate stream handler func"""
from pilot.model.inference import generate_stream
from dbgpt.model.inference import generate_stream
return generate_stream
@@ -134,7 +134,7 @@ class VicunaChatAdapter(BaseChatAdpter):
return None
def get_generate_stream_func(self, model_path: str):
from pilot.model.llm_out.vicuna_base_llm import generate_stream
from dbgpt.model.llm_out.vicuna_base_llm import generate_stream
if self._is_llama2_based(model_path):
return super().get_generate_stream_func(model_path)
@@ -148,7 +148,7 @@ class ChatGLMChatAdapter(BaseChatAdpter):
return "chatglm" in model_path
def get_generate_stream_func(self, model_path: str):
from pilot.model.llm_out.chatglm_llm import chatglm_generate_stream
from dbgpt.model.llm_out.chatglm_llm import chatglm_generate_stream
return chatglm_generate_stream
@@ -160,7 +160,7 @@ class GuanacoChatAdapter(BaseChatAdpter):
return "guanaco" in model_path
def get_generate_stream_func(self, model_path: str):
from pilot.model.llm_out.guanaco_llm import guanaco_generate_stream
from dbgpt.model.llm_out.guanaco_llm import guanaco_generate_stream
return guanaco_generate_stream
@@ -172,7 +172,7 @@ class FalconChatAdapter(BaseChatAdpter):
return "falcon" in model_path
def get_generate_stream_func(self, model_path: str):
from pilot.model.llm_out.falcon_llm import falcon_generate_output
from dbgpt.model.llm_out.falcon_llm import falcon_generate_output
return falcon_generate_output
@@ -182,7 +182,7 @@ class ProxyllmChatAdapter(BaseChatAdpter):
return "proxyllm" in model_path
def get_generate_stream_func(self, model_path: str):
from pilot.model.llm_out.proxy_llm import proxyllm_generate_stream
from dbgpt.model.llm_out.proxy_llm import proxyllm_generate_stream
return proxyllm_generate_stream
@@ -192,7 +192,7 @@ class GorillaChatAdapter(BaseChatAdpter):
return "gorilla" in model_path
def get_generate_stream_func(self, model_path: str):
from pilot.model.llm_out.gorilla_llm import generate_stream
from dbgpt.model.llm_out.gorilla_llm import generate_stream
return generate_stream
@@ -202,7 +202,7 @@ class GPT4AllChatAdapter(BaseChatAdpter):
return "gptj-6b" in model_path
def get_generate_stream_func(self, model_path: str):
from pilot.model.llm_out.gpt4all_llm import gpt4all_generate_stream
from dbgpt.model.llm_out.gpt4all_llm import gpt4all_generate_stream
return gpt4all_generate_stream
@@ -245,7 +245,7 @@ class WizardLMChatAdapter(BaseChatAdpter):
class LlamaCppChatAdapter(BaseChatAdpter):
def match(self, model_path: str):
from pilot.model.adapter import LlamaCppAdapater
from dbgpt.model.adapter import LlamaCppAdapater
if "llama-cpp" == model_path:
return True
@@ -256,7 +256,7 @@ class LlamaCppChatAdapter(BaseChatAdpter):
return get_conv_template("llama-2")
def get_generate_stream_func(self, model_path: str):
from pilot.model.llm_out.llama_cpp_llm import generate_stream
from dbgpt.model.llm_out.llama_cpp_llm import generate_stream
return generate_stream

View File

@@ -2,14 +2,13 @@ from __future__ import annotations
import logging
from typing import TYPE_CHECKING, Any, Type
import os
from pilot.component import ComponentType, SystemApp
from pilot.configs.config import Config
from pilot.configs.model_config import MODEL_DISK_CACHE_DIR
from pilot.utils.executor_utils import DefaultExecutorFactory
from pilot.embedding_engine.embedding_factory import EmbeddingFactory
from pilot.server.base import WebWerverParameters
from dbgpt.component import ComponentType, SystemApp
from dbgpt._private.config import Config
from dbgpt.configs.model_config import MODEL_DISK_CACHE_DIR
from dbgpt.util.executor_utils import DefaultExecutorFactory
from dbgpt.rag.embedding_engine.embedding_factory import EmbeddingFactory
from dbgpt.app.base import WebServerParameters
if TYPE_CHECKING:
from langchain.embeddings.base import Embeddings
@@ -21,23 +20,23 @@ CFG = Config()
def initialize_components(
param: WebWerverParameters,
param: WebServerParameters,
system_app: SystemApp,
embedding_model_name: str,
embedding_model_path: str,
):
from pilot.model.cluster.controller.controller import controller
from dbgpt.model.cluster.controller.controller import controller
# Register global default executor factory first
system_app.register(DefaultExecutorFactory)
system_app.register_instance(controller)
# Register global default RAGGraphFactory
# from pilot.graph_engine.graph_factory import DefaultRAGGraphFactory
# from dbgpt.graph_engine.graph_factory import DefaultRAGGraphFactory
# system_app.register(DefaultRAGGraphFactory)
from pilot.base_modules.agent.controller import module_agent
from dbgpt.agent.controller import module_agent
system_app.register_instance(module_agent)
@@ -49,7 +48,7 @@ def initialize_components(
def _initialize_embedding_model(
param: WebWerverParameters,
param: WebServerParameters,
system_app: SystemApp,
embedding_model_name: str,
embedding_model_path: str,
@@ -79,8 +78,8 @@ class RemoteEmbeddingFactory(EmbeddingFactory):
def create(
self, model_name: str = None, embedding_cls: Type = None
) -> "Embeddings":
from pilot.model.cluster import WorkerManagerFactory
from pilot.model.cluster.embedding.remote_embedding import RemoteEmbeddings
from dbgpt.model.cluster import WorkerManagerFactory
from dbgpt.model.cluster.embedding.remote_embedding import RemoteEmbeddings
if embedding_cls:
raise NotImplementedError
@@ -116,9 +115,9 @@ class LocalEmbeddingFactory(EmbeddingFactory):
return self._model
def _load_model(self) -> "Embeddings":
from pilot.model.cluster.embedding.loader import EmbeddingLoader
from pilot.model.cluster.worker.embedding_worker import _parse_embedding_params
from pilot.model.parameter import (
from dbgpt.model.cluster.embedding.loader import EmbeddingLoader
from dbgpt.model.cluster.worker.embedding_worker import _parse_embedding_params
from dbgpt.model.parameter import (
EMBEDDING_NAME_TO_PARAMETER_CLASS_CONFIG,
BaseEmbeddingModelParameters,
EmbeddingModelParameters,
@@ -140,7 +139,7 @@ class LocalEmbeddingFactory(EmbeddingFactory):
def _initialize_model_cache(system_app: SystemApp):
from pilot.cache import initialize_cache
from dbgpt.storage.cache import initialize_cache
if not CFG.MODEL_CACHE_ENABLE:
logger.info("Model cache is not enable")
@@ -153,7 +152,7 @@ def _initialize_model_cache(system_app: SystemApp):
def _initialize_awel(system_app: SystemApp):
from pilot.awel import initialize_awel
from pilot.configs.model_config import _DAG_DEFINITION_DIR
from dbgpt.core.awel import initialize_awel
from dbgpt.configs.model_config import _DAG_DEFINITION_DIR
initialize_awel(system_app, _DAG_DEFINITION_DIR)

View File

@@ -2,53 +2,54 @@ import os
import argparse
import sys
from typing import List
import logging
ROOT_PATH = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
sys.path.append(ROOT_PATH)
from pilot.configs.config import Config
from pilot.configs.model_config import LLM_MODEL_CONFIG, EMBEDDING_MODEL_CONFIG, LOGDIR
from pilot.component import SystemApp
from dbgpt._private.config import Config
from dbgpt.configs.model_config import (
LLM_MODEL_CONFIG,
EMBEDDING_MODEL_CONFIG,
LOGDIR,
ROOT_PATH,
)
from dbgpt.component import SystemApp
from pilot.server.base import (
from dbgpt.app.base import (
server_init,
WebWerverParameters,
WebServerParameters,
_create_model_start_listener,
)
from pilot.server.component_configs import initialize_components
from dbgpt.app.component_configs import initialize_components
from fastapi.staticfiles import StaticFiles
from fastapi import FastAPI, applications
from fastapi.openapi.docs import get_swagger_ui_html
from fastapi.openapi.utils import get_openapi
from fastapi.exceptions import RequestValidationError
from fastapi.middleware.cors import CORSMiddleware
from pilot.server.knowledge.api import router as knowledge_router
from pilot.server.prompt.api import router as prompt_router
from pilot.server.llm_manage.api import router as llm_manage_api
from dbgpt.app.knowledge.api import router as knowledge_router
from dbgpt.app.prompt.api import router as prompt_router
from dbgpt.app.llm_manage.api import router as llm_manage_api
from pilot.openapi.api_v1.api_v1 import router as api_v1
from pilot.openapi.base import validation_exception_handler
from pilot.openapi.api_v1.editor.api_editor_v1 import router as api_editor_route_v1
from pilot.openapi.api_v1.feedback.api_fb_v1 import router as api_fb_v1
from pilot.base_modules.agent.commands.disply_type.show_chart_gen import (
from dbgpt.app.openapi.api_v1.api_v1 import router as api_v1
from dbgpt.app.openapi.base import validation_exception_handler
from dbgpt.app.openapi.api_v1.editor.api_editor_v1 import router as api_editor_route_v1
from dbgpt.app.openapi.api_v1.feedback.api_fb_v1 import router as api_fb_v1
from dbgpt.agent.commands.disply_type.show_chart_gen import (
static_message_img_path,
)
from pilot.model.cluster import initialize_worker_manager_in_client
from pilot.utils.utils import (
from dbgpt.model.cluster import initialize_worker_manager_in_client
from dbgpt.util.utils import (
setup_logging,
_get_logging_level,
logging_str_to_uvicorn_level,
setup_http_service_logging,
)
from pilot.utils.tracer import root_tracer, initialize_tracer, SpanType, SpanTypeRunName
from pilot.utils.parameter_utils import _get_dict_from_obj
from pilot.utils.system_utils import get_system_info
from pilot.base_modules.agent.controller import router as agent_route
from dbgpt.util.tracer import root_tracer, initialize_tracer, SpanType, SpanTypeRunName
from dbgpt.util.parameter_utils import _get_dict_from_obj
from dbgpt.util.system_utils import get_system_info
static_file_path = os.path.join(os.getcwd(), "server/static")
static_file_path = os.path.join(ROOT_PATH, "dbgpt", "app/static")
CFG = Config()
@@ -106,15 +107,15 @@ app.add_exception_handler(RequestValidationError, validation_exception_handler)
def _get_webserver_params(args: List[str] = None):
from pilot.utils.parameter_utils import EnvArgumentParser
from dbgpt.util.parameter_utils import EnvArgumentParser
parser: argparse.ArgumentParser = EnvArgumentParser.create_argparse_option(
WebWerverParameters
WebServerParameters
)
return WebWerverParameters(**vars(parser.parse_args(args=args)))
return WebServerParameters(**vars(parser.parse_args(args=args)))
def initialize_app(param: WebWerverParameters = None, args: List[str] = None):
def initialize_app(param: WebServerParameters = None, args: List[str] = None):
"""Initialize app
If you use gunicorn as a process manager, initialize_app can be invoke in `on_starting` hook.
Args:
@@ -127,7 +128,7 @@ def initialize_app(param: WebWerverParameters = None, args: List[str] = None):
if not param.log_level:
param.log_level = _get_logging_level()
setup_logging(
"pilot", logging_level=param.log_level, logger_filename=param.log_file
"dbgpt", logging_level=param.log_level, logger_filename=param.log_file
)
# Before start
@@ -180,7 +181,7 @@ def initialize_app(param: WebWerverParameters = None, args: List[str] = None):
return param
def run_uvicorn(param: WebWerverParameters):
def run_uvicorn(param: WebServerParameters):
import uvicorn
setup_http_service_logging()
@@ -192,7 +193,7 @@ def run_uvicorn(param: WebWerverParameters):
)
def run_webserver(param: WebWerverParameters = None):
def run_webserver(param: WebServerParameters = None):
if not param:
param = _get_webserver_params()
initialize_tracer(

View File

@@ -3,7 +3,7 @@ import logging
import os
import functools
from pilot.configs.model_config import DATASETS_DIR
from dbgpt.configs.model_config import DATASETS_DIR
_DEFAULT_API_ADDRESS: str = "http://127.0.0.1:5000"
API_ADDRESS: str = _DEFAULT_API_ADDRESS
@@ -129,7 +129,7 @@ def load(
chunk_overlap: int,
):
"""Load your local documents to DB-GPT"""
from pilot.server.knowledge._cli.knowledge_client import knowledge_init
from dbgpt.app.knowledge._cli.knowledge_client import knowledge_init
knowledge_init(
API_ADDRESS,
@@ -165,7 +165,7 @@ def load(
)
def delete(space_name: str, doc_name: str, y: bool):
"""Delete your knowledge space or document in space"""
from pilot.server.knowledge._cli.knowledge_client import knowledge_delete
from dbgpt.app.knowledge._cli.knowledge_client import knowledge_delete
knowledge_delete(API_ADDRESS, space_name, doc_name, confirm=y)
@@ -227,7 +227,7 @@ def list(
output: str,
):
"""List knowledge space"""
from pilot.server.knowledge._cli.knowledge_client import knowledge_list
from dbgpt.app.knowledge._cli.knowledge_client import knowledge_list
knowledge_list(
API_ADDRESS, space_name, page, page_size, doc_id, show_content, output

View File

@@ -6,18 +6,18 @@ import logging
from urllib.parse import urljoin
from concurrent.futures import ThreadPoolExecutor, as_completed
from pilot.openapi.api_view_model import Result
from pilot.server.knowledge.request.request import (
from dbgpt.app.openapi.api_view_model import Result
from dbgpt.app.knowledge.request.request import (
KnowledgeQueryRequest,
KnowledgeDocumentRequest,
ChunkQueryRequest,
DocumentQueryRequest,
)
from pilot.embedding_engine.knowledge_type import KnowledgeType
from pilot.server.knowledge.request.request import DocumentSyncRequest
from dbgpt.rag.embedding_engine.knowledge_type import KnowledgeType
from dbgpt.app.knowledge.request.request import DocumentSyncRequest
from pilot.server.knowledge.request.request import KnowledgeSpaceRequest
from dbgpt.app.knowledge.request.request import KnowledgeSpaceRequest
HTTP_HEADERS = {"Content-Type": "application/json"}

View File

@@ -5,19 +5,19 @@ import logging
from fastapi import APIRouter, File, UploadFile, Form
from pilot.configs.config import Config
from pilot.configs.model_config import (
from dbgpt._private.config import Config
from dbgpt.configs.model_config import (
EMBEDDING_MODEL_CONFIG,
KNOWLEDGE_UPLOAD_ROOT_PATH,
)
from pilot.openapi.api_v1.api_v1 import no_stream_generator, stream_generator
from dbgpt.app.openapi.api_v1.api_v1 import no_stream_generator, stream_generator
from pilot.openapi.api_view_model import Result
from pilot.embedding_engine.embedding_engine import EmbeddingEngine
from pilot.embedding_engine.embedding_factory import EmbeddingFactory
from dbgpt.app.openapi.api_view_model import Result
from dbgpt.rag.embedding_engine.embedding_engine import EmbeddingEngine
from dbgpt.rag.embedding_engine.embedding_factory import EmbeddingFactory
from pilot.server.knowledge.service import KnowledgeService
from pilot.server.knowledge.request.request import (
from dbgpt.app.knowledge.service import KnowledgeService
from dbgpt.app.knowledge.request.request import (
KnowledgeQueryRequest,
KnowledgeQueryResponse,
KnowledgeDocumentRequest,
@@ -29,8 +29,8 @@ from pilot.server.knowledge.request.request import (
DocumentSummaryRequest,
)
from pilot.server.knowledge.request.request import KnowledgeSpaceRequest
from pilot.utils.tracer import root_tracer, SpanType
from dbgpt.app.knowledge.request.request import KnowledgeSpaceRequest
from dbgpt.util.tracer import root_tracer, SpanType
logger = logging.getLogger(__name__)
@@ -253,8 +253,8 @@ async def document_summary(request: DocumentSummaryRequest):
async def entity_extract(request: EntityExtractRequest):
logger.info(f"Received params: {request}")
try:
from pilot.scene.base import ChatScene
from pilot.common.chat_util import llm_chat_response_nostream
from dbgpt.app.scene import ChatScene
from dbgpt._private.chat_util import llm_chat_response_nostream
import uuid
chat_param = {

View File

@@ -3,14 +3,14 @@ from typing import List
from sqlalchemy import Column, String, DateTime, Integer, Text, func
from pilot.base_modules.meta_data.base_dao import BaseDao
from pilot.base_modules.meta_data.meta_data import (
from dbgpt.storage.metadata import BaseDao
from dbgpt.storage.metadata.meta_data import (
Base,
engine,
session,
META_DATA_DATABASE,
)
from pilot.configs.config import Config
from dbgpt._private.config import Config
CFG = Config()

View File

@@ -2,14 +2,14 @@ from datetime import datetime
from sqlalchemy import Column, String, DateTime, Integer, Text, func
from pilot.base_modules.meta_data.base_dao import BaseDao
from pilot.base_modules.meta_data.meta_data import (
from dbgpt.storage.metadata import BaseDao
from dbgpt.storage.metadata.meta_data import (
Base,
engine,
session,
META_DATA_DATABASE,
)
from pilot.configs.config import Config
from dbgpt._private.config import Config
CFG = Config()

View File

@@ -1,6 +1,6 @@
from typing import List, Optional
from pydantic import BaseModel
from dbgpt._private.pydantic import BaseModel
from fastapi import UploadFile

View File

@@ -1,6 +1,6 @@
from typing import List
from pydantic import BaseModel
from dbgpt._private.pydantic import BaseModel
class ChunkQueryResponse(BaseModel):

View File

@@ -2,29 +2,29 @@ import json
import logging
from datetime import datetime
from pilot.vector_store.connector import VectorStoreConnector
from dbgpt.storage.vector_store.connector import VectorStoreConnector
from pilot.configs.config import Config
from pilot.configs.model_config import (
from dbgpt._private.config import Config
from dbgpt.configs.model_config import (
EMBEDDING_MODEL_CONFIG,
KNOWLEDGE_UPLOAD_ROOT_PATH,
)
from pilot.component import ComponentType
from pilot.utils.executor_utils import ExecutorFactory, blocking_func_to_async
from dbgpt.component import ComponentType
from dbgpt.util.executor_utils import ExecutorFactory, blocking_func_to_async
from pilot.server.knowledge.chunk_db import (
from dbgpt.app.knowledge.chunk_db import (
DocumentChunkEntity,
DocumentChunkDao,
)
from pilot.server.knowledge.document_db import (
from dbgpt.app.knowledge.document_db import (
KnowledgeDocumentDao,
KnowledgeDocumentEntity,
)
from pilot.server.knowledge.space_db import (
from dbgpt.app.knowledge.space_db import (
KnowledgeSpaceDao,
KnowledgeSpaceEntity,
)
from pilot.server.knowledge.request.request import (
from dbgpt.app.knowledge.request.request import (
KnowledgeSpaceRequest,
KnowledgeDocumentRequest,
DocumentQueryRequest,
@@ -35,7 +35,7 @@ from pilot.server.knowledge.request.request import (
)
from enum import Enum
from pilot.server.knowledge.request.response import (
from dbgpt.app.knowledge.request.response import (
ChunkQueryResponse,
DocumentQueryResponse,
SpaceQueryResponse,
@@ -192,9 +192,9 @@ class KnowledgeService:
- space: Knowledge Space Name
- sync_request: DocumentSyncRequest
"""
from pilot.embedding_engine.embedding_engine import EmbeddingEngine
from pilot.embedding_engine.embedding_factory import EmbeddingFactory
from pilot.embedding_engine.pre_text_splitter import PreTextSplitter
from dbgpt.rag.embedding_engine.embedding_engine import EmbeddingEngine
from dbgpt.rag.embedding_engine.embedding_factory import EmbeddingFactory
from dbgpt.rag.embedding_engine.pre_text_splitter import PreTextSplitter
from langchain.text_splitter import (
RecursiveCharacterTextSplitter,
SpacyTextSplitter,
@@ -432,7 +432,7 @@ class KnowledgeService:
f"async_knowledge_graph, doc:{doc.doc_name}, chunk_size:{len(chunk_docs)}, begin embedding to graph store"
)
try:
from pilot.rag.graph_engine.graph_factory import RAGGraphFactory
from dbgpt.rag.graph_engine.graph_factory import RAGGraphFactory
rag_engine = CFG.SYSTEM_APP.get_component(
ComponentType.RAG_GRAPH_DEFAULT.value, RAGGraphFactory
@@ -454,10 +454,10 @@ class KnowledgeService:
- doc: KnowledgeDocumentEntity
"""
texts = [doc.page_content for doc in chunk_docs]
from pilot.common.prompt_util import PromptHelper
from dbgpt.util.prompt_util import PromptHelper
prompt_helper = PromptHelper()
from pilot.scene.chat_knowledge.summary.prompt import prompt
from dbgpt.app.scene.chat_knowledge.summary.prompt import prompt
texts = prompt_helper.repack(prompt_template=prompt.template, text_chunks=texts)
logger.info(
@@ -501,7 +501,7 @@ class KnowledgeService:
return knowledge_document_dao.update_knowledge_document(doc)
def _build_default_context(self):
from pilot.scene.chat_knowledge.v1.prompt import (
from dbgpt.app.scene.chat_knowledge.v1.prompt import (
PROMPT_SCENE_DEFINE,
_DEFAULT_TEMPLATE,
)
@@ -556,7 +556,7 @@ class KnowledgeService:
Returns:
chat: BaseChat, refine summary chat.
"""
from pilot.scene.base import ChatScene
from dbgpt.app.scene import ChatScene
chat_param = {
"chat_session_id": conn_uid,
@@ -568,7 +568,7 @@ class KnowledgeService:
executor = CFG.SYSTEM_APP.get_component(
ComponentType.EXECUTOR_DEFAULT, ExecutorFactory
).create()
from pilot.openapi.api_v1.api_v1 import CHAT_FACTORY
from dbgpt.app.openapi.api_v1.api_v1 import CHAT_FACTORY
chat = await blocking_func_to_async(
executor,
@@ -596,8 +596,8 @@ class KnowledgeService:
Returns:
Document: refine summary context document.
"""
from pilot.scene.base import ChatScene
from pilot.common.chat_util import llm_chat_response_nostream
from dbgpt.app.scene import ChatScene
from dbgpt._private.chat_util import llm_chat_response_nostream
import uuid
tasks = []
@@ -618,7 +618,7 @@ class KnowledgeService:
ChatScene.ExtractSummary.value(), **{"chat_param": chat_param}
)
)
from pilot.common.chat_util import run_async_tasks
from dbgpt._private.chat_util import run_async_tasks
summary_iters = await run_async_tasks(
tasks=tasks, concurrency_limit=concurrency_limit
@@ -629,8 +629,8 @@ class KnowledgeService:
summary_iters,
)
)
from pilot.common.prompt_util import PromptHelper
from pilot.scene.chat_knowledge.summary.prompt import prompt
from dbgpt.util.prompt_util import PromptHelper
from dbgpt.app.scene.chat_knowledge.summary.prompt import prompt
prompt_helper = PromptHelper()
summary_iters = prompt_helper.repack(

View File

@@ -2,15 +2,15 @@ from datetime import datetime
from sqlalchemy import Column, Integer, Text, String, DateTime
from pilot.base_modules.meta_data.base_dao import BaseDao
from pilot.base_modules.meta_data.meta_data import (
from dbgpt.storage.metadata import BaseDao
from dbgpt.storage.metadata.meta_data import (
Base,
engine,
session,
META_DATA_DATABASE,
)
from pilot.configs.config import Config
from pilot.server.knowledge.request.request import KnowledgeSpaceRequest
from dbgpt._private.config import Config
from dbgpt.app.knowledge.request.request import KnowledgeSpaceRequest
CFG = Config()

View File

@@ -1,14 +1,12 @@
from typing import List
from fastapi import APIRouter
from pilot.component import ComponentType
from pilot.configs.config import Config
from dbgpt.component import ComponentType
from dbgpt._private.config import Config
from pilot.model.cluster import WorkerStartupRequest, WorkerManagerFactory
from pilot.openapi.api_view_model import Result
from dbgpt.model.cluster import WorkerStartupRequest, WorkerManagerFactory
from dbgpt.app.openapi.api_view_model import Result
from pilot.server.llm_manage.request.request import ModelResponse
from dbgpt.app.llm_manage.request.request import ModelResponse
CFG = Config()
router = APIRouter()
@@ -18,7 +16,7 @@ router = APIRouter()
async def model_params():
print(f"/worker/model/params")
try:
from pilot.model.cluster import WorkerManagerFactory
from dbgpt.model.cluster import WorkerManagerFactory
worker_manager = CFG.SYSTEM_APP.get_component(
ComponentType.WORKER_MANAGER_FACTORY, WorkerManagerFactory
@@ -42,7 +40,7 @@ async def model_params():
async def model_list():
print(f"/worker/model/list")
try:
from pilot.model.cluster.controller.controller import BaseModelController
from dbgpt.model.cluster.controller.controller import BaseModelController
controller = CFG.SYSTEM_APP.get_component(
ComponentType.MODEL_CONTROLLER, BaseModelController
@@ -85,7 +83,7 @@ async def model_list():
async def model_stop(request: WorkerStartupRequest):
print(f"/v1/worker/model/stop:")
try:
from pilot.model.cluster.controller.controller import BaseModelController
from dbgpt.model.cluster.controller.controller import BaseModelController
worker_manager = CFG.SYSTEM_APP.get_component(
ComponentType.WORKER_MANAGER_FACTORY, WorkerManagerFactory

View File

@@ -7,9 +7,9 @@ import sys
ROOT_PATH = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
sys.path.append(ROOT_PATH)
from pilot.configs.config import Config
from pilot.configs.model_config import LLM_MODEL_CONFIG, EMBEDDING_MODEL_CONFIG
from pilot.model.cluster import run_worker_manager
from dbgpt._private.config import Config
from dbgpt.configs.model_config import LLM_MODEL_CONFIG, EMBEDDING_MODEL_CONFIG
from dbgpt.model.cluster import run_worker_manager
CFG = Config()

View File

@@ -6,23 +6,18 @@ import aiofiles
import logging
from fastapi import (
APIRouter,
Request,
File,
UploadFile,
Form,
Body,
BackgroundTasks,
Depends,
)
from fastapi.responses import StreamingResponse
from fastapi.exceptions import RequestValidationError
from typing import List, Optional
import tempfile
from concurrent.futures import Executor
from pilot.component import ComponentType
from pilot.openapi.api_view_model import (
from dbgpt.component import ComponentType
from dbgpt.app.openapi.api_view_model import (
Result,
ConversationVo,
MessageVo,
@@ -31,24 +26,20 @@ from pilot.openapi.api_view_model import (
DeltaMessage,
ChatCompletionStreamResponse,
)
from pilot.connections.db_conn_info import DBConfig, DbTypeInfo
from pilot.configs.config import Config
from pilot.server.knowledge.service import KnowledgeService
from pilot.server.knowledge.request.request import KnowledgeSpaceRequest
from dbgpt.datasource.db_conn_info import DBConfig, DbTypeInfo
from dbgpt._private.config import Config
from dbgpt.app.knowledge.service import KnowledgeService
from dbgpt.app.knowledge.request.request import KnowledgeSpaceRequest
from pilot.scene.base_chat import BaseChat
from pilot.scene.base import ChatScene
from pilot.scene.chat_factory import ChatFactory
from pilot.common.schema import DBType
from pilot.scene.message import OnceConversation
from pilot.configs.model_config import LLM_MODEL_CONFIG, KNOWLEDGE_UPLOAD_ROOT_PATH
from pilot.summary.db_summary_client import DBSummaryClient
from pilot.memory.chat_history.chat_hisotry_factory import ChatHistory
from pilot.model.cluster import BaseModelController, WorkerManager, WorkerManagerFactory
from pilot.model.base import FlatSupportedModel
from pilot.utils.tracer import root_tracer, SpanType
from pilot.utils.executor_utils import (
from dbgpt.app.scene import BaseChat, ChatScene, ChatFactory
from dbgpt.core.interface.message import OnceConversation
from dbgpt.configs.model_config import KNOWLEDGE_UPLOAD_ROOT_PATH
from dbgpt.rag.summary.db_summary_client import DBSummaryClient
from dbgpt.storage.chat_history.chat_hisotry_factory import ChatHistory
from dbgpt.model.cluster import BaseModelController, WorkerManager, WorkerManagerFactory
from dbgpt.model.base import FlatSupportedModel
from dbgpt.util.tracer import root_tracer, SpanType
from dbgpt.util.executor_utils import (
ExecutorFactory,
blocking_func_to_async,
DefaultExecutorFactory,

View File

@@ -8,14 +8,14 @@ from fastapi import (
from typing import List
import logging
from pilot.configs.config import Config
from dbgpt._private.config import Config
from pilot.scene.chat_factory import ChatFactory
from dbgpt.app.scene import ChatFactory
from pilot.openapi.api_view_model import (
from dbgpt.app.openapi.api_view_model import (
Result,
)
from pilot.openapi.editor_view_model import (
from dbgpt.app.openapi.editor_view_model import (
ChatDbRounds,
ChartList,
ChartDetail,
@@ -24,11 +24,15 @@ from pilot.openapi.editor_view_model import (
DbTable,
)
from pilot.openapi.api_v1.editor.sql_editor import DataNode, ChartRunData, SqlRunData
from pilot.scene.message import OnceConversation
from pilot.scene.chat_dashboard.data_loader import DashboardDataLoader
from pilot.scene.chat_db.data_loader import DbDataLoader
from pilot.memory.chat_history.chat_hisotry_factory import ChatHistory
from dbgpt.app.openapi.api_v1.editor.sql_editor import (
DataNode,
ChartRunData,
SqlRunData,
)
from dbgpt.core.interface.message import OnceConversation
from dbgpt.app.scene.chat_dashboard.data_loader import DashboardDataLoader
from dbgpt.app.scene.chat_db.data_loader import DbDataLoader
from dbgpt.storage.chat_history.chat_hisotry_factory import ChatHistory
router = APIRouter()
CFG = Config()
@@ -56,8 +60,8 @@ async def get_editor_tables(
key=field[0],
type=field[1],
default_value=field[2],
can_null=field[3],
comment=field[-1],
can_null=field[3] or "YES",
comment=str(field[-1]),
)
)
@@ -145,7 +149,7 @@ async def sql_editor_submit(sql_edit_context: ChatSqlEditContext = Body()):
logger.info(f"sql_editor_submit:{sql_edit_context.__dict__}")
chat_history_fac = ChatHistory()
history_mem = chat_history_fac.get_store_instance(sql_edit_context.con_uid)
history_mem = chat_history_fac.get_store_instance(sql_edit_context.conv_uid)
history_messages: List[OnceConversation] = history_mem.get_messages()
if history_messages:
conn = CFG.LOCAL_DB_MANAGE.get_connect(sql_edit_context.db_name)

View File

@@ -1,6 +1,6 @@
from typing import List
from pydantic import BaseModel, Field, root_validator, validator, Extra
from pilot.scene.chat_dashboard.data_preparation.report_schma import ValueItem
from dbgpt._private.pydantic import BaseModel
from dbgpt.app.scene.chat_dashboard.data_preparation.report_schma import ValueItem
class DataNode(BaseModel):

View File

@@ -1,11 +1,10 @@
from fastapi import APIRouter, Body, Request
from pilot.openapi.api_v1.feedback.feed_back_model import FeedBackBody
from pilot.openapi.api_v1.feedback.feed_back_db import (
from dbgpt.app.openapi.api_v1.feedback.feed_back_model import FeedBackBody
from dbgpt.app.openapi.api_v1.feedback.feed_back_db import (
ChatFeedBackDao,
ChatFeedBackEntity,
)
from pilot.openapi.api_view_model import Result
from dbgpt.app.openapi.api_view_model import Result
router = APIRouter()
chat_feed_back = ChatFeedBackDao()

View File

@@ -2,14 +2,14 @@ from datetime import datetime
from sqlalchemy import Column, Integer, Text, String, DateTime
from pilot.base_modules.meta_data.base_dao import BaseDao
from pilot.base_modules.meta_data.meta_data import (
from dbgpt.storage.metadata import BaseDao
from dbgpt.storage.metadata.meta_data import (
Base,
engine,
session,
META_DATA_DATABASE,
)
from pilot.openapi.api_v1.feedback.feed_back_model import FeedBackBody
from dbgpt.app.openapi.api_v1.feedback.feed_back_model import FeedBackBody
class ChatFeedBackEntity(Base):

View File

@@ -1,4 +1,4 @@
from pydantic.main import BaseModel
from dbgpt._private.pydantic import BaseModel
from typing import Optional

View File

@@ -1,4 +1,4 @@
from pydantic import BaseModel, Field
from dbgpt._private.pydantic import BaseModel, Field
from typing import TypeVar, Generic, Any, Optional, Literal, List
import uuid
import time

View File

@@ -1,6 +1,6 @@
from fastapi import Request
from fastapi.exceptions import RequestValidationError
from pilot.openapi.api_view_model import Result
from dbgpt.app.openapi.api_view_model import Result
async def validation_exception_handler(request: Request, exc: RequestValidationError):

View File

@@ -1,4 +1,4 @@
from pydantic import BaseModel, Field
from dbgpt._private.pydantic import BaseModel, Field
from typing import List, Any

View File

@@ -1,8 +1,8 @@
from fastapi import APIRouter, File, UploadFile, Form
from fastapi import APIRouter
from pilot.openapi.api_view_model import Result
from pilot.server.prompt.service import PromptManageService
from pilot.server.prompt.request.request import PromptManageRequest
from dbgpt.app.openapi.api_view_model import Result
from dbgpt.app.prompt.service import PromptManageService
from dbgpt.app.prompt.request.request import PromptManageRequest
router = APIRouter()

View File

@@ -2,16 +2,16 @@ from datetime import datetime
from sqlalchemy import Column, Integer, Text, String, DateTime
from pilot.base_modules.meta_data.base_dao import BaseDao
from pilot.base_modules.meta_data.meta_data import (
from dbgpt.storage.metadata import BaseDao
from dbgpt.storage.metadata.meta_data import (
Base,
engine,
session,
META_DATA_DATABASE,
)
from pilot.configs.config import Config
from dbgpt._private.config import Config
from pilot.server.prompt.request.request import PromptManageRequest
from dbgpt.app.prompt.request.request import PromptManageRequest
CFG = Config()

View File

@@ -1,8 +1,8 @@
from typing import List
from pydantic import BaseModel
from dbgpt._private.pydantic import BaseModel
from typing import Optional
from pydantic import BaseModel
from dbgpt._private.pydantic import BaseModel
class PromptManageRequest(BaseModel):

View File

@@ -1,5 +1,5 @@
from typing import List
from pydantic import BaseModel
from dbgpt._private.pydantic import BaseModel
class PromptQueryResponse(BaseModel):

View File

@@ -1,8 +1,8 @@
from datetime import datetime
from pilot.server.prompt.request.request import PromptManageRequest
from pilot.server.prompt.request.response import PromptQueryResponse
from pilot.server.prompt.prompt_manage_db import PromptManageDao, PromptManageEntity
from dbgpt.app.prompt.request.request import PromptManageRequest
from dbgpt.app.prompt.request.response import PromptQueryResponse
from dbgpt.app.prompt.prompt_manage_db import PromptManageDao, PromptManageEntity
prompt_manage_dao = PromptManageDao()

View File

@@ -0,0 +1,3 @@
from dbgpt.app.scene.base_chat import BaseChat
from dbgpt.app.scene.chat_factory import ChatFactory
from dbgpt.app.scene.base import ChatScene

View File

@@ -6,18 +6,18 @@ import logging
from abc import ABC, abstractmethod
from typing import Any, List, Dict
from pilot.configs.config import Config
from pilot.component import ComponentType
from pilot.prompts.prompt_new import PromptTemplate
from pilot.scene.base_message import ModelMessage, ModelMessageRoleType
from pilot.scene.message import OnceConversation
from pilot.utils import get_or_create_event_loop
from pilot.utils.executor_utils import ExecutorFactory, blocking_func_to_async
from pilot.utils.tracer import root_tracer, trace
from pydantic import Extra
from pilot.memory.chat_history.chat_hisotry_factory import ChatHistory
from pilot.awel import BaseOperator, SimpleCallDataInputSource, InputOperator, DAG
from pilot.model.operator.model_operator import ModelOperator, ModelStreamOperator
from dbgpt._private.config import Config
from dbgpt.component import ComponentType
from dbgpt.core.interface.prompt import PromptTemplate
from dbgpt.core.interface.message import ModelMessage, ModelMessageRoleType
from dbgpt.core.interface.message import OnceConversation
from dbgpt.util import get_or_create_event_loop
from dbgpt.util.executor_utils import ExecutorFactory, blocking_func_to_async
from dbgpt.util.tracer import root_tracer, trace
from dbgpt._private.pydantic import Extra
from dbgpt.storage.chat_history.chat_hisotry_factory import ChatHistory
from dbgpt.core.awel import BaseOperator, SimpleCallDataInputSource, InputOperator, DAG
from dbgpt.model.operator.model_operator import ModelOperator, ModelStreamOperator
logger = logging.getLogger(__name__)
headers = {"User-Agent": "dbgpt Client"}
@@ -535,16 +535,16 @@ def _build_model_operator(
Returns:
BaseOperator: The final operator in the constructed DAG, typically a join node.
"""
from pilot.model.cluster import WorkerManagerFactory
from pilot.awel import JoinOperator
from pilot.model.operator.model_operator import (
from dbgpt.model.cluster import WorkerManagerFactory
from dbgpt.core.awel import JoinOperator
from dbgpt.model.operator.model_operator import (
ModelCacheBranchOperator,
CachedModelStreamOperator,
CachedModelOperator,
ModelSaveCacheOperator,
ModelStreamSaveCacheOperator,
)
from pilot.cache import CacheManager
from dbgpt.storage.cache import CacheManager
# Fetch worker and cache managers from the system configuration
worker_manager = CFG.SYSTEM_APP.get_component(

View File

@@ -1,17 +1,13 @@
from typing import List, Dict
import logging
from pilot.scene.base_chat import BaseChat
from pilot.scene.base import ChatScene
from pilot.configs.config import Config
from pilot.base_modules.agent.commands.command import execute_command
from pilot.base_modules.agent.commands.command_mange import ApiCall
from pilot.base_modules.agent import PluginPromptGenerator
from pilot.common.string_utils import extract_content
from .prompt import prompt
from pilot.component import ComponentType
from pilot.base_modules.agent.controller import ModuleAgent
from pilot.utils.tracer import root_tracer, trace
from dbgpt.app.scene import BaseChat, ChatScene
from dbgpt._private.config import Config
from dbgpt.agent.commands.command_mange import ApiCall
from dbgpt.agent import PluginPromptGenerator
from dbgpt.component import ComponentType
from dbgpt.agent.controller import ModuleAgent
from dbgpt.util.tracer import root_tracer, trace
CFG = Config()

View File

@@ -1,4 +1,4 @@
from pilot.prompts.example_base import ExampleSelector
from dbgpt.core._private.example_base import ExampleSelector
## Two examples are defined by default
EXAMPLES = [

View File

@@ -1,6 +1,5 @@
import json
from typing import Dict, NamedTuple
from pilot.out_parser.base import BaseOutputParser, T
from dbgpt.core.interface.output_parser import BaseOutputParser
class PluginAction(NamedTuple):

View File

@@ -1,11 +1,8 @@
import json
from pilot.prompts.prompt_new import PromptTemplate
from pilot.configs.config import Config
from pilot.scene.base import ChatScene
from pilot.common.schema import SeparatorStyle, ExampleType
from dbgpt.core.interface.prompt import PromptTemplate
from dbgpt._private.config import Config
from dbgpt.app.scene import ChatScene
from pilot.scene.chat_execution.out_parser import PluginChatOutputParser
from pilot.scene.chat_execution.example import plugin_example
from dbgpt.app.scene.chat_execution.out_parser import PluginChatOutputParser
CFG = Config()
@@ -65,8 +62,6 @@ _PROMPT_SCENE_DEFINE = (
RESPONSE_FORMAT = None
EXAMPLE_TYPE = ExampleType.ONE_SHOT
PROMPT_SEP = SeparatorStyle.SINGLE.value
### Whether the model service is streaming output
PROMPT_NEED_STREAM_OUT = True
@@ -77,9 +72,7 @@ prompt = PromptTemplate(
template_define=_PROMPT_SCENE_DEFINE,
template=_DEFAULT_TEMPLATE,
stream_out=PROMPT_NEED_STREAM_OUT,
output_parser=PluginChatOutputParser(
sep=PROMPT_SEP, is_stream_out=PROMPT_NEED_STREAM_OUT
),
output_parser=PluginChatOutputParser(is_stream_out=PROMPT_NEED_STREAM_OUT),
temperature=1
# example_selector=plugin_example,
)

View File

@@ -3,17 +3,15 @@ import os
import uuid
from typing import List, Dict
from pilot.scene.base_chat import BaseChat
from pilot.scene.base import ChatScene
from pilot.configs.config import Config
from pilot.scene.chat_dashboard.data_preparation.report_schma import (
from dbgpt.app.scene import BaseChat, ChatScene
from dbgpt._private.config import Config
from dbgpt.app.scene.chat_dashboard.data_preparation.report_schma import (
ChartData,
ReportData,
)
from pilot.scene.chat_dashboard.prompt import prompt
from pilot.scene.chat_dashboard.data_loader import DashboardDataLoader
from pilot.utils.executor_utils import blocking_func_to_async
from pilot.utils.tracer import root_tracer, trace
from dbgpt.app.scene.chat_dashboard.data_loader import DashboardDataLoader
from dbgpt.util.executor_utils import blocking_func_to_async
from dbgpt.util.tracer import trace
CFG = Config()
@@ -57,7 +55,7 @@ class ChatDashboard(BaseChat):
@trace()
async def generate_input_values(self) -> Dict:
try:
from pilot.summary.db_summary_client import DBSummaryClient
from dbgpt.rag.summary.db_summary_client import DBSummaryClient
except ImportError:
raise ValueError("Could not import DBSummaryClient. ")

View File

@@ -2,8 +2,8 @@ from typing import List
from decimal import Decimal
import logging
from pilot.configs.config import Config
from pilot.scene.chat_dashboard.data_preparation.report_schma import ValueItem
from dbgpt._private.config import Config
from dbgpt.app.scene.chat_dashboard.data_preparation.report_schma import ValueItem
CFG = Config()
logger = logging.getLogger(__name__)

View File

@@ -1,4 +1,4 @@
from pydantic import BaseModel
from dbgpt._private.pydantic import BaseModel
from typing import List, Any

View File

@@ -2,8 +2,8 @@ import json
import logging
from typing import NamedTuple, List
from pilot.out_parser.base import BaseOutputParser, T
from pilot.scene.base import ChatScene
from dbgpt.core.interface.output_parser import BaseOutputParser
from dbgpt.app.scene import ChatScene
class ChartItem(NamedTuple):
@@ -17,8 +17,8 @@ logger = logging.getLogger(__name__)
class ChatDashboardOutputParser(BaseOutputParser):
def __init__(self, sep: str, is_stream_out: bool):
super().__init__(sep=sep, is_stream_out=is_stream_out)
def __init__(self, is_stream_out: bool, **kwargs):
super().__init__(is_stream_out=is_stream_out, **kwargs)
def parse_prompt_response(self, model_out_text):
clean_str = super().parse_prompt_response(model_out_text)

View File

@@ -1,9 +1,8 @@
import json
from pilot.prompts.prompt_new import PromptTemplate
from pilot.configs.config import Config
from pilot.scene.base import ChatScene
from pilot.scene.chat_dashboard.out_parser import ChatDashboardOutputParser, ChartItem
from pilot.common.schema import SeparatorStyle
from dbgpt.core.interface.prompt import PromptTemplate
from dbgpt._private.config import Config
from dbgpt.app.scene import ChatScene
from dbgpt.app.scene.chat_dashboard.out_parser import ChatDashboardOutputParser
CFG = Config()
@@ -40,7 +39,6 @@ RESPONSE_FORMAT = [
}
]
PROMPT_SEP = SeparatorStyle.SINGLE.value
PROMPT_NEED_STREAM_OUT = False
@@ -51,8 +49,6 @@ prompt = PromptTemplate(
template_define=PROMPT_SCENE_DEFINE,
template=_DEFAULT_TEMPLATE,
stream_out=PROMPT_NEED_STREAM_OUT,
output_parser=ChatDashboardOutputParser(
sep=PROMPT_SEP, is_stream_out=PROMPT_NEED_STREAM_OUT
),
output_parser=ChatDashboardOutputParser(is_stream_out=PROMPT_NEED_STREAM_OUT),
)
CFG.prompt_template_registry.register(prompt, is_default=True)

View File

@@ -1,23 +1,20 @@
import json
import os
import asyncio
import logging
from typing import List, Any, Dict
from pilot.scene.base_chat import BaseChat, logger
from pilot.scene.base import ChatScene
from pilot.common.sql_database import Database
from pilot.configs.config import Config
from pilot.base_modules.agent.commands.command_mange import ApiCall
from pilot.scene.chat_data.chat_excel.excel_analyze.prompt import prompt
from pilot.scene.chat_data.chat_excel.excel_reader import ExcelReader
from pilot.scene.chat_data.chat_excel.excel_learning.chat import ExcelLearning
from pilot.common.path_utils import has_path
from pilot.configs.model_config import LLM_MODEL_CONFIG, KNOWLEDGE_UPLOAD_ROOT_PATH
from pilot.base_modules.agent.common.schema import Status
from pilot.utils.tracer import root_tracer, trace
from typing import Dict
from dbgpt.app.scene import BaseChat, ChatScene
from dbgpt._private.config import Config
from dbgpt.agent.commands.command_mange import ApiCall
from dbgpt.app.scene.chat_data.chat_excel.excel_reader import ExcelReader
from dbgpt.app.scene.chat_data.chat_excel.excel_learning.chat import ExcelLearning
from dbgpt.util.path_utils import has_path
from dbgpt.configs.model_config import KNOWLEDGE_UPLOAD_ROOT_PATH
from dbgpt.util.tracer import root_tracer, trace
CFG = Config()
logger = logging.getLogger(__name__)
class ChatExcel(BaseChat):
"""a Excel analyzer to analyze Excel Data"""

View File

@@ -1,8 +1,8 @@
import json
import logging
from typing import Dict, NamedTuple, List
from pilot.out_parser.base import BaseOutputParser, T
from pilot.configs.config import Config
from typing import NamedTuple
from dbgpt.core.interface.output_parser import BaseOutputParser
from dbgpt._private.config import Config
CFG = Config()
@@ -17,8 +17,8 @@ logger = logging.getLogger(__name__)
class ChatExcelOutputParser(BaseOutputParser):
def __init__(self, sep: str, is_stream_out: bool):
super().__init__(sep=sep, is_stream_out=is_stream_out)
def __init__(self, is_stream_out: bool, **kwargs):
super().__init__(is_stream_out=is_stream_out, **kwargs)
def parse_prompt_response(self, model_out_text):
clean_str = super().parse_prompt_response(model_out_text)

View File

@@ -1,11 +1,9 @@
import json
from pilot.prompts.prompt_new import PromptTemplate
from pilot.configs.config import Config
from pilot.scene.base import ChatScene
from pilot.scene.chat_data.chat_excel.excel_analyze.out_parser import (
from dbgpt.core.interface.prompt import PromptTemplate
from dbgpt._private.config import Config
from dbgpt.app.scene import ChatScene
from dbgpt.app.scene.chat_data.chat_excel.excel_analyze.out_parser import (
ChatExcelOutputParser,
)
from pilot.common.schema import SeparatorStyle
CFG = Config()
@@ -53,7 +51,6 @@ _PROMPT_SCENE_DEFINE = (
_PROMPT_SCENE_DEFINE_EN if CFG.LANGUAGE == "en" else _PROMPT_SCENE_DEFINE_ZH
)
PROMPT_SEP = SeparatorStyle.SINGLE.value
PROMPT_NEED_STREAM_OUT = True
@@ -68,9 +65,7 @@ prompt = PromptTemplate(
template_define=_PROMPT_SCENE_DEFINE,
template=_DEFAULT_TEMPLATE,
stream_out=PROMPT_NEED_STREAM_OUT,
output_parser=ChatExcelOutputParser(
sep=PROMPT_SEP, is_stream_out=PROMPT_NEED_STREAM_OUT
),
output_parser=ChatExcelOutputParser(is_stream_out=PROMPT_NEED_STREAM_OUT),
need_historical_messages=True,
# example_selector=sql_data_example,
temperature=PROMPT_TEMPERATURE,

View File

@@ -1,18 +1,11 @@
import json
from typing import Any, Dict
from pilot.scene.base_message import HumanMessage, ViewMessage, AIMessage
from pilot.scene.base_chat import BaseChat
from pilot.scene.base import ChatScene
from pilot.common.sql_database import Database
from pilot.configs.config import Config
from pilot.scene.chat_data.chat_excel.excel_learning.prompt import prompt
from pilot.scene.chat_data.chat_excel.excel_reader import ExcelReader
from pilot.json_utils.utilities import DateTimeEncoder
from pilot.utils.executor_utils import blocking_func_to_async
from pilot.utils.tracer import root_tracer, trace
CFG = Config()
from dbgpt.core.interface.message import ViewMessage, AIMessage
from dbgpt.app.scene import BaseChat, ChatScene
from dbgpt.util.json_utils import DateTimeEncoder
from dbgpt.util.executor_utils import blocking_func_to_async
from dbgpt.util.tracer import trace
class ExcelLearning(BaseChat):

View File

@@ -1,10 +1,7 @@
import json
import logging
from typing import Dict, NamedTuple, List
from pilot.out_parser.base import BaseOutputParser, T
from pilot.configs.config import Config
CFG = Config()
from typing import NamedTuple, List
from dbgpt.core.interface.output_parser import BaseOutputParser
class ExcelResponse(NamedTuple):
@@ -17,8 +14,8 @@ logger = logging.getLogger(__name__)
class LearningExcelOutputParser(BaseOutputParser):
def __init__(self, sep: str, is_stream_out: bool):
super().__init__(sep=sep, is_stream_out=is_stream_out)
def __init__(self, is_stream_out: bool, **kwargs):
super().__init__(is_stream_out=is_stream_out, **kwargs)
self.is_downgraded = False
def parse_prompt_response(self, model_out_text):

View File

@@ -1,11 +1,10 @@
import json
from pilot.prompts.prompt_new import PromptTemplate
from pilot.configs.config import Config
from pilot.scene.base import ChatScene
from pilot.scene.chat_data.chat_excel.excel_learning.out_parser import (
from dbgpt.core.interface.prompt import PromptTemplate
from dbgpt._private.config import Config
from dbgpt.app.scene import ChatScene
from dbgpt.app.scene.chat_data.chat_excel.excel_learning.out_parser import (
LearningExcelOutputParser,
)
from pilot.common.schema import SeparatorStyle
CFG = Config()
@@ -66,8 +65,6 @@ PROMPT_SCENE_DEFINE = (
)
PROMPT_SEP = SeparatorStyle.SINGLE.value
PROMPT_NEED_STREAM_OUT = False
# Temperature is a configuration hyperparameter that controls the randomness of language model output.
@@ -82,9 +79,7 @@ prompt = PromptTemplate(
template_define=PROMPT_SCENE_DEFINE,
template=_DEFAULT_TEMPLATE,
stream_out=PROMPT_NEED_STREAM_OUT,
output_parser=LearningExcelOutputParser(
sep=PROMPT_SEP, is_stream_out=PROMPT_NEED_STREAM_OUT
),
output_parser=LearningExcelOutputParser(is_stream_out=PROMPT_NEED_STREAM_OUT),
# example_selector=sql_data_example,
temperature=PROMPT_TEMPERATURE,
)

Some files were not shown because too many files have changed in this diff Show More