Mirror of https://github.com/csunny/DB-GPT.git (synced 2025-09-17 23:18:20 +00:00)
feat(core): APP use new SDK component (#1050)
@@ -1,9 +1,11 @@
-import requests
 import json
 from typing import List
-from dbgpt.model.proxy.llms.proxy_model import ProxyModel
+
+import requests
+
 from dbgpt.core.interface.message import ModelMessage, ModelMessageRoleType
 from dbgpt.model.parameter import ProxyModelParameters
+from dbgpt.model.proxy.llms.proxy_model import ProxyModel
 
 BAICHUAN_DEFAULT_MODEL = "Baichuan2-Turbo-192k"
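Each hunk in this commit applies the same import normalization: standard-library imports first, third-party packages next, first-party dbgpt imports last, with each group alphabetized and separated by a blank line. A minimal sketch of the layout the hunks converge on, assuming an isort-style grouping convention (the formatting tool itself is not shown in this diff):

# Hypothetical module header illustrating the grouping rule.
import json  # 1) standard library, alphabetized
from typing import List

import requests  # 2) third-party packages

from dbgpt.core.interface.message import ModelMessage  # 3) first-party (dbgpt) imports
from dbgpt.model.proxy.llms.proxy_model import ProxyModel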
@@ -1,5 +1,7 @@
-import requests
 from typing import List
+
+import requests
+
 from dbgpt.core.interface.message import ModelMessage, ModelMessageRoleType
 from dbgpt.model.proxy.llms.proxy_model import ProxyModel
@@ -1,15 +1,17 @@
 #!/usr/bin/env python3
 # -*- coding: utf-8 -*-
 
+import importlib.metadata as metadata
+import logging
 import os
 from typing import List
-import logging
-import importlib.metadata as metadata
-from dbgpt.model.proxy.llms.proxy_model import ProxyModel
-from dbgpt.model.parameter import ProxyModelParameters
-from dbgpt.core.interface.message import ModelMessage, ModelMessageRoleType
 
 import httpx
 
+from dbgpt.core.interface.message import ModelMessage, ModelMessageRoleType
+from dbgpt.model.parameter import ProxyModelParameters
+from dbgpt.model.proxy.llms.proxy_model import ProxyModel
+
 logger = logging.getLogger(__name__)
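The hunk above keeps importlib.metadata alongside httpx, which suggests the module inspects the installed openai package version at runtime before choosing a code path. A rough sketch under that assumption; the helper name and the ">= 1.0" threshold are illustrative, not taken from this diff:

import importlib.metadata as metadata


def _openai_is_v1_or_newer() -> bool:
    # Hypothetical check: read the installed openai package version and
    # branch on its major version; returns False when it is not installed.
    try:
        version = metadata.version("openai")
    except metadata.PackageNotFoundError:
        return False
    return int(version.split(".")[0]) >= 1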
@@ -1,7 +1,7 @@
-from typing import List, Tuple, Dict, Any
+from typing import Any, Dict, List, Tuple
 
-from dbgpt.model.proxy.llms.proxy_model import ProxyModel
 from dbgpt.core.interface.message import ModelMessage, parse_model_messages
+from dbgpt.model.proxy.llms.proxy_model import ProxyModel
 
 GEMINI_DEFAULT_MODEL = "gemini-pro"
@@ -1,12 +1,13 @@
 from __future__ import annotations
 
-from typing import Union, List, Optional, TYPE_CHECKING
 import logging
+from typing import TYPE_CHECKING, List, Optional, Union
+
 from dbgpt.model.parameter import ProxyModelParameters
 from dbgpt.model.utils.token_utils import ProxyTokenizerWrapper
 
 if TYPE_CHECKING:
-    from dbgpt.core.interface.message import ModelMessage, BaseMessage
+    from dbgpt.core.interface.message import BaseMessage, ModelMessage
 
 logger = logging.getLogger(__name__)
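The `if TYPE_CHECKING:` block in this hunk imports the message classes only for static analysis, so the names are available to type checkers without being imported at runtime. A small sketch of the idiom; the function is hypothetical and only illustrates why the guard works:

from __future__ import annotations

from typing import TYPE_CHECKING, List

if TYPE_CHECKING:
    # Evaluated only by type checkers, never at runtime.
    from dbgpt.core.interface.message import ModelMessage


def count_messages(messages: List[ModelMessage]) -> int:
    # With `from __future__ import annotations`, the annotation above stays a
    # string at runtime, so the guarded import never has to execute.
    return len(messages)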
@@ -1,14 +1,15 @@
-import json
 import base64
-import hmac
 import hashlib
-from websockets.sync.client import connect
+import hmac
+import json
 from datetime import datetime
-from typing import List
 from time import mktime
-from urllib.parse import urlencode
-from urllib.parse import urlparse
+from typing import List
+from urllib.parse import urlencode, urlparse
 from wsgiref.handlers import format_date_time
 
+from websockets.sync.client import connect
+
 from dbgpt.core.interface.message import ModelMessage, ModelMessageRoleType
 from dbgpt.model.proxy.llms.proxy_model import ProxyModel
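The imports gathered here (hmac, hashlib, base64, format_date_time, urlencode/urlparse, and a synchronous websocket connect) point to an HMAC-SHA256-signed websocket URL of the kind Spark-style proxy endpoints require. A rough sketch under that assumption; the function name, header layout, and query fields are illustrative, not taken from this commit:

import base64
import hashlib
import hmac
from datetime import datetime
from time import mktime
from urllib.parse import urlencode, urlparse
from wsgiref.handlers import format_date_time


def build_signed_url(api_url: str, api_key: str, api_secret: str) -> str:
    # Illustrative only: sign a websocket URL with HMAC-SHA256 the way the
    # imports above imply; the exact fields are assumptions.
    host = urlparse(api_url).netloc
    path = urlparse(api_url).path
    # RFC 1123 date, e.g. "Mon, 01 Jan 2024 00:00:00 GMT"
    date = format_date_time(mktime(datetime.now().timetuple()))
    # Sign "host + date + request-line" with the shared secret.
    origin = f"host: {host}\ndate: {date}\nGET {path} HTTP/1.1"
    digest = hmac.new(api_secret.encode(), origin.encode(), hashlib.sha256).digest()
    signature = base64.b64encode(digest).decode()
    authorization = (
        f'api_key="{api_key}", algorithm="hmac-sha256", '
        f'headers="host date request-line", signature="{signature}"'
    )
    query = urlencode(
        {
            "authorization": base64.b64encode(authorization.encode()).decode(),
            "date": date,
            "host": host,
        }
    )
    return f"{api_url}?{query}"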
@@ -1,7 +1,8 @@
 import logging
 from typing import List
-from dbgpt.model.proxy.llms.proxy_model import ProxyModel
+
 from dbgpt.core.interface.message import ModelMessage, ModelMessageRoleType
+from dbgpt.model.proxy.llms.proxy_model import ProxyModel
 
 logger = logging.getLogger(__name__)
@@ -1,9 +1,11 @@
-import requests
 import json
 from typing import List
-from dbgpt.model.proxy.llms.proxy_model import ProxyModel
+
+import requests
+from cachetools import TTLCache, cached
+
 from dbgpt.core.interface.message import ModelMessage, ModelMessageRoleType
-from cachetools import cached, TTLCache
+from dbgpt.model.proxy.llms.proxy_model import ProxyModel
 
 
 @cached(TTLCache(1, 1800))
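The @cached(TTLCache(1, 1800)) decorator visible in this hunk keeps a single cached value alive for 1800 seconds, a common way to reuse a short-lived access token across requests. A minimal sketch with a hypothetical token fetch; the endpoint URL and parameter names are placeholders, not taken from this commit:

import requests
from cachetools import TTLCache, cached


# One cache slot with an 1800-second TTL: the decorated call runs at most
# once every 30 minutes for a given key, matching the decorator above.
@cached(TTLCache(1, 1800))
def _fetch_access_token(api_key: str, api_secret: str) -> str:
    # Hypothetical token request; replace the URL and parameters as needed.
    resp = requests.get(
        "https://example.com/oauth/token",  # placeholder endpoint
        params={"api_key": api_key, "secret_key": api_secret},
        timeout=10,
    )
    resp.raise_for_status()
    return resp.json()["access_token"]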
@@ -1,7 +1,7 @@
 from typing import List
 
-from dbgpt.model.proxy.llms.proxy_model import ProxyModel
 from dbgpt.core.interface.message import ModelMessage, ModelMessageRoleType
+from dbgpt.model.proxy.llms.proxy_model import ProxyModel
 
 CHATGLM_DEFAULT_MODEL = "chatglm_pro"
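ModelMessage and ModelMessageRoleType appear in nearly every hunk, since each proxy converts the framework's message objects into the role/content payload its upstream API expects. A sketch of that conversion, assuming ModelMessageRoleType exposes HUMAN, AI, and SYSTEM constants as its name suggests; the target dict format is illustrative only:

from typing import Dict, List

from dbgpt.core.interface.message import ModelMessage, ModelMessageRoleType


def to_wire_format(messages: List[ModelMessage]) -> List[Dict[str, str]]:
    # Assumed attribute names (role, content) and role constants; adjust to
    # the actual ModelMessage definition if they differ.
    history = []
    for message in messages:
        if message.role == ModelMessageRoleType.HUMAN:
            history.append({"role": "user", "content": message.content})
        elif message.role == ModelMessageRoleType.AI:
            history.append({"role": "assistant", "content": message.content})
        elif message.role == ModelMessageRoleType.SYSTEM:
            history.append({"role": "system", "content": message.content})
    return history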