feat(core): Support i18n (#1327)

Fangyin Cheng authored 2024-03-25 20:15:39 +08:00, committed by GitHub
parent fa06be64c1, commit fcc325d411
179 changed files with 12052 additions and 69512 deletions

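Every hunk in this commit makes the same kind of change: user-facing strings passed to register_resource, Parameter.build_from, IOField.build_from, and ViewMetadata are wrapped in _(), imported from the new dbgpt.util.i18n_utils module. That module itself does not appear in this view. What follows is a minimal sketch of what such a gettext-based helper typically looks like; the "dbgpt" domain, the LOCALE_DIR layout, and set_default_language are illustrative assumptions, not code from this commit.

# Hypothetical sketch of a gettext-based i18n helper; NOT the actual
# dbgpt.util.i18n_utils implementation shipped in this commit.
import gettext
import os
from typing import Callable

# Assumed catalog layout: locales/<lang>/LC_MESSAGES/dbgpt.mo
LOCALE_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "locales")
DOMAIN = "dbgpt"  # assumed gettext domain


def set_default_language(language: str) -> Callable[[str], str]:
    """Load the catalog for `language` and return its translation function."""
    translation = gettext.translation(
        DOMAIN, localedir=LOCALE_DIR, languages=[language], fallback=True
    )
    return translation.gettext


# Call sites then do: from dbgpt.util.i18n_utils import _
_ = set_default_language(os.getenv("LANGUAGE", "en_US"))

Catalogs for such a helper are produced by scanning the source tree for _() call sites (for example with xgettext or Babel's pybabel extract) and compiling the resulting .po files into .mo files.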
View File

@@ -13,22 +13,25 @@ from dbgpt.core.interface.llm import (
 )
 from dbgpt.model.cluster.manager_base import WorkerManager
 from dbgpt.model.parameter import WorkerType
+from dbgpt.util.i18n_utils import _


 @register_resource(
-    label="Default LLM Client",
+    label=_("Default LLM Client"),
     name="default_llm_client",
     category=ResourceCategory.LLM_CLIENT,
-    description="Default LLM client(Connect to your DB-GPT model serving)",
+    description=_("Default LLM client(Connect to your DB-GPT model serving)"),
     parameters=[
         Parameter.build_from(
-            "Auto Convert Message",
+            _("Auto Convert Message"),
             name="auto_convert_message",
             type=bool,
             optional=True,
             default=False,
-            description="Whether to auto convert the messages that are not supported "
-            "by the LLM to a compatible format",
+            description=_(
+                "Whether to auto convert the messages that are not supported "
+                "by the LLM to a compatible format"
+            ),
         )
     ],
 )
@@ -107,27 +110,29 @@ class DefaultLLMClient(LLMClient):
 @register_resource(
-    label="Remote LLM Client",
+    label=_("Remote LLM Client"),
     name="remote_llm_client",
     category=ResourceCategory.LLM_CLIENT,
-    description="Remote LLM client(Connect to the remote DB-GPT model serving)",
+    description=_("Remote LLM client(Connect to the remote DB-GPT model serving)"),
     parameters=[
         Parameter.build_from(
-            "Controller Address",
+            _("Controller Address"),
             name="controller_address",
             type=str,
             optional=True,
-            default="http://127.0.0.1:8000",
-            description="Model controller address",
+            default=_("http://127.0.0.1:8000"),
+            description=_("Model controller address"),
         ),
         Parameter.build_from(
-            "Auto Convert Message",
+            _("Auto Convert Message"),
             name="auto_convert_message",
             type=bool,
             optional=True,
             default=False,
-            description="Whether to auto convert the messages that are not supported "
-            "by the LLM to a compatible format",
+            description=_(
+                "Whether to auto convert the messages that are not supported "
+                "by the LLM to a compatible format"
+            ),
         ),
     ],
 )

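Worth noting in both description fields above: text split across two adjacent string literals stays split inside _(...). Python concatenates adjacent literals at parse time, so _() receives a single full-sentence msgid, and extraction tools record it that way, not as two fragments. A minimal demonstration, using a stand-in _ that just echoes its argument:

# Adjacent string literals are concatenated by the parser before the call,
# so _() sees exactly one msgid.
def _(message: str) -> str:  # stand-in for the real translation function
    print(repr(message))
    return message


_(
    "Whether to auto convert the messages that are not supported "
    "by the LLM to a compatible format"
)
# Prints a single string:
# 'Whether to auto convert the messages that are not supported by the LLM to a compatible format'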
View File

@@ -10,10 +10,10 @@ from dbgpt.core.awel.flow import (
     OperatorCategory,
     OperatorType,
     Parameter,
-    ResourceCategory,
     ViewMetadata,
 )
 from dbgpt.core.operators import BaseLLM, BaseLLMOperator, BaseStreamingLLMOperator
+from dbgpt.util.i18n_utils import _

 logger = logging.getLogger(__name__)
@@ -63,31 +63,34 @@ class LLMOperator(MixinLLMOperator, BaseLLMOperator):
     """

     metadata = ViewMetadata(
-        label="LLM Operator",
+        label=_("LLM Operator"),
         name="llm_operator",
         category=OperatorCategory.LLM,
-        description="The LLM operator.",
+        description=_("The LLM operator."),
         parameters=[
             Parameter.build_from(
-                "LLM Client",
+                _("LLM Client"),
                 "llm_client",
                 LLMClient,
                 optional=True,
                 default=None,
-                description="The LLM Client.",
+                description=_("The LLM Client."),
             ),
         ],
         inputs=[
             IOField.build_from(
-                "Model Request", "model_request", ModelRequest, "The model request."
+                _("Model Request"),
+                "model_request",
+                ModelRequest,
+                _("The model request."),
             )
         ],
         outputs=[
             IOField.build_from(
-                "Model Output",
+                _("Model Output"),
                 "model_output",
                 ModelOutput,
-                description="The model output.",
+                description=_("The model output."),
             )
         ],
     )
@@ -107,32 +110,35 @@ class StreamingLLMOperator(MixinLLMOperator, BaseStreamingLLMOperator):
     """

     metadata = ViewMetadata(
-        label="Streaming LLM Operator",
+        label=_("Streaming LLM Operator"),
         name="streaming_llm_operator",
         operator_type=OperatorType.STREAMIFY,
         category=OperatorCategory.LLM,
-        description="The streaming LLM operator.",
+        description=_("The streaming LLM operator."),
         parameters=[
             Parameter.build_from(
-                "LLM Client",
+                _("LLM Client"),
                 "llm_client",
                 LLMClient,
                 optional=True,
                 default=None,
-                description="The LLM Client.",
+                description=_("The LLM Client."),
             ),
         ],
         inputs=[
             IOField.build_from(
-                "Model Request", "model_request", ModelRequest, "The model request."
+                _("Model Request"),
+                "model_request",
+                ModelRequest,
+                _("The model request."),
             )
         ],
         outputs=[
             IOField.build_from(
-                "Model Output",
+                _("Model Output"),
                 "model_output",
                 ModelOutput,
-                description="The model output.",
+                description=_("The model output."),
                 is_list=True,
             )
         ],

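The ViewMetadata blocks above are class attributes, so every _() here runs once, at import time, against whatever catalog is active at that moment. A short sketch of that lookup; the "dbgpt" domain, the "locales" directory, and the zh_CN locale are assumptions, not values from this commit:

# Sketch: how a label such as _("LLM Operator") resolves at import time.
import gettext

translation = gettext.translation(
    "dbgpt",              # assumed gettext domain
    localedir="locales",  # assumed catalog directory
    languages=["zh_CN"],
    fallback=True,        # no catalog or entry -> return the msgid unchanged
)
_ = translation.gettext

# Translated if the active catalog has this msgid, else the English source.
print(_("LLM Operator"))

A consequence of import-time evaluation is that the language has to be selected before these operator modules are imported; the labels are baked into the registered metadata at that point.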
View File

@@ -17,6 +17,7 @@ from dbgpt.model.parameter import ProxyModelParameters
 from dbgpt.model.proxy.base import ProxyLLMClient
 from dbgpt.model.proxy.llms.proxy_model import ProxyModel
 from dbgpt.model.utils.chatgpt_utils import OpenAIParameters
+from dbgpt.util.i18n_utils import _

 if TYPE_CHECKING:
     from httpx._types import ProxiesTypes
@@ -44,27 +45,31 @@ async def chatgpt_generate_stream(
 @register_resource(
-    label="OpenAI LLM Client",
+    label=_("OpenAI LLM Client"),
     name="openai_llm_client",
     category=ResourceCategory.LLM_CLIENT,
     parameters=[
         Parameter.build_from(
-            label="OpenAI API Key",
+            label=_("OpenAI API Key"),
             name="apk_key",
             type=str,
             optional=True,
             default=None,
-            description="OpenAI API Key, not required if you have set OPENAI_API_KEY "
-            "environment variable.",
+            description=_(
+                "OpenAI API Key, not required if you have set OPENAI_API_KEY "
+                "environment variable."
+            ),
         ),
         Parameter.build_from(
-            label="OpenAI API Base",
+            label=_("OpenAI API Base"),
             name="api_base",
             type=str,
             optional=True,
             default=None,
-            description="OpenAI API Base, not required if you have set OPENAI_API_BASE "
-            "environment variable.",
+            description=_(
+                "OpenAI API Base, not required if you have set OPENAI_API_BASE "
+                "environment variable."
+            ),
         ),
     ],
     documentation_url="https://github.com/openai/openai-python",

View File

@@ -19,6 +19,7 @@ from dbgpt.core.awel import TransformStreamAbsOperator
 from dbgpt.core.awel.flow import IOField, OperatorCategory, OperatorType, ViewMetadata
 from dbgpt.core.interface.llm import ModelOutput
 from dbgpt.core.operators import BaseLLM
+from dbgpt.util.i18n_utils import _

 if TYPE_CHECKING:
     import httpx
@@ -154,28 +155,30 @@ class OpenAIStreamingOutputOperator(TransformStreamAbsOperator[ModelOutput, str]
     """Transform ModelOutput to openai stream format."""

     metadata = ViewMetadata(
-        label="OpenAI Streaming Output Operator",
+        label=_("OpenAI Streaming Output Operator"),
         name="openai_streaming_output_operator",
         operator_type=OperatorType.TRANSFORM_STREAM,
         category=OperatorCategory.OUTPUT_PARSER,
-        description="The OpenAI streaming LLM operator.",
+        description=_("The OpenAI streaming LLM operator."),
         parameters=[],
         inputs=[
             IOField.build_from(
-                "Upstream Model Output",
+                _("Upstream Model Output"),
                 "model_output",
                 ModelOutput,
                 is_list=True,
-                description="The model output of upstream.",
+                description=_("The model output of upstream."),
             )
         ],
         outputs=[
             IOField.build_from(
-                "Model Output",
+                _("Model Output"),
                 "model_output",
                 str,
                 is_list=True,
-                description="The model output after transform to openai stream format",
+                description=_(
+                    "The model output after transform to openai stream format"
+                ),
             )
         ],
     )
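
Finally, the fallback behavior every call site above relies on can be shown without any compiled catalog: a translation object hands back the msgid unchanged when nothing matches. A self-contained sketch with an in-memory catalog; the Chinese rendering is purely illustrative:

# In-memory stand-in for a compiled .mo catalog, to show fallback behavior.
import gettext


class DictTranslations(gettext.NullTranslations):
    CATALOG = {
        "OpenAI Streaming Output Operator": "OpenAI 流式输出算子",  # illustrative
    }

    def gettext(self, message: str) -> str:
        # Fall back to the msgid itself when there is no translation.
        return self.CATALOG.get(message, message)


_ = DictTranslations().gettext
print(_("OpenAI Streaming Output Operator"))  # translated
print(_("Model Output"))                      # no entry -> English msgid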