fix(core): Fix AWEL branch bug (#1640)

Fangyin Cheng 2024-06-18 11:11:43 +08:00 committed by GitHub
parent 49b56b4576
commit ace169ac46
32 changed files with 870 additions and 481 deletions

View File

@ -13,8 +13,8 @@ from dbgpt.core import (
from dbgpt.core.awel import (
DAG,
BaseOperator,
BranchJoinOperator,
InputOperator,
JoinOperator,
MapOperator,
SimpleCallDataInputSource,
)
@ -195,9 +195,7 @@ def build_cached_chat_operator(
cache_task_name=cache_task_name,
)
# Create a join node to merge outputs from the model and cache nodes, just keep the first non-empty output
join_task = JoinOperator(
combine_function=lambda model_out, cache_out: cache_out or model_out
)
join_task = BranchJoinOperator()
# Define the workflow structure using the >> operator
input_task >> cache_check_branch_task

View File

@ -17,6 +17,7 @@ from .dag.base import DAG, DAGContext, DAGVar
from .operators.base import BaseOperator, WorkflowRunner
from .operators.common_operator import (
BranchFunc,
BranchJoinOperator,
BranchOperator,
BranchTaskType,
InputOperator,
@ -78,6 +79,7 @@ __all__ = [
"ReduceStreamOperator",
"TriggerOperator",
"MapOperator",
"BranchJoinOperator",
"BranchOperator",
"InputOperator",
"BranchFunc",

View File

@ -1,4 +1,5 @@
"""The mixin of DAGs."""
import abc
import dataclasses
import inspect
@ -337,6 +338,9 @@ class Parameter(TypeMetadata, Serializable):
value: Optional[Any] = Field(
None, description="The value of the parameter(Saved in the dag file)"
)
alias: Optional[List[str]] = Field(
None, description="The alias of the parameter(Compatible with old version)"
)
@model_validator(mode="before")
@classmethod
@ -398,6 +402,7 @@ class Parameter(TypeMetadata, Serializable):
description: Optional[str] = None,
options: Optional[Union[BaseDynamicOptions, List[OptionValue]]] = None,
resource_type: ResourceType = ResourceType.INSTANCE,
alias: Optional[List[str]] = None,
):
"""Build the parameter from the type."""
type_name = type.__qualname__
@ -419,6 +424,7 @@ class Parameter(TypeMetadata, Serializable):
placeholder=placeholder,
description=description or label,
options=options,
alias=alias,
)
@classmethod
@ -452,7 +458,7 @@ class Parameter(TypeMetadata, Serializable):
def to_dict(self) -> Dict:
"""Convert current metadata to json dict."""
dict_value = model_to_dict(self, exclude={"options"})
dict_value = model_to_dict(self, exclude={"options", "alias"})
if not self.options:
dict_value["options"] = None
elif isinstance(self.options, BaseDynamicOptions):
@ -677,9 +683,18 @@ class BaseMetadata(BaseResource):
for parameter in self.parameters
if not parameter.optional
}
current_parameters = {
parameter.name: parameter for parameter in self.parameters
}
current_parameters = {}
current_aliases_parameters = {}
for parameter in self.parameters:
current_parameters[parameter.name] = parameter
if parameter.alias:
for alias in parameter.alias:
if alias in current_aliases_parameters:
raise FlowMetadataException(
f"Alias {alias} already exists in the metadata."
)
current_aliases_parameters[alias] = parameter
if len(view_required_parameters) < len(current_required_parameters):
# TODO, skip the optional parameters.
raise FlowParameterMetadataException(
@ -691,12 +706,16 @@ class BaseMetadata(BaseResource):
)
for view_param in view_parameters:
view_param_key = view_param.name
if view_param_key not in current_parameters:
if view_param_key in current_parameters:
current_parameter = current_parameters[view_param_key]
elif view_param_key in current_aliases_parameters:
current_parameter = current_aliases_parameters[view_param_key]
else:
raise FlowParameterMetadataException(
f"Parameter {view_param_key} not in the metadata."
)
runnable_parameters.update(
current_parameters[view_param_key].to_runnable_parameter(
current_parameter.to_runnable_parameter(
view_param.get_typed_value(), resources, key_to_resource_instance
)
)
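
For illustration, a minimal standalone sketch (hypothetical names, not the dbgpt classes) of the alias resolution rule added above: a view parameter saved under an old name still resolves to the renamed parameter.

```python
from typing import Dict, List, Optional


class Param:
    """Hypothetical stand-in for Parameter: a name plus optional aliases."""

    def __init__(self, name: str, alias: Optional[List[str]] = None):
        self.name = name
        self.alias = alias or []


def resolve(view_key: str, params: List[Param]) -> Param:
    # Prefer the current name, then fall back to any registered alias.
    by_name: Dict[str, Param] = {p.name: p for p in params}
    by_alias: Dict[str, Param] = {a: p for p in params for a in p.alias}
    if view_key in by_name:
        return by_name[view_key]
    if view_key in by_alias:
        return by_alias[view_key]
    raise KeyError(f"Parameter {view_key} not in the metadata.")


params = [Param("index_store", alias=["vector_store_connector"])]
# A flow file saved with the legacy parameter name still resolves to the new one.
assert resolve("vector_store_connector", params).name == "index_store"
```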

View File

@ -1,8 +1,29 @@
"""Compatibility mapping for flow classes."""
from dataclasses import dataclass
from typing import Dict, Optional
_COMPAT_FLOW_MAPPING: Dict[str, str] = {}
@dataclass
class _RegisterItem:
"""Register item for compatibility mapping."""
old_module: str
new_module: str
old_name: str
new_name: Optional[str] = None
after: Optional[str] = None
def old_cls_key(self) -> str:
"""Get the old class key."""
return f"{self.old_module}.{self.old_name}"
def new_cls_key(self) -> str:
"""Get the new class key."""
return f"{self.new_module}.{self.new_name}"
_COMPAT_FLOW_MAPPING: Dict[str, _RegisterItem] = {}
_OLD_AGENT_RESOURCE_MODULE_1 = "dbgpt.serve.agent.team.layout.agent_operator_resource"
@ -11,17 +32,24 @@ _NEW_AGENT_RESOURCE_MODULE = "dbgpt.agent.core.plan.awel.agent_operator_resource
def _register(
old_module: str, new_module: str, old_name: str, new_name: Optional[str] = None
old_module: str,
new_module: str,
old_name: str,
new_name: Optional[str] = None,
after_version: Optional[str] = None,
):
if not new_name:
new_name = old_name
_COMPAT_FLOW_MAPPING[f"{old_module}.{old_name}"] = f"{new_module}.{new_name}"
item = _RegisterItem(old_module, new_module, old_name, new_name, after_version)
_COMPAT_FLOW_MAPPING[item.old_cls_key()] = item
def get_new_class_name(old_class_name: str) -> Optional[str]:
"""Get the new class name for the old class name."""
new_cls_name = _COMPAT_FLOW_MAPPING.get(old_class_name, None)
return new_cls_name
if old_class_name not in _COMPAT_FLOW_MAPPING:
return None
item = _COMPAT_FLOW_MAPPING[old_class_name]
return item.new_cls_key()
_register(
@ -54,3 +82,9 @@ _register(
_register(
_OLD_AGENT_RESOURCE_MODULE_2, _NEW_AGENT_RESOURCE_MODULE, "AWELAgent", "AWELAgent"
)
_register(
"dbgpt.storage.vector_store.connector",
"dbgpt.serve.rag.connector",
"VectorStoreConnector",
after_version="v0.5.8",
)
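
Assuming the registrations above, a short usage sketch of the lookup helper: a legacy class path resolves to its new module path, and unknown paths return None.

```python
# Uses get_new_class_name defined in this module; keys follow the
# registration added above.
old_key = "dbgpt.storage.vector_store.connector.VectorStoreConnector"
assert get_new_class_name(old_key) == "dbgpt.serve.rag.connector.VectorStoreConnector"

# Class paths that were never registered simply return None.
assert get_new_class_name("some.unknown.Class") is None
```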

View File

@ -555,14 +555,6 @@ class FlowFactory:
downstream = key_to_downstream.get(operator_key, [])
if not downstream:
raise ValueError("Branch operator should have downstream.")
if len(downstream) != len(view_metadata.parameters):
raise ValueError(
"Branch operator should have the same number of downstream as "
"parameters."
)
for i, param in enumerate(view_metadata.parameters):
downstream_key, _, _ = downstream[i]
param.value = key_to_operator_nodes[downstream_key].data.name
try:
runnable_params = metadata.get_runnable_parameters(

View File

@ -137,6 +137,7 @@ class BaseOperator(DAGNode, ABC, Generic[OUT], metaclass=BaseOperatorMeta):
task_name: Optional[str] = None,
dag: Optional[DAG] = None,
runner: Optional[WorkflowRunner] = None,
can_skip_in_branch: bool = True,
**kwargs,
) -> None:
"""Create a BaseOperator with an optional workflow runner.
@ -157,6 +158,7 @@ class BaseOperator(DAGNode, ABC, Generic[OUT], metaclass=BaseOperatorMeta):
self._runner: WorkflowRunner = runner
self._dag_ctx: Optional[DAGContext] = None
self._can_skip_in_branch = can_skip_in_branch
@property
def current_dag_context(self) -> DAGContext:
@ -321,6 +323,10 @@ class BaseOperator(DAGNode, ABC, Generic[OUT], metaclass=BaseOperatorMeta):
"""Get the current event loop task id."""
return id(asyncio.current_task())
def can_skip_in_branch(self) -> bool:
"""Check if the operator can be skipped in the branch."""
return self._can_skip_in_branch
def initialize_runner(runner: WorkflowRunner):
"""Initialize the default runner."""

View File

@ -16,6 +16,7 @@ from ..task.base import (
ReduceFunc,
TaskContext,
TaskOutput,
is_empty_data,
)
from .base import BaseOperator
@ -28,13 +29,16 @@ class JoinOperator(BaseOperator, Generic[OUT]):
This node type is useful for combining the outputs of upstream nodes.
"""
def __init__(self, combine_function: JoinFunc, **kwargs):
def __init__(
self, combine_function: JoinFunc, can_skip_in_branch: bool = True, **kwargs
):
"""Create a JoinDAGNode with a combine function.
Args:
combine_function: A function that defines how to combine inputs.
can_skip_in_branch(bool): Whether the node can be skipped in a branch.
"""
super().__init__(**kwargs)
super().__init__(can_skip_in_branch=can_skip_in_branch, **kwargs)
if not callable(combine_function):
raise ValueError("combine_function must be callable")
self.combine_function = combine_function
@ -57,6 +61,12 @@ class JoinOperator(BaseOperator, Generic[OUT]):
curr_task_ctx.set_task_output(join_output)
return join_output
async def _return_first_non_empty(self, *inputs):
for data in inputs:
if not is_empty_data(data):
return data
raise ValueError("All inputs are empty")
class ReduceStreamOperator(BaseOperator, Generic[IN, OUT]):
"""Operator that reduces inputs using a custom reduce function."""
@ -287,6 +297,32 @@ class BranchOperator(BaseOperator, Generic[IN, OUT]):
raise NotImplementedError
class BranchJoinOperator(JoinOperator, Generic[OUT]):
"""Operator that joins inputs using a custom combine function.
This node type is useful for combining the outputs of upstream nodes.
"""
def __init__(
self,
combine_function: Optional[JoinFunc] = None,
can_skip_in_branch: bool = False,
**kwargs,
):
"""Create a JoinDAGNode with a combine function.
Args:
combine_function: A function that defines how to combine inputs.
can_skip_in_branch(bool): Whether the node can be skipped in a branch(
default True).
"""
super().__init__(
combine_function=combine_function or self._return_first_non_empty,
can_skip_in_branch=can_skip_in_branch,
**kwargs,
)
class InputOperator(BaseOperator, Generic[OUT]):
"""Operator node that reads data from an input source."""

View File

@ -2,6 +2,7 @@
This runner will run the workflow in the current process.
"""
import asyncio
import logging
import traceback
@ -11,7 +12,7 @@ from dbgpt.component import SystemApp
from ..dag.base import DAGContext, DAGVar
from ..operators.base import CALL_DATA, BaseOperator, WorkflowRunner
from ..operators.common_operator import BranchOperator, JoinOperator
from ..operators.common_operator import BranchOperator
from ..task.base import SKIP_DATA, TaskContext, TaskState
from ..task.task_impl import DefaultInputContext, DefaultTaskContext, SimpleTaskOutput
from .job_manager import JobManager
@ -184,14 +185,14 @@ def _skip_current_downstream_by_node_name(
return
for child in branch_node.downstream:
child = cast(BaseOperator, child)
if child.node_name in skip_nodes:
if child.node_name in skip_nodes or child.node_id in skip_node_ids:
logger.info(f"Skip node name {child.node_name}, node id {child.node_id}")
_skip_downstream_by_id(child, skip_node_ids)
def _skip_downstream_by_id(node: BaseOperator, skip_node_ids: Set[str]):
if isinstance(node, JoinOperator):
# Not skip join node
if not node.can_skip_in_branch():
# Current node can not skip, so skip its downstream
return
skip_node_ids.add(node.node_id)
for child in node.downstream:
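
A standalone sketch (simplified node type, not the actual runner classes) of the skip rule changed above: propagation now stops at any node whose can_skip_in_branch() returns False, instead of special-casing JoinOperator.

```python
from typing import List, Set


class Node:
    """Hypothetical stand-in for BaseOperator with a skip flag."""

    def __init__(self, node_id: str, can_skip: bool = True):
        self.node_id = node_id
        self._can_skip = can_skip
        self.downstream: List["Node"] = []

    def can_skip_in_branch(self) -> bool:
        return self._can_skip


def skip_downstream(node: Node, skip_node_ids: Set[str]) -> None:
    if not node.can_skip_in_branch():
        # This node must run (e.g. a branch join), so stop skipping here.
        return
    skip_node_ids.add(node.node_id)
    for child in node.downstream:
        skip_downstream(child, skip_node_ids)


not_taken, join = Node("not_taken_branch"), Node("join", can_skip=False)
not_taken.downstream.append(join)
skipped: Set[str] = set()
skip_downstream(not_taken, skipped)
assert skipped == {"not_taken_branch"}  # the join node is never marked skipped
```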

View File

@ -130,7 +130,7 @@ async def test_branch_node(
even_node = MapOperator(
lambda x: 888, task_id="even_node", task_name="even_node_name"
)
join_node = JoinOperator(join_func)
join_node = JoinOperator(join_func, can_skip_in_branch=False)
branch_node = BranchOperator(
{lambda x: x % 2 == 1: odd_node, lambda x: x % 2 == 0: even_node}
)

View File

@ -2,12 +2,13 @@
import dataclasses
from abc import ABC
from typing import Any, AsyncIterator, Dict, List, Optional, Union
from typing import Any, AsyncIterator, Dict, List, Optional, Union, cast
from dbgpt._private.pydantic import BaseModel
from dbgpt.core.awel import (
BaseOperator,
BranchFunc,
BranchJoinOperator,
BranchOperator,
CommonLLMHttpRequestBody,
CommonLLMHttpResponseBody,
@ -340,24 +341,7 @@ class LLMBranchOperator(BranchOperator[ModelRequest, ModelRequest]):
category=OperatorCategory.LLM,
operator_type=OperatorType.BRANCH,
description=_("Branch the workflow based on the stream flag of the request."),
parameters=[
Parameter.build_from(
_("Streaming Task Name"),
"stream_task_name",
str,
optional=True,
default="streaming_llm_task",
description=_("The name of the streaming task."),
),
Parameter.build_from(
_("Non-Streaming Task Name"),
"no_stream_task_name",
str,
optional=True,
default="llm_task",
description=_("The name of the non-streaming task."),
),
],
parameters=[],
inputs=[
IOField.build_from(
_("Model Request"),
@ -382,7 +366,12 @@ class LLMBranchOperator(BranchOperator[ModelRequest, ModelRequest]):
],
)
def __init__(self, stream_task_name: str, no_stream_task_name: str, **kwargs):
def __init__(
self,
stream_task_name: Optional[str] = None,
no_stream_task_name: Optional[str] = None,
**kwargs,
):
"""Create a new LLM branch operator.
Args:
@ -390,18 +379,13 @@ class LLMBranchOperator(BranchOperator[ModelRequest, ModelRequest]):
no_stream_task_name (str): The name of the non-streaming task.
"""
super().__init__(**kwargs)
if not stream_task_name:
raise ValueError("stream_task_name is not set")
if not no_stream_task_name:
raise ValueError("no_stream_task_name is not set")
self._stream_task_name = stream_task_name
self._no_stream_task_name = no_stream_task_name
async def branches(
self,
) -> Dict[BranchFunc[ModelRequest], Union[BaseOperator, str]]:
"""
Return a dict of branch function and task name.
"""Return a dict of branch function and task name.
Returns:
Dict[BranchFunc[ModelRequest], str]: A dict of branch function and task
@ -409,6 +393,18 @@ class LLMBranchOperator(BranchOperator[ModelRequest, ModelRequest]):
If the predicate function returns True, we will run the corresponding
task.
"""
if self._stream_task_name and self._no_stream_task_name:
stream_task_name = self._stream_task_name
no_stream_task_name = self._no_stream_task_name
else:
stream_task_name = ""
no_stream_task_name = ""
for node in self.downstream:
task = cast(BaseOperator, node)
if task.streaming_operator:
stream_task_name = node.node_name
else:
no_stream_task_name = node.node_name
async def check_stream_true(r: ModelRequest) -> bool:
# If stream is true, we will run the streaming task. otherwise, we will run
@ -416,8 +412,8 @@ class LLMBranchOperator(BranchOperator[ModelRequest, ModelRequest]):
return r.stream
return {
check_stream_true: self._stream_task_name,
lambda x: not x.stream: self._no_stream_task_name,
check_stream_true: stream_task_name,
lambda x: not x.stream: no_stream_task_name,
}
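
A standalone sketch (hypothetical helper and operator class, not the dbgpt API) of the fallback added above: when no task names are configured, the branch derives them from its downstream operators' streaming_operator flags.

```python
from typing import List, Tuple


class FakeOperator:
    """Hypothetical downstream operator carrying only what the sketch needs."""

    def __init__(self, node_name: str, streaming_operator: bool):
        self.node_name = node_name
        self.streaming_operator = streaming_operator


def infer_task_names(downstream: List[FakeOperator]) -> Tuple[str, str]:
    stream_task_name, no_stream_task_name = "", ""
    for node in downstream:
        if node.streaming_operator:
            stream_task_name = node.node_name
        else:
            no_stream_task_name = node.node_name
    return stream_task_name, no_stream_task_name


tasks = [FakeOperator("streaming_llm_task", True), FakeOperator("llm_task", False)]
assert infer_task_names(tasks) == ("streaming_llm_task", "llm_task")
```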
@ -553,3 +549,93 @@ class StringOutput2ModelOutputOperator(MapOperator[str, ModelOutput]):
text=input_value,
error_code=500,
)
class LLMBranchJoinOperator(BranchJoinOperator[ModelOutput]):
"""The LLM Branch Join Operator.
Decide which output to keep (streaming or non-streaming).
"""
streaming_operator = True
metadata = ViewMetadata(
label=_("LLM Branch Join Operator"),
name="llm_branch_join_operator",
category=OperatorCategory.LLM,
operator_type=OperatorType.JOIN,
description=_("Just keep the first non-empty output."),
parameters=[],
inputs=[
IOField.build_from(
_("Streaming Model Output"),
"stream_output",
ModelOutput,
is_list=True,
description=_("The streaming output."),
),
IOField.build_from(
_("Non-Streaming Model Output"),
"not_stream_output",
ModelOutput,
description=_("The non-streaming output."),
),
],
outputs=[
IOField.build_from(
_("Model Output"),
"output_value",
ModelOutput,
is_list=True,
description=_("The output value of the operator."),
),
],
)
def __init__(self, **kwargs):
"""Create a new LLM branch join operator."""
super().__init__(**kwargs)
class StringBranchJoinOperator(BranchJoinOperator[str]):
"""The String Branch Join Operator.
Decide which output to keep (streaming or non-streaming).
"""
streaming_operator = True
metadata = ViewMetadata(
label=_("String Branch Join Operator"),
name="string_branch_join_operator",
category=OperatorCategory.COMMON,
operator_type=OperatorType.JOIN,
description=_("Just keep the first non-empty output."),
parameters=[],
inputs=[
IOField.build_from(
_("Streaming String Output"),
"stream_output",
str,
is_list=True,
description=_("The streaming output."),
),
IOField.build_from(
_("Non-Streaming String Output"),
"not_stream_output",
str,
description=_("The non-streaming output."),
),
],
outputs=[
IOField.build_from(
_("String Output"),
"output_value",
str,
is_list=True,
description=_("The output value of the operator."),
),
],
)
def __init__(self, **kwargs):
"""Create a new LLM branch join operator."""
super().__init__(**kwargs)

View File

@ -8,6 +8,7 @@ from dbgpt.core.interface.operators.llm_operator import ( # noqa: F401
BaseLLM,
BaseLLMOperator,
BaseStreamingLLMOperator,
LLMBranchJoinOperator,
LLMBranchOperator,
RequestBuilderOperator,
)
@ -32,6 +33,7 @@ __ALL__ = [
"BaseLLM",
"LLMBranchOperator",
"BaseLLMOperator",
"LLMBranchJoinOperator",
"RequestBuilderOperator",
"BaseStreamingLLMOperator",
"BaseConversationOperator",

View File

@ -17,7 +17,7 @@ class CrossEncoderRerankEmbeddings(BaseModel, RerankEmbeddings):
client: Any #: :meta private:
model_name: str = "BAAI/bge-reranker-base"
max_length: int = None # type: ignore
max_length: Optional[int] = None
"""Max length for input sequences. Longer sequences will be truncated. If None, max
length of the model will be used"""
"""Model name to use."""

View File

@ -29,9 +29,10 @@ class EmbeddingRetrieverOperator(RetrieverOperator[Union[str, List[str]], List[C
parameters=[
Parameter.build_from(
_("Storage Index Store"),
"vector_store_connector",
"index_store",
IndexStoreBase,
description=_("The vector store connector."),
alias=["vector_store_connector"],
),
Parameter.build_from(
_("Top K"),
@ -128,9 +129,10 @@ class EmbeddingAssemblerOperator(AssemblerOperator[Knowledge, List[Chunk]]):
parameters=[
Parameter.build_from(
_("Vector Store Connector"),
"vector_store_connector",
"index_store",
IndexStoreBase,
description=_("The vector store connector."),
alias=["vector_store_connector"],
),
Parameter.build_from(
_("Chunk Parameters"),

View File

@ -56,6 +56,8 @@ def _load_vector_options() -> List[OptionValue]:
default=None,
),
],
# Compatible with the old version
alias=["dbgpt.storage.vector_store.connector.VectorStoreConnector"],
)
class VectorStoreConnector:
"""The connector for vector store.

View File

@ -123,7 +123,7 @@ with DAG("awel_branch_operator") as dag:
task = BranchOperator(branches=branch_mapping)
even_task = MapOperator(task_name="even_task", map_function=even_func)
odd_task = MapOperator(task_name="odd_task", map_function=odd_func)
join_task = JoinOperator(combine_function=combine_function)
join_task = JoinOperator(combine_function=combine_function, can_skip_in_branch=False)
input_task >> task >> even_task >> join_task
input_task >> task >> odd_task >> join_task
@ -133,6 +133,8 @@ print("=" * 80)
print("Second call, input is 6")
assert asyncio.run(join_task.call(call_data=6)) == 60
```
Note: `can_skip_in_branch` controls whether the current task can be skipped in a branch.
Set it to `False` to prevent the task from being skipped.
And run the following command to execute the code:

View File

@ -36,6 +36,7 @@
}'
"""
import logging
import os
from functools import cache
@ -53,12 +54,17 @@ from dbgpt.core import (
PromptTemplate,
SystemPromptTemplate,
)
from dbgpt.core.awel import DAG, HttpTrigger, JoinOperator, MapOperator
from dbgpt.core.awel import (
DAG,
BranchJoinOperator,
HttpTrigger,
JoinOperator,
MapOperator,
)
from dbgpt.core.operators import (
BufferedConversationMapperOperator,
HistoryDynamicPromptBuilderOperator,
LLMBranchOperator,
RequestBuilderOperator,
)
from dbgpt.model.operators import (
LLMOperator,
@ -318,7 +324,6 @@ with DAG("dbgpt_awel_data_analyst_assistant") as dag:
)
prompt_template_load_task = PromptTemplateBuilderOperator()
request_handle_task = RequestBuilderOperator()
# Load and store chat history
chat_history_load_task = ServePreChatHistoryLoadOperator()
@ -343,9 +348,7 @@ with DAG("dbgpt_awel_data_analyst_assistant") as dag:
)
model_parse_task = MapOperator(lambda out: out.to_dict())
openai_format_stream_task = OpenAIStreamingOutputOperator()
result_join_task = JoinOperator(
combine_function=lambda not_stream_out, stream_out: not_stream_out or stream_out
)
result_join_task = BranchJoinOperator()
trigger >> prompt_template_load_task >> history_prompt_build_task
(

View File

@ -55,6 +55,7 @@
"""
import logging
from typing import Dict, List, Optional, Union
@ -69,7 +70,7 @@ from dbgpt.core import (
ModelRequestContext,
SystemPromptTemplate,
)
from dbgpt.core.awel import DAG, HttpTrigger, JoinOperator, MapOperator
from dbgpt.core.awel import DAG, BranchJoinOperator, HttpTrigger, MapOperator
from dbgpt.core.operators import (
ChatComposerInput,
ChatHistoryPromptComposerOperator,
@ -150,9 +151,7 @@ with DAG("dbgpt_awel_simple_chat_history") as multi_round_dag:
)
model_parse_task = MapOperator(lambda out: out.to_dict())
openai_format_stream_task = OpenAIStreamingOutputOperator()
result_join_task = JoinOperator(
combine_function=lambda not_stream_out, stream_out: not_stream_out or stream_out
)
result_join_task = BranchJoinOperator()
req_handle_task = MapOperator(
lambda req: ChatComposerInput(

View File

@ -35,12 +35,13 @@
}'
"""
import logging
from typing import Any, Dict, List, Optional, Union
from dbgpt._private.pydantic import BaseModel, Field
from dbgpt.core import LLMClient
from dbgpt.core.awel import DAG, HttpTrigger, JoinOperator, MapOperator
from dbgpt.core.awel import DAG, BranchJoinOperator, HttpTrigger, MapOperator
from dbgpt.core.operators import LLMBranchOperator, RequestBuilderOperator
from dbgpt.model.operators import (
LLMOperator,
@ -94,9 +95,7 @@ with DAG("dbgpt_awel_simple_llm_client_generate") as client_generate_dag:
)
model_parse_task = MapOperator(lambda out: out.to_dict())
openai_format_stream_task = OpenAIStreamingOutputOperator()
result_join_task = JoinOperator(
combine_function=lambda not_stream_out, stream_out: not_stream_out or stream_out
)
result_join_task = BranchJoinOperator()
trigger >> request_handle_task >> branch_task
branch_task >> llm_task >> model_parse_task >> result_join_task

View File

@ -8,7 +8,7 @@ msgid ""
msgstr ""
"Project-Id-Version: PACKAGE VERSION\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2024-03-24 11:28+0800\n"
"POT-Creation-Date: 2024-06-17 00:05+0800\n"
"PO-Revision-Date: 2024-03-23 11:22+0800\n"
"Last-Translator: Automatically generated\n"
"Language-Team: none\n"
@ -17,7 +17,6 @@ msgstr ""
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
#: ../dbgpt/app/dbgpt_server.py:54 ../dbgpt/app/dbgpt_server.py:55
#, fuzzy
#: ../dbgpt/app/dbgpt_server.py:53 ../dbgpt/app/dbgpt_server.py:54
msgid "DB-GPT Open API"
msgstr "DB-GPT 开放 API"
msgstr "DB-GPT 开放 API"

View File

@ -8,7 +8,7 @@ msgid ""
msgstr ""
"Project-Id-Version: PACKAGE VERSION\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2024-03-24 22:53+0800\n"
"POT-Creation-Date: 2024-06-17 00:05+0800\n"
"PO-Revision-Date: 2024-03-23 16:45+0800\n"
"Last-Translator: Automatically generated\n"
"Language-Team: none\n"
@ -49,86 +49,84 @@ msgstr "人类消息"
msgid "The human message."
msgstr "人类消息。"
#: ../dbgpt/core/interface/operators/prompt_operator.py:255
#: ../dbgpt/core/interface/operators/prompt_operator.py:258
msgid "Prompt Builder Operator"
msgstr "提示构建器操作员"
#: ../dbgpt/core/interface/operators/prompt_operator.py:257
#: ../dbgpt/core/interface/operators/prompt_operator.py:260
msgid "Build messages from prompt template."
msgstr "从提示模板构建消息。"
#: ../dbgpt/core/interface/operators/prompt_operator.py:261
#: ../dbgpt/core/interface/operators/prompt_operator.py:348
#: ../dbgpt/core/interface/operators/prompt_operator.py:264
#: ../dbgpt/core/interface/operators/prompt_operator.py:351
msgid "Chat Prompt Template"
msgstr "聊天提示模板"
#: ../dbgpt/core/interface/operators/prompt_operator.py:264
#: ../dbgpt/core/interface/operators/prompt_operator.py:351
#: ../dbgpt/core/interface/operators/prompt_operator.py:267
#: ../dbgpt/core/interface/operators/prompt_operator.py:354
msgid "The chat prompt template."
msgstr "聊天提示模板。"
#: ../dbgpt/core/interface/operators/prompt_operator.py:269
#: ../dbgpt/core/interface/operators/prompt_operator.py:379
#: ../dbgpt/core/interface/operators/prompt_operator.py:272
#: ../dbgpt/core/interface/operators/prompt_operator.py:382
msgid "Prompt Input Dict"
msgstr "提示输入字典"
#: ../dbgpt/core/interface/operators/prompt_operator.py:272
#: ../dbgpt/core/interface/operators/prompt_operator.py:382
#: ../dbgpt/core/interface/operators/prompt_operator.py:275
#: ../dbgpt/core/interface/operators/prompt_operator.py:385
msgid "The prompt dict."
msgstr "提示字典。"
#: ../dbgpt/core/interface/operators/prompt_operator.py:277
#: ../dbgpt/core/interface/operators/prompt_operator.py:387
#: ../dbgpt/core/interface/operators/prompt_operator.py:280
#: ../dbgpt/core/interface/operators/prompt_operator.py:390
msgid "Formatted Messages"
msgstr "格式化消息"
#: ../dbgpt/core/interface/operators/prompt_operator.py:281
#: ../dbgpt/core/interface/operators/prompt_operator.py:391
#: ../dbgpt/core/interface/operators/prompt_operator.py:284
#: ../dbgpt/core/interface/operators/prompt_operator.py:394
msgid "The formatted messages."
msgstr "格式化的消息。"
#: ../dbgpt/core/interface/operators/prompt_operator.py:341
#: ../dbgpt/core/interface/operators/prompt_operator.py:344
msgid "History Prompt Builder Operator"
msgstr "历史提示构建器操作员"
#: ../dbgpt/core/interface/operators/prompt_operator.py:343
#: ../dbgpt/core/interface/operators/prompt_operator.py:346
msgid "Build messages from prompt template and chat history."
msgstr "从提示模板和聊天历史构建消息。"
#: ../dbgpt/core/interface/operators/prompt_operator.py:354
#: ../dbgpt/core/interface/operators/prompt_operator.py:357
#: ../dbgpt/core/operators/flow/composer_operator.py:65
msgid "History Key"
msgstr "历史关键字"
#: ../dbgpt/core/interface/operators/prompt_operator.py:359
#: ../dbgpt/core/interface/operators/prompt_operator.py:362
msgid "The key of history in prompt dict."
msgstr "提示字典中的历史关键字。"
#: ../dbgpt/core/interface/operators/prompt_operator.py:362
#: ../dbgpt/core/interface/operators/prompt_operator.py:365
msgid "String History"
msgstr "字符串历史"
#: ../dbgpt/core/interface/operators/prompt_operator.py:367
#: ../dbgpt/core/interface/operators/prompt_operator.py:370
msgid "Whether to convert the history to string."
msgstr "是否将历史转换为字符串。"
#: ../dbgpt/core/interface/operators/prompt_operator.py:372
#: ../dbgpt/core/interface/operators/prompt_operator.py:375
msgid "History"
msgstr "历史"
#: ../dbgpt/core/interface/operators/prompt_operator.py:376
#: ../dbgpt/core/interface/operators/prompt_operator.py:379
msgid "The history."
msgstr "历史。"
#: ../dbgpt/core/interface/operators/message_operator.py:122
#, fuzzy
msgid "Chat History Load Operator"
msgstr "历史提示构建器操作员"
msgstr "历史提示构建器算子"
#: ../dbgpt/core/interface/operators/message_operator.py:125
#, fuzzy
msgid "The operator to load chat history from storage."
msgstr "带有聊天历史提示的模型请求。"
msgstr "从存储加载聊天历史的算子"
#: ../dbgpt/core/interface/operators/message_operator.py:128
#: ../dbgpt/core/operators/flow/composer_operator.py:92
@ -153,212 +151,252 @@ msgid ""
msgstr ""
#: ../dbgpt/core/interface/operators/message_operator.py:152
#: ../dbgpt/core/interface/operators/llm_operator.py:99
#: ../dbgpt/core/interface/operators/llm_operator.py:202
#: ../dbgpt/core/interface/operators/llm_operator.py:217
#: ../dbgpt/core/interface/operators/llm_operator.py:363
#: ../dbgpt/core/interface/operators/llm_operator.py:100
#: ../dbgpt/core/interface/operators/llm_operator.py:203
#: ../dbgpt/core/interface/operators/llm_operator.py:218
#: ../dbgpt/core/interface/operators/llm_operator.py:364
#: ../dbgpt/core/operators/flow/composer_operator.py:118
msgid "Model Request"
msgstr "模型请求"
#: ../dbgpt/core/interface/operators/message_operator.py:155
#, fuzzy
msgid "The model request."
msgstr "上游的模型请求。"
#: ../dbgpt/core/interface/operators/message_operator.py:160
#, fuzzy
msgid "Stored Messages"
msgstr "格式化消息"
msgstr "存储的消息"
#: ../dbgpt/core/interface/operators/message_operator.py:163
#, fuzzy
msgid "The messages stored in the storage."
msgstr "消息存储。"
msgstr "存储在存储中的消息。"
#: ../dbgpt/core/interface/operators/llm_operator.py:51
#: ../dbgpt/core/interface/operators/llm_operator.py:52
msgid "Build Model Request"
msgstr "构建模型请求"
#: ../dbgpt/core/interface/operators/llm_operator.py:54
#: ../dbgpt/core/interface/operators/llm_operator.py:55
msgid "Build the model request from the http request body."
msgstr "从 HTTP 请求体构建模型请求。"
#: ../dbgpt/core/interface/operators/llm_operator.py:57
#: ../dbgpt/core/interface/operators/llm_operator.py:58
msgid "Default Model Name"
msgstr "默认模型名称"
#: ../dbgpt/core/interface/operators/llm_operator.py:62
#: ../dbgpt/core/interface/operators/llm_operator.py:63
msgid "The model name of the model request."
msgstr "模型请求的模型名称。"
#: ../dbgpt/core/interface/operators/llm_operator.py:65
#: ../dbgpt/core/interface/operators/llm_operator.py:66
msgid "Temperature"
msgstr ""
#: ../dbgpt/core/interface/operators/llm_operator.py:70
#, fuzzy
#: ../dbgpt/core/interface/operators/llm_operator.py:71
msgid "The temperature of the model request."
msgstr "模型请求的模型名称。"
msgstr "模型请求的温度。"
#: ../dbgpt/core/interface/operators/llm_operator.py:73
#: ../dbgpt/core/interface/operators/llm_operator.py:74
msgid "Max New Tokens"
msgstr ""
#: ../dbgpt/core/interface/operators/llm_operator.py:78
#, fuzzy
#: ../dbgpt/core/interface/operators/llm_operator.py:79
msgid "The max new tokens of the model request."
msgstr "模型请求的模型名称。"
msgstr "最大新令牌数(token)。"
#: ../dbgpt/core/interface/operators/llm_operator.py:81
#: ../dbgpt/core/interface/operators/llm_operator.py:82
msgid "Context Length"
msgstr ""
msgstr "上下文长度"
#: ../dbgpt/core/interface/operators/llm_operator.py:86
#, fuzzy
#: ../dbgpt/core/interface/operators/llm_operator.py:87
msgid "The context length of the model request."
msgstr "模型请求的模型名称。"
msgstr "模型请求的上下文长度。"
#: ../dbgpt/core/interface/operators/llm_operator.py:91
#: ../dbgpt/core/awel/trigger/ext_http_trigger.py:40
#: ../dbgpt/core/awel/trigger/http_trigger.py:735
#: ../dbgpt/core/awel/trigger/http_trigger.py:794
#: ../dbgpt/core/awel/trigger/http_trigger.py:855
#: ../dbgpt/core/awel/trigger/http_trigger.py:986
#: ../dbgpt/core/awel/trigger/http_trigger.py:1043
#: ../dbgpt/core/awel/trigger/http_trigger.py:1092
#: ../dbgpt/core/interface/operators/llm_operator.py:92
#: ../dbgpt/core/awel/trigger/ext_http_trigger.py:41
#: ../dbgpt/core/awel/trigger/ext_http_trigger.py:98
#: ../dbgpt/core/awel/trigger/http_trigger.py:766
#: ../dbgpt/core/awel/trigger/http_trigger.py:825
#: ../dbgpt/core/awel/trigger/http_trigger.py:886
#: ../dbgpt/core/awel/trigger/http_trigger.py:1017
#: ../dbgpt/core/awel/trigger/http_trigger.py:1074
#: ../dbgpt/core/awel/trigger/http_trigger.py:1123
msgid "Request Body"
msgstr "请求体"
#: ../dbgpt/core/interface/operators/llm_operator.py:94
#: ../dbgpt/core/interface/operators/llm_operator.py:366
#: ../dbgpt/core/interface/operators/llm_operator.py:440
#: ../dbgpt/core/interface/operators/llm_operator.py:531
#: ../dbgpt/core/interface/operators/llm_operator.py:539
#: ../dbgpt/core/interface/operators/llm_operator.py:95
#: ../dbgpt/core/interface/operators/llm_operator.py:367
#: ../dbgpt/core/interface/operators/llm_operator.py:441
#: ../dbgpt/core/interface/operators/llm_operator.py:534
#: ../dbgpt/core/interface/operators/llm_operator.py:542
msgid "The input value of the operator."
msgstr "操作员的输入值。"
#: ../dbgpt/core/interface/operators/llm_operator.py:102
#: ../dbgpt/core/interface/operators/llm_operator.py:220
#: ../dbgpt/core/interface/operators/llm_operator.py:448
#: ../dbgpt/core/interface/operators/llm_operator.py:103
#: ../dbgpt/core/interface/operators/llm_operator.py:221
#: ../dbgpt/core/interface/operators/llm_operator.py:449
#: ../dbgpt/core/interface/operators/llm_operator.py:594
#: ../dbgpt/core/interface/operators/llm_operator.py:639
msgid "The output value of the operator."
msgstr "算子的输出值。"
#: ../dbgpt/core/interface/operators/llm_operator.py:195
#: ../dbgpt/core/interface/operators/llm_operator.py:196
msgid "Merge Model Request Messages"
msgstr "合并模型请求消息"
#: ../dbgpt/core/interface/operators/llm_operator.py:198
#: ../dbgpt/core/interface/operators/llm_operator.py:199
msgid "Merge the model request from the input value."
msgstr "从输入值中合并模型请求。"
#: ../dbgpt/core/interface/operators/llm_operator.py:205
#: ../dbgpt/core/interface/operators/llm_operator.py:206
msgid "The model request of upstream."
msgstr "上游的模型请求。"
#: ../dbgpt/core/interface/operators/llm_operator.py:208
#: ../dbgpt/core/interface/operators/llm_operator.py:209
msgid "Model messages"
msgstr "模型消息"
#: ../dbgpt/core/interface/operators/llm_operator.py:211
#: ../dbgpt/core/interface/operators/llm_operator.py:212
msgid "The model messages of upstream."
msgstr "上游的模型消息。"
#: ../dbgpt/core/interface/operators/llm_operator.py:338
#: ../dbgpt/core/interface/operators/llm_operator.py:339
msgid "LLM Branch Operator"
msgstr "LLM 分支算子"
#: ../dbgpt/core/interface/operators/llm_operator.py:342
#: ../dbgpt/core/interface/operators/llm_operator.py:343
msgid "Branch the workflow based on the stream flag of the request."
msgstr "根据请求的流标志分支工作流。"
#: ../dbgpt/core/interface/operators/llm_operator.py:345
#: ../dbgpt/core/interface/operators/llm_operator.py:346
msgid "Streaming Task Name"
msgstr "流式任务名称"
#: ../dbgpt/core/interface/operators/llm_operator.py:350
#: ../dbgpt/core/interface/operators/llm_operator.py:351
msgid "The name of the streaming task."
msgstr "流式任务的名称。"
#: ../dbgpt/core/interface/operators/llm_operator.py:353
#: ../dbgpt/core/interface/operators/llm_operator.py:354
msgid "Non-Streaming Task Name"
msgstr "非流式任务名称"
#: ../dbgpt/core/interface/operators/llm_operator.py:358
#: ../dbgpt/core/interface/operators/llm_operator.py:359
msgid "The name of the non-streaming task."
msgstr "非流式任务的名称。"
#: ../dbgpt/core/interface/operators/llm_operator.py:371
#: ../dbgpt/core/interface/operators/llm_operator.py:372
msgid "Streaming Model Request"
msgstr "流式模型请求"
#: ../dbgpt/core/interface/operators/llm_operator.py:374
#: ../dbgpt/core/interface/operators/llm_operator.py:375
msgid "The streaming request, to streaming Operator."
msgstr "流式请求,发送至流式算子。"
#: ../dbgpt/core/interface/operators/llm_operator.py:377
#: ../dbgpt/core/interface/operators/llm_operator.py:378
msgid "Non-Streaming Model Request"
msgstr "非流式模型请求"
#: ../dbgpt/core/interface/operators/llm_operator.py:380
#: ../dbgpt/core/interface/operators/llm_operator.py:381
msgid "The non-streaming request, to non-streaming Operator."
msgstr "非流式请求,发送至非流式算子。"
#: ../dbgpt/core/interface/operators/llm_operator.py:430
#: ../dbgpt/core/interface/operators/llm_operator.py:431
msgid "Map Model Output to Common Response Body"
msgstr "将模型输出映射到通用响应体"
#: ../dbgpt/core/interface/operators/llm_operator.py:433
#: ../dbgpt/core/interface/operators/llm_operator.py:434
msgid "Map the model output to the common response body."
msgstr "将模型输出映射到通用响应体。"
#: ../dbgpt/core/interface/operators/llm_operator.py:437
#: ../dbgpt/core/interface/operators/llm_operator.py:491
#: ../dbgpt/core/interface/operators/llm_operator.py:536
#: ../dbgpt/core/interface/operators/llm_operator.py:438
#: ../dbgpt/core/interface/operators/llm_operator.py:494
#: ../dbgpt/core/interface/operators/llm_operator.py:539
#: ../dbgpt/core/interface/operators/llm_operator.py:590
#: ../dbgpt/core/interface/output_parser.py:40
#: ../dbgpt/core/interface/output_parser.py:49
msgid "Model Output"
msgstr "模型输出"
#: ../dbgpt/core/interface/operators/llm_operator.py:445
#: ../dbgpt/core/interface/operators/llm_operator.py:446
msgid "Common Response Body"
msgstr "通用响应体"
#: ../dbgpt/core/interface/operators/llm_operator.py:474
#: ../dbgpt/core/interface/operators/llm_operator.py:477
msgid "Common Streaming Output Operator"
msgstr "通用流式输出算子"
#: ../dbgpt/core/interface/operators/llm_operator.py:478
#: ../dbgpt/core/interface/operators/llm_operator.py:481
msgid "The common streaming LLM operator, for chat flow."
msgstr "用于聊天流程的通用流式 LLM 算子。"
#: ../dbgpt/core/interface/operators/llm_operator.py:482
#: ../dbgpt/core/interface/operators/llm_operator.py:485
msgid "Upstream Model Output"
msgstr "上游模型输出"
#: ../dbgpt/core/interface/operators/llm_operator.py:486
#: ../dbgpt/core/interface/operators/llm_operator.py:489
#: ../dbgpt/core/interface/output_parser.py:44
msgid "The model output of upstream."
msgstr "上游的模型输出。"
#: ../dbgpt/core/interface/operators/llm_operator.py:496
#: ../dbgpt/core/interface/operators/llm_operator.py:499
msgid "The model output after transform to common stream format"
msgstr "转换为通用流格式后的模型输出"
#: ../dbgpt/core/interface/operators/llm_operator.py:521
#: ../dbgpt/core/interface/operators/llm_operator.py:524
msgid "Map String to ModelOutput"
msgstr "将字符串映射到模型输出"
#: ../dbgpt/core/interface/operators/llm_operator.py:524
#: ../dbgpt/core/interface/operators/llm_operator.py:527
msgid "Map String to ModelOutput."
msgstr "将字符串映射到模型输出。"
#: ../dbgpt/core/interface/operators/llm_operator.py:528
#: ../dbgpt/core/interface/operators/llm_operator.py:531
msgid "String"
msgstr "字符串"
#: ../dbgpt/core/interface/operators/llm_operator.py:567
msgid "LLM Branch Join Operator"
msgstr "LLM 分支算子"
#: ../dbgpt/core/interface/operators/llm_operator.py:571
#: ../dbgpt/core/interface/operators/llm_operator.py:616
msgid "Just keep the first non-empty output."
msgstr ""
#: ../dbgpt/core/interface/operators/llm_operator.py:575
msgid "Streaming Model Output"
msgstr "模型流式输出"
#: ../dbgpt/core/interface/operators/llm_operator.py:579
#: ../dbgpt/core/interface/operators/llm_operator.py:624
msgid "The streaming output."
msgstr "上游模型流式输出"
#: ../dbgpt/core/interface/operators/llm_operator.py:582
msgid "Non-Streaming Model Output"
msgstr "非流式模型请求"
#: ../dbgpt/core/interface/operators/llm_operator.py:585
#: ../dbgpt/core/interface/operators/llm_operator.py:630
msgid "The non-streaming output."
msgstr "非流式任务的名称。"
#: ../dbgpt/core/interface/operators/llm_operator.py:612
msgid "String Branch Join Operator"
msgstr "LLM 分支算子"
#: ../dbgpt/core/interface/operators/llm_operator.py:620
msgid "Streaming String Output"
msgstr "将字符串映射到模型输出"
#: ../dbgpt/core/interface/operators/llm_operator.py:627
msgid "Non-Streaming String Output"
msgstr "非流式模型请求"
#: ../dbgpt/core/interface/operators/llm_operator.py:635
msgid "String Output"
msgstr "将字符串映射到模型输出"
#: ../dbgpt/core/interface/output_parser.py:32
#, fuzzy
msgid "Base Output Operator"
msgstr "通用流式输出算子"
@ -367,12 +405,10 @@ msgid "The base LLM out parse."
msgstr ""
#: ../dbgpt/core/interface/output_parser.py:53
#, fuzzy
msgid "The model output after parsing."
msgstr "上游的模型输出。"
#: ../dbgpt/core/interface/storage.py:390
#, fuzzy
msgid "Memory Storage"
msgstr "消息存储"
@ -452,7 +488,7 @@ msgstr "消息存储。"
#: ../dbgpt/core/operators/flow/composer_operator.py:110
#: ../dbgpt/core/operators/flow/composer_operator.py:226
#: ../dbgpt/core/awel/trigger/http_trigger.py:200
#: ../dbgpt/core/awel/trigger/http_trigger.py:209
msgid "Common LLM Http Request Body"
msgstr "通用LLM HTTP请求体"
@ -535,245 +571,298 @@ msgstr "输出"
msgid "The merged dict. example: {'user_input': 'first', 'context': 'second'}."
msgstr "合并后的字典。示例:{'user_input': 'first', 'context': 'second'}。"
#: ../dbgpt/core/awel/trigger/ext_http_trigger.py:29
#: ../dbgpt/core/awel/trigger/ext_http_trigger.py:30
msgid "Request Http Trigger"
msgstr "请求HTTP触发器"
#: ../dbgpt/core/awel/trigger/ext_http_trigger.py:34
#: ../dbgpt/core/awel/trigger/ext_http_trigger.py:35
msgid ""
"Trigger your workflow by http request, and parse the request body as a "
"starlette Request"
msgstr "通过HTTP请求触发您的工作流并将请求体解析为starlette请求。"
#: ../dbgpt/core/awel/trigger/ext_http_trigger.py:44
#: ../dbgpt/core/awel/trigger/ext_http_trigger.py:45
msgid "The request body of the API endpoint, parse as a starlette Request"
msgstr "API 端点的请求主体,解析为 starlette 请求"
#: ../dbgpt/core/awel/trigger/http_trigger.py:108
#: ../dbgpt/core/awel/trigger/ext_http_trigger.py:92
msgid "HTTP Sender"
msgstr ""
#: ../dbgpt/core/awel/trigger/ext_http_trigger.py:95
msgid "Send a HTTP request to a specified endpoint"
msgstr ""
#: ../dbgpt/core/awel/trigger/ext_http_trigger.py:101
msgid "The request body to send"
msgstr "API 端点的请求主体"
#: ../dbgpt/core/awel/trigger/ext_http_trigger.py:106
#: ../dbgpt/core/awel/trigger/http_trigger.py:974
#: ../dbgpt/core/awel/trigger/http_trigger.py:1025
msgid "Response Body"
msgstr "响应主体"
#: ../dbgpt/core/awel/trigger/ext_http_trigger.py:109
msgid "The response body of the HTTP request"
msgstr "API 端点的响应主体"
#: ../dbgpt/core/awel/trigger/ext_http_trigger.py:114
msgid "HTTP Address"
msgstr ""
#: ../dbgpt/core/awel/trigger/ext_http_trigger.py:115
msgid "address"
msgstr ""
#: ../dbgpt/core/awel/trigger/ext_http_trigger.py:117
msgid "The address to send the HTTP request to"
msgstr "模型请求的模型名称。"
#: ../dbgpt/core/awel/trigger/ext_http_trigger.py:122
msgid "Timeout"
msgstr "超时时间"
#: ../dbgpt/core/awel/trigger/ext_http_trigger.py:127
msgid "The timeout of the HTTP request in seconds"
msgstr "模型请求的模型名称。"
#: ../dbgpt/core/awel/trigger/ext_http_trigger.py:130
msgid "Token"
msgstr ""
#: ../dbgpt/core/awel/trigger/ext_http_trigger.py:135
msgid "The token to use for the HTTP request"
msgstr "发送 HTTP 请求的令牌。"
#: ../dbgpt/core/awel/trigger/ext_http_trigger.py:138
msgid "Cookies"
msgstr ""
#: ../dbgpt/core/awel/trigger/ext_http_trigger.py:143
msgid "The cookies to use for the HTTP request"
msgstr "发送 HTTP 请求的 cookies。"
#: ../dbgpt/core/awel/trigger/http_trigger.py:117
msgid "Dict Http Body"
msgstr "字典 HTTP 主体"
#: ../dbgpt/core/awel/trigger/http_trigger.py:112
#: ../dbgpt/core/awel/trigger/http_trigger.py:121
msgid "Parse the request body as a dict or response body as a dict"
msgstr "将请求主体解析为字典或响应主体解析为字典"
#: ../dbgpt/core/awel/trigger/http_trigger.py:138
#: ../dbgpt/core/awel/trigger/http_trigger.py:147
msgid "String Http Body"
msgstr "字符串 HTTP 主体"
#: ../dbgpt/core/awel/trigger/http_trigger.py:142
#: ../dbgpt/core/awel/trigger/http_trigger.py:151
msgid "Parse the request body as a string or response body as string"
msgstr "将请求主体解析为字符串或响应主体解析为字符串"
#: ../dbgpt/core/awel/trigger/http_trigger.py:168
#: ../dbgpt/core/awel/trigger/http_trigger.py:177
msgid "Request Http Body"
msgstr "请求 HTTP 主体"
#: ../dbgpt/core/awel/trigger/http_trigger.py:172
#: ../dbgpt/core/awel/trigger/http_trigger.py:181
msgid "Parse the request body as a starlette Request"
msgstr "将请求主体解析为 starlette 请求"
#: ../dbgpt/core/awel/trigger/http_trigger.py:204
#: ../dbgpt/core/awel/trigger/http_trigger.py:213
msgid "Parse the request body as a common LLM http body"
msgstr "将请求主体解析为通用 LLM HTTP 主体"
#: ../dbgpt/core/awel/trigger/http_trigger.py:264
#: ../dbgpt/core/awel/trigger/http_trigger.py:289
msgid "Common LLM Http Response Body"
msgstr "通用 LLM HTTP 响应主体"
#: ../dbgpt/core/awel/trigger/http_trigger.py:268
#: ../dbgpt/core/awel/trigger/http_trigger.py:293
msgid "Parse the response body as a common LLM http body"
msgstr "将响应主体解析为通用 LLM HTTP 主体"
#: ../dbgpt/core/awel/trigger/http_trigger.py:654
#: ../dbgpt/core/awel/trigger/http_trigger.py:685
msgid "API Endpoint"
msgstr "API 端点"
#: ../dbgpt/core/awel/trigger/http_trigger.py:654
#: ../dbgpt/core/awel/trigger/http_trigger.py:685
msgid "The API endpoint"
msgstr "API 端点"
#: ../dbgpt/core/awel/trigger/http_trigger.py:657
#: ../dbgpt/core/awel/trigger/http_trigger.py:669
#: ../dbgpt/core/awel/trigger/http_trigger.py:688
#: ../dbgpt/core/awel/trigger/http_trigger.py:700
msgid "Http Methods"
msgstr "HTTP 方法"
#: ../dbgpt/core/awel/trigger/http_trigger.py:662
#: ../dbgpt/core/awel/trigger/http_trigger.py:674
#: ../dbgpt/core/awel/trigger/http_trigger.py:693
#: ../dbgpt/core/awel/trigger/http_trigger.py:705
msgid "The methods of the API endpoint"
msgstr "API 端点的方法"
#: ../dbgpt/core/awel/trigger/http_trigger.py:664
#: ../dbgpt/core/awel/trigger/http_trigger.py:678
#: ../dbgpt/core/awel/trigger/http_trigger.py:695
#: ../dbgpt/core/awel/trigger/http_trigger.py:709
msgid "HTTP Method PUT"
msgstr "HTTP PUT 方法"
#: ../dbgpt/core/awel/trigger/http_trigger.py:665
#: ../dbgpt/core/awel/trigger/http_trigger.py:679
#: ../dbgpt/core/awel/trigger/http_trigger.py:696
#: ../dbgpt/core/awel/trigger/http_trigger.py:710
msgid "HTTP Method POST"
msgstr "HTTP POST 方法"
#: ../dbgpt/core/awel/trigger/http_trigger.py:676
#: ../dbgpt/core/awel/trigger/http_trigger.py:707
msgid "HTTP Method GET"
msgstr "HTTP GET 方法"
#: ../dbgpt/core/awel/trigger/http_trigger.py:677
#: ../dbgpt/core/awel/trigger/http_trigger.py:708
msgid "HTTP Method DELETE"
msgstr "HTTP DELETE 方法"
#: ../dbgpt/core/awel/trigger/http_trigger.py:683
#: ../dbgpt/core/awel/trigger/http_trigger.py:714
msgid "Streaming Response"
msgstr "流式响应"
#: ../dbgpt/core/awel/trigger/http_trigger.py:688
#: ../dbgpt/core/awel/trigger/http_trigger.py:719
msgid "Whether the response is streaming"
msgstr "响应是否为流式传输"
#: ../dbgpt/core/awel/trigger/http_trigger.py:691
#: ../dbgpt/core/awel/trigger/http_trigger.py:722
msgid "Http Response Body"
msgstr "HTTP 响应主体"
#: ../dbgpt/core/awel/trigger/http_trigger.py:696
#: ../dbgpt/core/awel/trigger/http_trigger.py:946
#: ../dbgpt/core/awel/trigger/http_trigger.py:997
#: ../dbgpt/core/awel/trigger/http_trigger.py:727
#: ../dbgpt/core/awel/trigger/http_trigger.py:977
#: ../dbgpt/core/awel/trigger/http_trigger.py:1028
msgid "The response body of the API endpoint"
msgstr "API 端点的响应主体"
#: ../dbgpt/core/awel/trigger/http_trigger.py:700
#: ../dbgpt/core/awel/trigger/http_trigger.py:731
msgid "Response Media Type"
msgstr "响应媒体类型"
#: ../dbgpt/core/awel/trigger/http_trigger.py:705
#: ../dbgpt/core/awel/trigger/http_trigger.py:736
msgid "The response media type"
msgstr "响应的媒体类型"
#: ../dbgpt/core/awel/trigger/http_trigger.py:708
#: ../dbgpt/core/awel/trigger/http_trigger.py:739
msgid "Http Status Code"
msgstr "HTTP 状态码"
#: ../dbgpt/core/awel/trigger/http_trigger.py:713
#: ../dbgpt/core/awel/trigger/http_trigger.py:744
msgid "The http status code"
msgstr "HTTP 状态码"
#: ../dbgpt/core/awel/trigger/http_trigger.py:724
#: ../dbgpt/core/awel/trigger/http_trigger.py:755
msgid "Dict Http Trigger"
msgstr "字典 HTTP 触发器"
#: ../dbgpt/core/awel/trigger/http_trigger.py:729
#: ../dbgpt/core/awel/trigger/http_trigger.py:760
msgid ""
"Trigger your workflow by http request, and parse the request body as a dict"
msgstr "通过 HTTP 请求触发您的工作流,并将请求主体解析为字典"
#: ../dbgpt/core/awel/trigger/http_trigger.py:738
#: ../dbgpt/core/awel/trigger/http_trigger.py:989
#: ../dbgpt/core/awel/trigger/http_trigger.py:1046
#: ../dbgpt/core/awel/trigger/http_trigger.py:1095
#: ../dbgpt/core/awel/trigger/http_trigger.py:769
#: ../dbgpt/core/awel/trigger/http_trigger.py:1020
#: ../dbgpt/core/awel/trigger/http_trigger.py:1077
#: ../dbgpt/core/awel/trigger/http_trigger.py:1126
msgid "The request body of the API endpoint"
msgstr "API 端点的请求主体"
#: ../dbgpt/core/awel/trigger/http_trigger.py:783
#: ../dbgpt/core/awel/trigger/http_trigger.py:814
msgid "String Http Trigger"
msgstr "字符串 HTTP 触发器"
#: ../dbgpt/core/awel/trigger/http_trigger.py:788
#: ../dbgpt/core/awel/trigger/http_trigger.py:819
msgid ""
"Trigger your workflow by http request, and parse the request body as a string"
msgstr "通过 HTTP 请求触发您的工作流,并将请求主体解析为字符串"
#: ../dbgpt/core/awel/trigger/http_trigger.py:798
#: ../dbgpt/core/awel/trigger/http_trigger.py:829
msgid "The request body of the API endpoint, parse as a json string"
msgstr "API 端点的请求主体,解析为 JSON 字符串"
#: ../dbgpt/core/awel/trigger/http_trigger.py:844
#: ../dbgpt/core/awel/trigger/http_trigger.py:875
msgid "Common LLM Http Trigger"
msgstr "常见 LLM Http 触发器"
#: ../dbgpt/core/awel/trigger/http_trigger.py:849
#: ../dbgpt/core/awel/trigger/http_trigger.py:880
msgid ""
"Trigger your workflow by http request, and parse the request body as a "
"common LLM http body"
msgstr "通过 HTTP 请求触发您的工作流,并将请求主体解析为常见的 LLM HTTP 主体"
#: ../dbgpt/core/awel/trigger/http_trigger.py:859
#: ../dbgpt/core/awel/trigger/http_trigger.py:890
msgid "The request body of the API endpoint, parse as a common LLM http body"
msgstr "API 端点的请求主体,解析为常见的 LLM HTTP 主体"
#: ../dbgpt/core/awel/trigger/http_trigger.py:903
#: ../dbgpt/core/awel/trigger/http_trigger.py:934
msgid "Example Http Response"
msgstr "示例 HTTP 响应"
#: ../dbgpt/core/awel/trigger/http_trigger.py:907
#: ../dbgpt/core/awel/trigger/http_trigger.py:938
msgid "Example Http Request"
msgstr "示例 HTTP 请求"
#: ../dbgpt/core/awel/trigger/http_trigger.py:929
#: ../dbgpt/core/awel/trigger/http_trigger.py:949
#: ../dbgpt/core/awel/trigger/http_trigger.py:960
#: ../dbgpt/core/awel/trigger/http_trigger.py:980
msgid "Example Http Hello Operator"
msgstr "示例 HTTP Hello 算子"
#: ../dbgpt/core/awel/trigger/http_trigger.py:935
#: ../dbgpt/core/awel/trigger/http_trigger.py:966
msgid "Http Request Body"
msgstr "HTTP 请求主体"
#: ../dbgpt/core/awel/trigger/http_trigger.py:938
#: ../dbgpt/core/awel/trigger/http_trigger.py:969
msgid "The request body of the API endpoint(Dict[str, Any])"
msgstr "API 端点的请求主体(字典[str, Any]"
#: ../dbgpt/core/awel/trigger/http_trigger.py:943
#: ../dbgpt/core/awel/trigger/http_trigger.py:994
msgid "Response Body"
msgstr "响应主体"
#: ../dbgpt/core/awel/trigger/http_trigger.py:969
#: ../dbgpt/core/awel/trigger/http_trigger.py:1000
msgid "Request Body To Dict Operator"
msgstr "请求主体转换为字典算子"
#: ../dbgpt/core/awel/trigger/http_trigger.py:974
#: ../dbgpt/core/awel/trigger/http_trigger.py:1005
msgid "Prefix Key"
msgstr "前缀键"
#: ../dbgpt/core/awel/trigger/http_trigger.py:980
#: ../dbgpt/core/awel/trigger/http_trigger.py:1011
msgid "The prefix key of the dict, link 'message' or 'extra.info'"
msgstr "字典的前缀键,链接 'message' 或 'extra.info'"
#: ../dbgpt/core/awel/trigger/http_trigger.py:1028
#: ../dbgpt/core/awel/trigger/http_trigger.py:1059
msgid "User Input Parsed Operator"
msgstr "用户输入解析算子"
#: ../dbgpt/core/awel/trigger/http_trigger.py:1033
#: ../dbgpt/core/awel/trigger/http_trigger.py:1082
#: ../dbgpt/core/awel/trigger/http_trigger.py:1064
#: ../dbgpt/core/awel/trigger/http_trigger.py:1113
msgid "Key"
msgstr "键"
#: ../dbgpt/core/awel/trigger/http_trigger.py:1038
#: ../dbgpt/core/awel/trigger/http_trigger.py:1087
#: ../dbgpt/core/awel/trigger/http_trigger.py:1069
#: ../dbgpt/core/awel/trigger/http_trigger.py:1118
msgid "The key of the dict, link 'user_input'"
msgstr "字典的键,链接 'user_input'"
#: ../dbgpt/core/awel/trigger/http_trigger.py:1051
#: ../dbgpt/core/awel/trigger/http_trigger.py:1082
msgid "User Input Dict"
msgstr "用户输入字典"
#: ../dbgpt/core/awel/trigger/http_trigger.py:1054
#: ../dbgpt/core/awel/trigger/http_trigger.py:1103
#: ../dbgpt/core/awel/trigger/http_trigger.py:1085
#: ../dbgpt/core/awel/trigger/http_trigger.py:1134
msgid "The user input dict of the API endpoint"
msgstr "API 端点的用户输入字典"
#: ../dbgpt/core/awel/trigger/http_trigger.py:1058
#: ../dbgpt/core/awel/trigger/http_trigger.py:1089
msgid ""
"User input parsed operator, parse the user input from request body and "
"return as a dict"
msgstr "用户输入解析算子,从请求主体解析用户输入并返回为字典"
#: ../dbgpt/core/awel/trigger/http_trigger.py:1077
#: ../dbgpt/core/awel/trigger/http_trigger.py:1108
msgid "Request Body Parsed To String Operator"
msgstr "请求主体解析为字符串算子"
#: ../dbgpt/core/awel/trigger/http_trigger.py:1100
#: ../dbgpt/core/awel/trigger/http_trigger.py:1131
msgid "User Input String"
msgstr "用户输入字符串"
#: ../dbgpt/core/awel/trigger/http_trigger.py:1107
#: ../dbgpt/core/awel/trigger/http_trigger.py:1138
msgid ""
"User input parsed operator, parse the user input from request body and "
"return as a string"

View File

@ -8,7 +8,7 @@ msgid ""
msgstr ""
"Project-Id-Version: PACKAGE VERSION\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2024-03-26 05:29+0800\n"
"POT-Creation-Date: 2024-06-17 00:05+0800\n"
"PO-Revision-Date: 2024-03-23 16:45+0800\n"
"Last-Translator: Automatically generated\n"
"Language-Team: none\n"
@ -109,7 +109,7 @@ msgstr "这是模型的请求。"
#: ../dbgpt/model/operators/llm_operator.py:90
#: ../dbgpt/model/operators/llm_operator.py:138
#: ../dbgpt/model/utils/chatgpt_utils.py:175
#: ../dbgpt/model/utils/chatgpt_utils.py:178
msgid "Model Output"
msgstr "模型输出"
@ -126,22 +126,22 @@ msgstr "流式 LLM 算子"
msgid "The streaming LLM operator."
msgstr "流式 LLM 算子。"
#: ../dbgpt/model/utils/chatgpt_utils.py:158
#: ../dbgpt/model/utils/chatgpt_utils.py:161
msgid "OpenAI Streaming Output Operator"
msgstr "OpenAI 流式输出算子"
#: ../dbgpt/model/utils/chatgpt_utils.py:162
#: ../dbgpt/model/utils/chatgpt_utils.py:165
msgid "The OpenAI streaming LLM operator."
msgstr "OpenAI 流式 LLM 算子。"
#: ../dbgpt/model/utils/chatgpt_utils.py:166
#: ../dbgpt/model/utils/chatgpt_utils.py:169
msgid "Upstream Model Output"
msgstr "上游模型输出"
#: ../dbgpt/model/utils/chatgpt_utils.py:170
#: ../dbgpt/model/utils/chatgpt_utils.py:173
msgid "The model output of upstream."
msgstr "上游模型的输出。"
#: ../dbgpt/model/utils/chatgpt_utils.py:180
#: ../dbgpt/model/utils/chatgpt_utils.py:183
msgid "The model output after transformed to openai stream format."
msgstr "转换为 OpenAI 流格式后的模型输出。"
msgstr "转换为 OpenAI 流格式后的模型输出。"

View File

@ -8,7 +8,7 @@ msgid ""
msgstr ""
"Project-Id-Version: PACKAGE VERSION\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2024-03-24 22:53+0800\n"
"POT-Creation-Date: 2024-06-17 00:05+0800\n"
"PO-Revision-Date: 2024-03-23 11:22+0800\n"
"Last-Translator: Automatically generated\n"
"Language-Team: none\n"
@ -26,96 +26,99 @@ msgid "Retrieve candidates from vector store."
msgstr "从向量存储中检索候选项。"
#: ../dbgpt/rag/operators/embedding.py:31
#: ../dbgpt/rag/operators/embedding.py:130
msgid "Vector Store Connector"
msgstr "向量存储连接器"
msgid "Storage Index Store"
msgstr "索引存储"
#: ../dbgpt/rag/operators/embedding.py:34
#: ../dbgpt/rag/operators/embedding.py:133
#: ../dbgpt/rag/operators/embedding.py:134
msgid "The vector store connector."
msgstr "向量存储连接器。"
#: ../dbgpt/rag/operators/embedding.py:37
#: ../dbgpt/rag/operators/embedding.py:38
msgid "Top K"
msgstr "前 K 个"
#: ../dbgpt/rag/operators/embedding.py:40
#: ../dbgpt/rag/operators/embedding.py:41
msgid "The number of candidates."
msgstr "候选项的数量。"
#: ../dbgpt/rag/operators/embedding.py:43
#: ../dbgpt/rag/operators/embedding.py:44
msgid "Score Threshold"
msgstr "分数阈值"
#: ../dbgpt/rag/operators/embedding.py:47
#: ../dbgpt/rag/operators/embedding.py:48
msgid ""
"The score threshold, if score of candidate is less than it, it will be "
"filtered."
msgstr "分数阈值,如果候选项的分数低于此值,则会被过滤。"
#: ../dbgpt/rag/operators/embedding.py:54 ../dbgpt/rag/retriever/rewrite.py:24
#: ../dbgpt/rag/operators/embedding.py:55 ../dbgpt/rag/retriever/rewrite.py:24
msgid "Query Rewrite"
msgstr "查询重写"
#: ../dbgpt/rag/operators/embedding.py:57
#: ../dbgpt/rag/operators/embedding.py:58
msgid "The query rewrite resource."
msgstr "查询重写资源。"
#: ../dbgpt/rag/operators/embedding.py:62
#: ../dbgpt/rag/operators/embedding.py:63
msgid "Rerank"
msgstr "重新排序"
#: ../dbgpt/rag/operators/embedding.py:65
#: ../dbgpt/rag/operators/embedding.py:66
msgid "The rerank."
msgstr "重新排序。"
#: ../dbgpt/rag/operators/embedding.py:72
#: ../dbgpt/rag/operators/embedding.py:73
msgid "Query"
msgstr "查询"
#: ../dbgpt/rag/operators/embedding.py:75
#: ../dbgpt/rag/operators/embedding.py:76
msgid "The query to retrieve."
msgstr "要检索的查询。"
#: ../dbgpt/rag/operators/embedding.py:80
#: ../dbgpt/rag/operators/embedding.py:81
msgid "Candidates"
msgstr "候选项"
#: ../dbgpt/rag/operators/embedding.py:83
#: ../dbgpt/rag/operators/embedding.py:84
msgid "The retrieved candidates."
msgstr "已检索的候选项。"
#: ../dbgpt/rag/operators/embedding.py:124
#: ../dbgpt/rag/operators/embedding.py:125
msgid "Embedding Assembler Operator"
msgstr "嵌入式装配算子"
#: ../dbgpt/rag/operators/embedding.py:126
#: ../dbgpt/rag/operators/embedding.py:127
msgid "Load knowledge and assemble embedding chunks to vector store."
msgstr "加载知识并将嵌入式块组装到向量存储中。"
#: ../dbgpt/rag/operators/embedding.py:136 ../dbgpt/rag/chunk_manager.py:24
#: ../dbgpt/rag/operators/embedding.py:131
msgid "Vector Store Connector"
msgstr "向量存储连接器"
#: ../dbgpt/rag/operators/embedding.py:138 ../dbgpt/rag/chunk_manager.py:24
msgid "Chunk Parameters"
msgstr "块参数"
#: ../dbgpt/rag/operators/embedding.py:139
#: ../dbgpt/rag/operators/embedding.py:141
msgid "The chunk parameters."
msgstr "块参数。"
#: ../dbgpt/rag/operators/embedding.py:146 ../dbgpt/rag/operators/summary.py:23
#: ../dbgpt/rag/operators/embedding.py:148 ../dbgpt/rag/operators/summary.py:23
#: ../dbgpt/rag/operators/knowledge.py:39
msgid "Knowledge"
msgstr "知识"
#: ../dbgpt/rag/operators/embedding.py:149
#: ../dbgpt/rag/operators/embedding.py:151
msgid "The knowledge to be loaded."
msgstr ""
#: ../dbgpt/rag/operators/embedding.py:154
#: ../dbgpt/rag/operators/embedding.py:156
#: ../dbgpt/rag/operators/knowledge.py:121
msgid "Chunks"
msgstr "块"
#: ../dbgpt/rag/operators/embedding.py:158
#: ../dbgpt/rag/operators/embedding.py:160
msgid "The assembled chunks, it has been persisted to vector store."
msgstr "已组装的块,已持久化到向量存储中。"
@ -124,18 +127,16 @@ msgid "Summary Operator"
msgstr ""
#: ../dbgpt/rag/operators/summary.py:20
#, fuzzy
msgid "The summary assembler operator."
msgstr "嵌入式装配算子"
msgstr "总结装备算子"
#: ../dbgpt/rag/operators/summary.py:23
#, fuzzy
msgid "Knowledge datasource"
msgstr "知识数据源"
#: ../dbgpt/rag/operators/summary.py:28
msgid "Document summary"
msgstr ""
msgstr "总结文档"
#: ../dbgpt/rag/operators/summary.py:36 ../dbgpt/rag/operators/rewrite.py:36
#: ../dbgpt/rag/retriever/rewrite.py:36
@ -143,82 +144,73 @@ msgid "LLM Client"
msgstr "LLM 客户端"
#: ../dbgpt/rag/operators/summary.py:41 ../dbgpt/rag/operators/rewrite.py:39
#, fuzzy
msgid "The LLM Client."
msgstr "LLM 客户端"
#: ../dbgpt/rag/operators/summary.py:44 ../dbgpt/rag/operators/rewrite.py:42
#, fuzzy
msgid "Model name"
msgstr "模型名称"
#: ../dbgpt/rag/operators/summary.py:49
#, fuzzy
msgid "LLM model name"
msgstr "LLM 模型名称。"
#: ../dbgpt/rag/operators/summary.py:52 ../dbgpt/rag/operators/summary.py:57
#: ../dbgpt/rag/operators/summary.py:65
#, fuzzy
msgid "prompt language"
msgstr "语言"
msgstr "prompt 语言"
#: ../dbgpt/rag/operators/summary.py:60
msgid "Max iteration with LLM"
msgstr ""
msgstr "LLM 最大迭代次数"
#: ../dbgpt/rag/operators/summary.py:68
msgid "Concurrency limit with LLM"
msgstr ""
msgstr "LLM 并发限制"
#: ../dbgpt/rag/operators/summary.py:73
msgid "The concurrency limit with llm"
msgstr ""
#: ../dbgpt/rag/operators/rewrite.py:16
#, fuzzy
msgid "Query Rewrite Operator"
msgstr "查询重写"
msgstr "查询重写算子"
#: ../dbgpt/rag/operators/rewrite.py:19
#, fuzzy
msgid "Query rewrite operator."
msgstr "查询重写。"
msgstr "查询重写算子。"
#: ../dbgpt/rag/operators/rewrite.py:22
#, fuzzy
msgid "Query context"
msgstr "查询重写"
msgstr "查询上下文"
#: ../dbgpt/rag/operators/rewrite.py:22
msgid "query context"
msgstr ""
msgstr "查询上下文"
#: ../dbgpt/rag/operators/rewrite.py:27 ../dbgpt/rag/operators/rewrite.py:31
msgid "Rewritten queries"
msgstr ""
msgstr "重写后的查询"
#: ../dbgpt/rag/operators/rewrite.py:47
#, fuzzy
msgid "LLM model name."
msgstr "LLM 模型名称。"
#: ../dbgpt/rag/operators/rewrite.py:50
#, fuzzy
msgid "Prompt language"
msgstr "语言"
msgstr "Prompt 语言"
#: ../dbgpt/rag/operators/rewrite.py:55
msgid "Prompt language."
msgstr ""
msgstr "Prompt 语言。"
#: ../dbgpt/rag/operators/rewrite.py:58
msgid "Number of results"
msgstr ""
msgstr "结果数量"
#: ../dbgpt/rag/operators/rewrite.py:63
msgid "rewrite query number."
msgstr ""
msgstr "重写查询数量。"
#: ../dbgpt/rag/operators/knowledge.py:23
msgid "Knowledge Operator"
@ -234,7 +226,7 @@ msgstr "知识数据源"
#: ../dbgpt/rag/operators/knowledge.py:34
msgid "knowledge datasource, which can be a document, url, or text."
msgstr ""
msgstr "知识数据源,可以是文档、网址或文本。"
#: ../dbgpt/rag/operators/knowledge.py:42
msgid "Knowledge object."
@ -267,7 +259,8 @@ msgstr "将块转换为字符串。"
#: ../dbgpt/rag/operators/knowledge.py:111 ../dbgpt/rag/chunk_manager.py:71
#: ../dbgpt/rag/text_splitter/text_splitter.py:211
#: ../dbgpt/rag/text_splitter/text_splitter.py:422
#: ../dbgpt/rag/text_splitter/text_splitter.py:804
#: ../dbgpt/rag/text_splitter/text_splitter.py:821
#: ../dbgpt/rag/text_splitter/text_splitter.py:864
msgid "Separator"
msgstr "分隔符"
@ -287,81 +280,81 @@ msgstr "字符串"
msgid "The output string."
msgstr "输出的字符串。"
#: ../dbgpt/rag/embedding/embedding_factory.py:230
#: ../dbgpt/rag/embedding/embedding_factory.py:250
msgid "Default Embeddings"
msgstr "默认嵌入式"
#: ../dbgpt/rag/embedding/embedding_factory.py:234
#: ../dbgpt/rag/embedding/embedding_factory.py:254
msgid "Default embeddings(using default embedding model of current system)"
msgstr "默认嵌入式(使用当前系统的默认嵌入式模型)"
#: ../dbgpt/rag/embedding/embeddings.py:28
#: ../dbgpt/rag/embedding/embeddings.py:27
msgid "HuggingFace Embeddings"
msgstr "HuggingFace 嵌入式"
#: ../dbgpt/rag/embedding/embeddings.py:31
#: ../dbgpt/rag/embedding/embeddings.py:30
msgid "HuggingFace sentence_transformers embedding models."
msgstr "HuggingFace 句子转换嵌入式模型。"
#: ../dbgpt/rag/embedding/embeddings.py:34
#: ../dbgpt/rag/embedding/embeddings.py:141
#: ../dbgpt/rag/embedding/embeddings.py:348
#: ../dbgpt/rag/embedding/embeddings.py:33
#: ../dbgpt/rag/embedding/embeddings.py:139
#: ../dbgpt/rag/embedding/embeddings.py:346
#: ../dbgpt/rag/embedding/embeddings.py:461
#: ../dbgpt/rag/embedding/embeddings.py:553
#: ../dbgpt/rag/embedding/embeddings.py:566
#: ../dbgpt/rag/retriever/rewrite.py:30
msgid "Model Name"
msgstr "模型名称"
#: ../dbgpt/rag/embedding/embeddings.py:39
#: ../dbgpt/rag/embedding/embeddings.py:146
#: ../dbgpt/rag/embedding/embeddings.py:38
#: ../dbgpt/rag/embedding/embeddings.py:144
msgid "Model name to use."
msgstr "要使用的模型名称。"
#: ../dbgpt/rag/embedding/embeddings.py:135
#: ../dbgpt/rag/embedding/embeddings.py:133
msgid "HuggingFace Instructor Embeddings"
msgstr "HuggingFace 指导嵌入式"
#: ../dbgpt/rag/embedding/embeddings.py:138
#: ../dbgpt/rag/embedding/embeddings.py:136
msgid "HuggingFace Instructor embeddings."
msgstr "HuggingFace 指导嵌入式。"
#: ../dbgpt/rag/embedding/embeddings.py:149
#: ../dbgpt/rag/embedding/embeddings.py:147
msgid "Embed Instruction"
msgstr "嵌入指令"
#: ../dbgpt/rag/embedding/embeddings.py:154
#: ../dbgpt/rag/embedding/embeddings.py:152
msgid "Instruction to use for embedding documents."
msgstr "用于嵌入文档的指令。"
#: ../dbgpt/rag/embedding/embeddings.py:157
#: ../dbgpt/rag/embedding/embeddings.py:155
msgid "Query Instruction"
msgstr "查询指令"
#: ../dbgpt/rag/embedding/embeddings.py:162
#: ../dbgpt/rag/embedding/embeddings.py:160
msgid "Instruction to use for embedding query."
msgstr "用于嵌入查询的指令。"
#: ../dbgpt/rag/embedding/embeddings.py:336
#: ../dbgpt/rag/embedding/embeddings.py:334
msgid "HuggingFace Inference API Embeddings"
msgstr "HuggingFace 推理 API 嵌入式"
#: ../dbgpt/rag/embedding/embeddings.py:339
#: ../dbgpt/rag/embedding/embeddings.py:337
msgid "HuggingFace Inference API embeddings."
msgstr "HuggingFace 推理 API 嵌入式。"
#: ../dbgpt/rag/embedding/embeddings.py:342
#: ../dbgpt/rag/embedding/embeddings.py:340
#: ../dbgpt/rag/embedding/embeddings.py:455
#: ../dbgpt/rag/embedding/embeddings.py:545
#: ../dbgpt/rag/embedding/embeddings.py:558
msgid "API Key"
msgstr "API 密钥"
#: ../dbgpt/rag/embedding/embeddings.py:345
#: ../dbgpt/rag/embedding/embeddings.py:343
msgid "Your API key for the HuggingFace Inference API."
msgstr "您用于 HuggingFace 推理 API 的 API 密钥。"
#: ../dbgpt/rag/embedding/embeddings.py:353
#: ../dbgpt/rag/embedding/embeddings.py:351
#: ../dbgpt/rag/embedding/embeddings.py:466
#: ../dbgpt/rag/embedding/embeddings.py:558
#: ../dbgpt/rag/embedding/embeddings.py:571
msgid "The name of the model to use for text embeddings."
msgstr "用于文本嵌入的模型名称。"
@ -377,31 +370,31 @@ msgstr "Jina AI 嵌入式。"
msgid "Your API key for the Jina AI API."
msgstr "您用于 Jina AI API 的 API 密钥。"
#: ../dbgpt/rag/embedding/embeddings.py:531
#: ../dbgpt/rag/embedding/embeddings.py:544
msgid "OpenAPI Embeddings"
msgstr "OpenAPI 嵌入式"
#: ../dbgpt/rag/embedding/embeddings.py:534
#: ../dbgpt/rag/embedding/embeddings.py:547
msgid "OpenAPI embeddings."
msgstr "OpenAPI 嵌入式。"
#: ../dbgpt/rag/embedding/embeddings.py:537
#: ../dbgpt/rag/embedding/embeddings.py:550
msgid "API URL"
msgstr "API 网址"
#: ../dbgpt/rag/embedding/embeddings.py:542
#: ../dbgpt/rag/embedding/embeddings.py:555
msgid "The URL of the embeddings API."
msgstr "嵌入式 API 的网址。"
#: ../dbgpt/rag/embedding/embeddings.py:550
#: ../dbgpt/rag/embedding/embeddings.py:563
msgid "Your API key for the Open API."
msgstr "您用于 Open API 的 API 密钥。"
#: ../dbgpt/rag/embedding/embeddings.py:561
#: ../dbgpt/rag/embedding/embeddings.py:574
msgid "Timeout"
msgstr "超时时间"
#: ../dbgpt/rag/embedding/embeddings.py:566
#: ../dbgpt/rag/embedding/embeddings.py:579
msgid "The timeout for the request in seconds."
msgstr "请求的超时时间(秒)。"
@ -439,6 +432,7 @@ msgid "Chunk size"
msgstr "块大小"
#: ../dbgpt/rag/chunk_manager.py:63
#: ../dbgpt/rag/text_splitter/text_splitter.py:414
msgid "Chunk Overlap"
msgstr "块重叠"
@ -454,22 +448,46 @@ msgstr "启用合并"
msgid "Enable chunk merge by chunk_size."
msgstr "通过块大小启用块合并。"
#: ../dbgpt/rag/retriever/rerank.py:56
#: ../dbgpt/rag/retriever/rerank.py:88
msgid "Default Ranker"
msgstr "默认排序器"
#: ../dbgpt/rag/retriever/rerank.py:59
#: ../dbgpt/rag/retriever/rerank.py:91
msgid "Default ranker(Rank by score)."
msgstr "默认排序器(按分数排序)。"
#: ../dbgpt/rag/retriever/rerank.py:62
#: ../dbgpt/rag/retriever/rerank.py:94 ../dbgpt/rag/retriever/rerank.py:184
msgid "Top k"
msgstr "前 k 个"
#: ../dbgpt/rag/retriever/rerank.py:65
#: ../dbgpt/rag/retriever/rerank.py:97 ../dbgpt/rag/retriever/rerank.py:187
msgid "The number of top k documents."
msgstr "前 k 个文档的数量。"
#: ../dbgpt/rag/retriever/rerank.py:178
msgid "CrossEncoder Rerank"
msgstr ""
#: ../dbgpt/rag/retriever/rerank.py:181
msgid "CrossEncoder ranker."
msgstr ""
#: ../dbgpt/rag/retriever/rerank.py:190
msgid "Rerank Model"
msgstr "重排序模型"
#: ../dbgpt/rag/retriever/rerank.py:193
msgid "rerank model name, e.g., 'BAAI/bge-reranker-base'."
msgstr "重排模型,例如 'BAAI/bge-reranker-base'。"
#: ../dbgpt/rag/retriever/rerank.py:196
msgid "device"
msgstr "设备"
#: ../dbgpt/rag/retriever/rerank.py:199
msgid "device name, e.g., 'cpu'."
msgstr "设备名称,例如 'cpu'。"
#: ../dbgpt/rag/retriever/rewrite.py:27
msgid "Query rewrite."
msgstr "查询重写。"
@ -496,8 +514,8 @@ msgstr "字符文本分割器"
#: ../dbgpt/rag/text_splitter/text_splitter.py:214
#: ../dbgpt/rag/text_splitter/text_splitter.py:425
#: ../dbgpt/rag/text_splitter/text_splitter.py:807
#: ../dbgpt/rag/text_splitter/text_splitter.py:850
#: ../dbgpt/rag/text_splitter/text_splitter.py:824
#: ../dbgpt/rag/text_splitter/text_splitter.py:867
msgid "Separator to split the text."
msgstr "用于分割文本的分隔符。"
@ -513,6 +531,10 @@ msgstr "递归按字符分割文本。"
msgid "Spacy Text Splitter"
msgstr "Spacy 文本分割器"
#: ../dbgpt/rag/text_splitter/text_splitter.py:335
msgid "Pipeline"
msgstr ""
#: ../dbgpt/rag/text_splitter/text_splitter.py:338
msgid "Spacy pipeline to use for tokenization."
msgstr "用于标记化的 Spacy 流水线。"
@ -525,6 +547,10 @@ msgstr "使用 Spacy 按句子分割文本。"
msgid "Markdown Header Text Splitter"
msgstr "Markdown 标题文本分割器"
#: ../dbgpt/rag/text_splitter/text_splitter.py:398
msgid "Return Each Line"
msgstr ""
#: ../dbgpt/rag/text_splitter/text_splitter.py:401
msgid "Return each line with associated headers."
msgstr "返回每行及其相关标题。"
@ -533,22 +559,26 @@ msgstr "返回每行及其相关标题。"
msgid "Size of each chunk."
msgstr "每个块的大小。"
#: ../dbgpt/rag/text_splitter/text_splitter.py:417
msgid "Overlap between chunks."
msgstr "块之间的重叠 token 数。"
#: ../dbgpt/rag/text_splitter/text_splitter.py:430
msgid "Split markdown text by headers."
msgstr "通过标题分割 Markdown 文本。"
#: ../dbgpt/rag/text_splitter/text_splitter.py:799
#: ../dbgpt/rag/text_splitter/text_splitter.py:816
msgid "Separator Text Splitter"
msgstr "分隔符文本分割器"
#: ../dbgpt/rag/text_splitter/text_splitter.py:812
#: ../dbgpt/rag/text_splitter/text_splitter.py:829
msgid "Split text by separator."
msgstr "通过分隔符分割文本。"
#: ../dbgpt/rag/text_splitter/text_splitter.py:842
#: ../dbgpt/rag/text_splitter/text_splitter.py:859
msgid "Page Text Splitter"
msgstr "页面文本分割器"
#: ../dbgpt/rag/text_splitter/text_splitter.py:855
#: ../dbgpt/rag/text_splitter/text_splitter.py:872
msgid "Split text by page."
msgstr "按页面分割文本。"

View File

@ -8,7 +8,7 @@ msgid ""
msgstr ""
"Project-Id-Version: PACKAGE VERSION\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2024-03-24 22:53+0800\n"
"POT-Creation-Date: 2024-06-17 00:05+0800\n"
"PO-Revision-Date: 2024-03-24 11:24+0800\n"
"Last-Translator: Automatically generated\n"
"Language-Team: none\n"
@ -19,93 +19,113 @@ msgstr ""
#: ../dbgpt/serve/rag/operators/knowledge_space.py:47
msgid "Knowledge Space Operator"
msgstr ""
msgstr "知识空间算子"
#: ../dbgpt/serve/rag/operators/knowledge_space.py:50
msgid "knowledge space retriever operator."
msgstr ""
msgstr "知识空间检索算子。"
#: ../dbgpt/serve/rag/operators/knowledge_space.py:51
msgid "Query"
msgstr ""
msgstr "查询"
#: ../dbgpt/serve/rag/operators/knowledge_space.py:51
msgid "user query"
msgstr ""
msgstr "用户查询"
#: ../dbgpt/serve/rag/operators/knowledge_space.py:54
#: ../dbgpt/serve/rag/operators/knowledge_space.py:57
msgid "related chunk content"
msgstr ""
msgstr "相关块内容"
#: ../dbgpt/serve/rag/operators/knowledge_space.py:62
msgid "Space Name"
msgstr ""
msgstr "空间名称"
#: ../dbgpt/serve/rag/operators/knowledge_space.py:68
msgid "space name."
msgstr ""
msgstr "空间名称。"
#: ../dbgpt/serve/rag/operators/knowledge_space.py:143
msgid "Knowledge Space Prompt Builder Operator"
msgstr ""
msgstr "知识空间提示生成算子"
#: ../dbgpt/serve/rag/operators/knowledge_space.py:145
msgid "Build messages from prompt template and chat history."
msgstr ""
msgstr "从提示模板和聊天历史构建消息。"
#: ../dbgpt/serve/rag/operators/knowledge_space.py:150
msgid "Chat Prompt Template"
msgstr ""
msgstr "聊天提示模板"
#: ../dbgpt/serve/rag/operators/knowledge_space.py:153
msgid "The chat prompt template."
msgstr ""
msgstr "聊天提示模板。"
#: ../dbgpt/serve/rag/operators/knowledge_space.py:156
msgid "History Key"
msgstr ""
msgstr "历史键"
#: ../dbgpt/serve/rag/operators/knowledge_space.py:161
msgid "The key of history in prompt dict."
msgstr ""
msgstr "提示字典中历史的键。"
#: ../dbgpt/serve/rag/operators/knowledge_space.py:164
msgid "String History"
msgstr ""
msgstr "字符串历史"
#: ../dbgpt/serve/rag/operators/knowledge_space.py:169
msgid "Whether to convert the history to string."
msgstr ""
msgstr "是否将历史转换为字符串。"
#: ../dbgpt/serve/rag/operators/knowledge_space.py:174
#: ../dbgpt/serve/rag/operators/knowledge_space.py:178
msgid "user input"
msgstr ""
msgstr "用户输入"
#: ../dbgpt/serve/rag/operators/knowledge_space.py:181
msgid "space related context"
msgstr ""
msgstr "空间相关上下文"
#: ../dbgpt/serve/rag/operators/knowledge_space.py:185
msgid "context of knowledge space."
msgstr ""
msgstr "知识空间的上下文。"
#: ../dbgpt/serve/rag/operators/knowledge_space.py:188
msgid "History"
msgstr ""
msgstr "历史"
#: ../dbgpt/serve/rag/operators/knowledge_space.py:192
msgid "The history."
msgstr ""
msgstr "历史。"
#: ../dbgpt/serve/rag/operators/knowledge_space.py:197
msgid "Formatted Messages"
msgstr ""
msgstr "格式化消息"
#: ../dbgpt/serve/rag/operators/knowledge_space.py:201
msgid "The formatted messages."
msgstr ""
msgstr "格式化的消息。"
#: ../dbgpt/serve/rag/connector.py:39
msgid "Vector Store Connector"
msgstr "向量存储连接器"
#: ../dbgpt/serve/rag/connector.py:44
msgid "Vector Store Type"
msgstr "向量存储类型"
#: ../dbgpt/serve/rag/connector.py:47
msgid "The type of vector store."
msgstr "向量存储的类型。"
#: ../dbgpt/serve/rag/connector.py:51
msgid "Vector Store Implementation"
msgstr "向量存储实现"
#: ../dbgpt/serve/rag/connector.py:54
msgid "The vector store implementation."
msgstr "向量存储的实现。"
#: ../dbgpt/serve/conversation/operators.py:87
msgid "Default Chat History Load Operator"
@ -127,8 +147,8 @@ msgstr "这是模型请求。"
#: ../dbgpt/serve/conversation/operators.py:105
msgid "Stored Messages"
msgstr ""
msgstr "存储的消息"
#: ../dbgpt/serve/conversation/operators.py:108
msgid "The messages stored in the storage."
msgstr ""
msgstr "存储在存储中的消息。"

View File

@ -8,7 +8,7 @@ msgid ""
msgstr ""
"Project-Id-Version: PACKAGE VERSION\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2024-03-24 22:53+0800\n"
"POT-Creation-Date: 2024-06-17 00:05+0800\n"
"PO-Revision-Date: 2024-03-23 11:22+0800\n"
"Last-Translator: Automatically generated\n"
"Language-Team: none\n"
@ -17,220 +17,240 @@ msgstr ""
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
#: ../dbgpt/storage/vector_store/chroma_store.py:21
#: ../dbgpt/storage/vector_store/chroma_store.py:24
#, fuzzy
msgid "Chroma Vector Store"
msgstr "Chroma 向量存储。"
#: ../dbgpt/storage/vector_store/chroma_store.py:24
#: ../dbgpt/storage/vector_store/chroma_store.py:27
msgid "Chroma vector store."
msgstr "Chroma 向量存储。"
#: ../dbgpt/storage/vector_store/chroma_store.py:28
#: ../dbgpt/storage/vector_store/weaviate_store.py:34
#: ../dbgpt/storage/vector_store/chroma_store.py:31
#: ../dbgpt/storage/vector_store/weaviate_store.py:35
msgid "Persist Path"
msgstr ""
#: ../dbgpt/storage/vector_store/chroma_store.py:31
#: ../dbgpt/storage/vector_store/weaviate_store.py:37
#: ../dbgpt/storage/vector_store/chroma_store.py:34
#: ../dbgpt/storage/vector_store/weaviate_store.py:38
msgid "the persist path of vector store."
msgstr "向量存储的持久化路径。"
#: ../dbgpt/storage/vector_store/milvus_store.py:24
#: ../dbgpt/storage/vector_store/milvus_store.py:25
msgid "Milvus Vector Store"
msgstr "Milvus 向量存储"
#: ../dbgpt/storage/vector_store/milvus_store.py:30
#: ../dbgpt/storage/vector_store/milvus_store.py:31
#: ../dbgpt/storage/vector_store/elastic_store.py:31
msgid "Uri"
msgstr "URI"
#: ../dbgpt/storage/vector_store/milvus_store.py:34
#: ../dbgpt/storage/vector_store/milvus_store.py:35
msgid "The uri of milvus store, if not set, will use the default uri."
msgstr "Milvus 存储的 URI如果未设置将使用默认的 URI。"
#: ../dbgpt/storage/vector_store/milvus_store.py:40
#: ../dbgpt/storage/vector_store/milvus_store.py:41
#: ../dbgpt/storage/vector_store/elastic_store.py:42
msgid "Port"
msgstr "端口"
#: ../dbgpt/storage/vector_store/milvus_store.py:44
#: ../dbgpt/storage/vector_store/milvus_store.py:45
msgid "The port of milvus store, if not set, will use the default port."
msgstr "Milvus 存储的端口,如果未设置,将使用默认端口。"
#: ../dbgpt/storage/vector_store/milvus_store.py:50
#: ../dbgpt/storage/vector_store/milvus_store.py:51
#: ../dbgpt/storage/vector_store/elastic_store.py:53
msgid "Alias"
msgstr "别名"
#: ../dbgpt/storage/vector_store/milvus_store.py:54
#: ../dbgpt/storage/vector_store/milvus_store.py:55
msgid "The alias of milvus store, if not set, will use the default alias."
msgstr "Milvus 存储的别名,如果未设置,将使用默认别名。"
#: ../dbgpt/storage/vector_store/milvus_store.py:60
#: ../dbgpt/storage/vector_store/milvus_store.py:61
msgid "Primary Field"
msgstr "主字段"
#: ../dbgpt/storage/vector_store/milvus_store.py:64
#: ../dbgpt/storage/vector_store/milvus_store.py:65
msgid ""
"The primary field of milvus store, if not set, will use the default primary "
"field."
msgstr "Milvus 存储的主字段,如果未设置,将使用默认的主字段。"
#: ../dbgpt/storage/vector_store/milvus_store.py:71
#: ../dbgpt/storage/vector_store/milvus_store.py:72
msgid "Text Field"
msgstr "文本字段"
#: ../dbgpt/storage/vector_store/milvus_store.py:75
#: ../dbgpt/storage/vector_store/milvus_store.py:76
msgid ""
"The text field of milvus store, if not set, will use the default text field."
msgstr "Milvus 存储的文本字段,如果未设置,将使用默认的文本字段。"
#: ../dbgpt/storage/vector_store/milvus_store.py:82
#: ../dbgpt/storage/vector_store/milvus_store.py:83
msgid "Embedding Field"
msgstr "嵌入字段"
#: ../dbgpt/storage/vector_store/milvus_store.py:86
#: ../dbgpt/storage/vector_store/milvus_store.py:87
msgid ""
"The embedding field of milvus store, if not set, will use the default "
"embedding field."
msgstr "Milvus 存储的嵌入字段,如果未设置,将使用默认的嵌入字段。"
#: ../dbgpt/storage/vector_store/milvus_store.py:93
#: ../dbgpt/storage/vector_store/milvus_store.py:94
msgid "Milvus vector store."
msgstr "Milvus 向量存储。"
#: ../dbgpt/storage/vector_store/connector.py:30
msgid "Vector Store Connector"
msgstr "向量存储连接器"
#: ../dbgpt/storage/vector_store/oceanbase_store.py:634
msgid "OceanBase Vector Store"
msgstr "OceanBase 向量存储"
#: ../dbgpt/storage/vector_store/connector.py:35
msgid "Vector Store Type"
msgstr "向量存储类型"
#: ../dbgpt/storage/vector_store/oceanbase_store.py:640
msgid "OceanBase Host"
msgstr "OceanBase 主机地址"
#: ../dbgpt/storage/vector_store/connector.py:38
msgid "The type of vector store."
msgstr "向量存储的类型。"
#: ../dbgpt/storage/vector_store/oceanbase_store.py:643
msgid "oceanbase host"
msgstr "oceanbase 主机地址"
#: ../dbgpt/storage/vector_store/connector.py:42
msgid "Vector Store Implementation"
msgstr "向量存储实现"
#: ../dbgpt/storage/vector_store/oceanbase_store.py:648
msgid "OceanBase Port"
msgstr "OceanBase 端口"
#: ../dbgpt/storage/vector_store/connector.py:45
msgid "The vector store implementation."
msgstr "向量存储的实现。"
#: ../dbgpt/storage/vector_store/oceanbase_store.py:651
msgid "oceanbase port"
msgstr "oceanbase 端口"
#: ../dbgpt/storage/vector_store/weaviate_store.py:17
#: ../dbgpt/storage/vector_store/oceanbase_store.py:656
msgid "OceanBase User"
msgstr "OceanBase 用户名"
#: ../dbgpt/storage/vector_store/oceanbase_store.py:659
msgid "user to login"
msgstr "登录用户名"
#: ../dbgpt/storage/vector_store/oceanbase_store.py:664
msgid "OceanBase Password"
msgstr "OceanBase 密码"
#: ../dbgpt/storage/vector_store/oceanbase_store.py:667
msgid "password to login"
msgstr "登录密码"
#: ../dbgpt/storage/vector_store/oceanbase_store.py:672
msgid "OceanBase Database"
msgstr "OceanBase 数据库名"
#: ../dbgpt/storage/vector_store/oceanbase_store.py:675
msgid "database for vector tables"
msgstr "存放向量表的数据库名"
#: ../dbgpt/storage/vector_store/weaviate_store.py:18
msgid "Weaviate Vector Store"
msgstr "Weaviate 向量存储"
#: ../dbgpt/storage/vector_store/weaviate_store.py:20
#: ../dbgpt/storage/vector_store/weaviate_store.py:21
msgid "Weaviate vector store."
msgstr "Weaviate 向量存储。"
#: ../dbgpt/storage/vector_store/weaviate_store.py:24
#: ../dbgpt/storage/vector_store/weaviate_store.py:25
msgid "Weaviate URL"
msgstr "Weaviate URL"
#: ../dbgpt/storage/vector_store/weaviate_store.py:28
#: ../dbgpt/storage/vector_store/weaviate_store.py:29
msgid "weaviate url address, if not set, will use the default url."
msgstr "Weaviate 的 URL 地址,如果未设置,将使用默认的 URL。"
#: ../dbgpt/storage/vector_store/pgvector_store.py:19
#: ../dbgpt/storage/vector_store/elastic_store.py:25
#, fuzzy
msgid "ElasticSearch Vector Store"
msgstr "Weaviate 向量存储"
#: ../dbgpt/storage/vector_store/elastic_store.py:35
#, fuzzy
msgid "The uri of elasticsearch store, if not set, will use the default uri."
msgstr "Milvus 存储的 URI如果未设置将使用默认的 URI。"
#: ../dbgpt/storage/vector_store/elastic_store.py:46
#, fuzzy
msgid "The port of elasticsearch store, if not set, will use the default port."
msgstr "Milvus 存储的端口,如果未设置,将使用默认端口。"
#: ../dbgpt/storage/vector_store/elastic_store.py:57
#, fuzzy
msgid ""
"The alias of elasticsearch store, if not set, will use the default alias."
msgstr "Milvus 存储的别名,如果未设置,将使用默认别名。"
#: ../dbgpt/storage/vector_store/elastic_store.py:64
msgid "Index Name"
msgstr ""
#: ../dbgpt/storage/vector_store/elastic_store.py:68
#, fuzzy
msgid ""
"The index name of elasticsearch store, if not set, will use the default "
"index name."
msgstr "向量存储的名称,如果未设置,将使用默认名称。"
#: ../dbgpt/storage/vector_store/elastic_store.py:75
#, fuzzy
msgid "Elasticsearch vector store."
msgstr "Weaviate 向量存储。"
#: ../dbgpt/storage/vector_store/pgvector_store.py:21
msgid "PG Vector Store"
msgstr "PG 向量存储"
#: ../dbgpt/storage/vector_store/pgvector_store.py:25
#: ../dbgpt/storage/vector_store/pgvector_store.py:27
msgid "Connection String"
msgstr "连接字符串"
#: ../dbgpt/storage/vector_store/pgvector_store.py:29
#: ../dbgpt/storage/vector_store/pgvector_store.py:31
msgid ""
"The connection string of vector store, if not set, will use the default "
"connection string."
msgstr "向量存储的连接字符串,如果未设置,将使用默认的连接字符串。"
#: ../dbgpt/storage/vector_store/oceanbase_store.py:542
msgid "OceanBase Vector Store"
msgstr "OceanBase 向量存储"
#: ../dbgpt/storage/vector_store/oceanbase_store.py:548
msgid "OceanBase Host"
msgstr "OceanBase 主机地址"
#: ../dbgpt/storage/vector_store/oceanbase_store.py:552
msgid "oceanbase host"
msgstr "oceanbase 主机地址"
#: ../dbgpt/storage/vector_store/oceanbase_store.py:558
msgid "OceanBase Port"
msgstr "OceanBase 端口"
#: ../dbgpt/storage/vector_store/oceanbase_store.py:562
msgid "oceanbase port"
msgstr "oceanbase 端口"
#: ../dbgpt/storage/vector_store/oceanbase_store.py:568
msgid "OceanBase User"
msgstr "OceanBase 用户名"
#: ../dbgpt/storage/vector_store/oceanbase_store.py:572
msgid "user to login"
msgstr "登录用户名"
#: ../dbgpt/storage/vector_store/oceanbase_store.py:578
msgid "OceanBase Password"
msgstr "OceanBase 密码"
#: ../dbgpt/storage/vector_store/oceanbase_store.py:582
msgid "password to login"
msgstr "登录密码"
#: ../dbgpt/storage/vector_store/oceanbase_store.py:588
msgid "OceanBase Database"
msgstr "OceanBase 数据库名"
#: ../dbgpt/storage/vector_store/oceanbase_store.py:592
msgid "database for vector tables"
msgstr "存放向量表的数据库名"
#: ../dbgpt/storage/vector_store/base.py:19
#: ../dbgpt/storage/vector_store/base.py:20
msgid "Collection Name"
msgstr "集合名称"
#: ../dbgpt/storage/vector_store/base.py:23
#: ../dbgpt/storage/vector_store/base.py:24
msgid "The name of vector store, if not set, will use the default name."
msgstr "向量存储的名称,如果未设置,将使用默认名称。"
#: ../dbgpt/storage/vector_store/base.py:29
#: ../dbgpt/storage/vector_store/base.py:30
msgid "User"
msgstr "用户"
#: ../dbgpt/storage/vector_store/base.py:33
#: ../dbgpt/storage/vector_store/base.py:34
msgid "The user of vector store, if not set, will use the default user."
msgstr "向量存储的用户,如果未设置,将使用默认用户。"
#: ../dbgpt/storage/vector_store/base.py:39
#: ../dbgpt/storage/vector_store/base.py:40
msgid "Password"
msgstr "密码"
#: ../dbgpt/storage/vector_store/base.py:43
#: ../dbgpt/storage/vector_store/base.py:44
msgid ""
"The password of vector store, if not set, will use the default password."
msgstr "向量存储的密码,如果未设置,将使用默认密码。"
#: ../dbgpt/storage/vector_store/base.py:50
#: ../dbgpt/storage/vector_store/base.py:51
msgid "Embedding Function"
msgstr "嵌入函数"
#: ../dbgpt/storage/vector_store/base.py:54
#: ../dbgpt/storage/vector_store/base.py:55
msgid ""
"The embedding function of vector store, if not set, will use the default "
"embedding function."
msgstr "向量存储的嵌入函数,如果未设置,将使用默认的嵌入函数。"
#: ../dbgpt/storage/vector_store/base.py:61
#: ../dbgpt/storage/vector_store/base.py:62
msgid "Max Chunks Once Load"
msgstr "一次加载的最大块数"
#: ../dbgpt/storage/vector_store/base.py:65
#: ../dbgpt/storage/vector_store/base.py:66
msgid ""
"The max number of chunks to load at once. If your document is large, you can "
"set this value to a larger number to speed up the loading process. Default "
@ -239,14 +259,29 @@ msgstr ""
"一次加载的最大块数。如果您的文档很大,可以将此值设置为较大的数字,以加快加载"
"过程。默认值为 10。"
#: ../dbgpt/storage/vector_store/base.py:73
#: ../dbgpt/storage/vector_store/base.py:74
msgid "Max Threads"
msgstr "最大线程数"
#: ../dbgpt/storage/vector_store/base.py:77
#: ../dbgpt/storage/vector_store/base.py:78
msgid ""
"The max number of threads to use. Default is 1. If you set this bigger than "
"1, please make sure your vector store is thread-safe."
msgstr ""
"要使用的最大线程数。默认值为 1。如果您将此值设置为大于 1请确保您的向量存储"
"是线程安全的。"
#~ msgid "Vector Store Connector"
#~ msgstr "向量存储连接器"
#~ msgid "Vector Store Type"
#~ msgstr "向量存储类型"
#~ msgid "The type of vector store."
#~ msgstr "向量存储的类型。"
#~ msgid "Vector Store Implementation"
#~ msgstr "向量存储实现"
#~ msgid "The vector store implementation."
#~ msgstr "向量存储的实现。"

View File

@ -8,7 +8,7 @@ msgid ""
msgstr ""
"Project-Id-Version: PACKAGE VERSION\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2024-03-24 22:53+0800\n"
"POT-Creation-Date: 2024-06-17 00:05+0800\n"
"PO-Revision-Date: 2024-03-24 22:53+0800\n"
"Last-Translator: Automatically generated\n"
"Language-Team: none\n"
@ -19,8 +19,37 @@ msgstr ""
#: ../dbgpt/util/serialization/json_serialization.py:23
msgid "Json Serializer"
msgstr ""
msgstr "JSON 序列化器"
#: ../dbgpt/util/serialization/json_serialization.py:26
msgid "The serializer for serializing data with json format."
msgstr ""
msgstr "用于以 JSON 格式序列化数据的序列化器。"
#: ../dbgpt/util/dbgpts/repo.py:72
msgid "Repos"
msgstr "仓库"
#: ../dbgpt/util/dbgpts/repo.py:73 ../dbgpt/util/dbgpts/repo.py:329
#: ../dbgpt/util/dbgpts/repo.py:360
msgid "Repository"
msgstr "存储库"
#: ../dbgpt/util/dbgpts/repo.py:74 ../dbgpt/util/dbgpts/repo.py:361
msgid "Path"
msgstr "路径"
#: ../dbgpt/util/dbgpts/repo.py:327
msgid "dbgpts In All Repos"
msgstr "所有存储库中的 dbgpts"
#: ../dbgpt/util/dbgpts/repo.py:330 ../dbgpt/util/dbgpts/repo.py:359
msgid "Type"
msgstr "类型"
#: ../dbgpt/util/dbgpts/repo.py:331 ../dbgpt/util/dbgpts/repo.py:358
msgid "Name"
msgstr "名称"
#: ../dbgpt/util/dbgpts/repo.py:356
msgid "Installed dbgpts"
msgstr "已安装的 dbgpts"

View File

@ -424,6 +424,8 @@ def core_requires():
setup_spec.extras["client"] = setup_spec.extras["core"] + [
"httpx",
"fastapi>=0.100.0",
# For retry, chromadb needs tenacity<=8.3.0
"tenacity<=8.3.0",
]
# Simple command line dependencies
setup_spec.extras["cli"] = setup_spec.extras["client"] + [
@ -440,6 +442,8 @@ def core_requires():
# https://github.com/eosphoros-ai/DB-GPT/issues/551
# TODO: remove pandas dependency
"pandas==2.0.3",
# numpy should be less than 2.0.0
"numpy>=1.21.0,<2.0.0",
]
# Used only by DB-GPT internally; we should find the smallest dependency set needed to run