Mirror of https://github.com/csunny/DB-GPT.git, synced 2025-09-11 22:09:44 +00:00.
Native data AI application framework based on AWEL+AGENT (#1152)
Co-authored-by: Fangyin Cheng <staneyffer@gmail.com>
Co-authored-by: lcx01800250 <lcx01800250@alibaba-inc.com>
Co-authored-by: licunxing <864255598@qq.com>
Co-authored-by: Aralhi <xiaoping0501@gmail.com>
Co-authored-by: xuyuan23 <643854343@qq.com>
Co-authored-by: aries_ckt <916701291@qq.com>
Co-authored-by: hzh97 <2976151305@qq.com>
This commit is contained in:
@@ -3,8 +3,16 @@ from abc import ABC
|
||||
from typing import Optional
|
||||
|
||||
from dbgpt.component import ComponentType
|
||||
from dbgpt.core import LLMClient
|
||||
from dbgpt.core import LLMClient, ModelOutput, ModelRequest
|
||||
from dbgpt.core.awel import BaseOperator
|
||||
from dbgpt.core.awel.flow import (
|
||||
IOField,
|
||||
OperatorCategory,
|
||||
OperatorType,
|
||||
Parameter,
|
||||
ResourceCategory,
|
||||
ViewMetadata,
|
||||
)
|
||||
from dbgpt.core.operators import BaseLLM, BaseLLMOperator, BaseStreamingLLMOperator
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
@@ -54,6 +62,36 @@ class LLMOperator(MixinLLMOperator, BaseLLMOperator):
|
||||
and if we can't connect to the model serving cluster, we will use the :class:`OpenAILLMClient` as the llm_client.
|
||||
"""
|
||||
|
||||
# AWEL flow view metadata: declares how this operator appears and wires up
# in the visual flow editor (label, category, parameters, and I/O ports).
metadata = ViewMetadata(
    label="LLM Operator",
    name="llm_operator",
    category=OperatorCategory.LLM,
    description="The LLM operator.",
    parameters=[
        # Optional LLM client; defaults to None. Per the class docstring,
        # a fallback client is resolved when none is supplied — confirm
        # against MixinLLMOperator.
        Parameter.build_from(
            "LLM Client",
            "llm_client",
            LLMClient,
            optional=True,
            default=None,
            description="The LLM Client.",
        ),
    ],
    inputs=[
        # Single input port carrying the request to send to the model.
        IOField.build_from(
            "Model Request", "model_request", ModelRequest, "The model request."
        )
    ],
    outputs=[
        # Single output port with the (non-streaming) model result.
        IOField.build_from(
            "Model Output",
            "model_output",
            ModelOutput,
            description="The model output.",
        )
    ],
)
|
||||
|
||||
def __init__(self, llm_client: Optional[LLMClient] = None, **kwargs):
    """Create the operator.

    Args:
        llm_client: Optional LLM client. When None, the class docstring
            states a fallback client is used (e.g. OpenAILLMClient when the
            model serving cluster is unreachable).
        **kwargs: Extra operator options forwarded to BaseLLMOperator.
    """
    # Initialize the mixin side of the MRO first; note **kwargs is
    # deliberately NOT forwarded here.
    super().__init__(llm_client)
    # Explicitly initialize the BaseLLMOperator branch with the same client
    # plus the operator kwargs. Kept as a direct call (not super()) because
    # the two bases are initialized with different argument sets.
    BaseLLMOperator.__init__(self, llm_client, **kwargs)
|
||||
@@ -68,6 +106,38 @@ class StreamingLLMOperator(MixinLLMOperator, BaseStreamingLLMOperator):
|
||||
and if we can't connect to the model serving cluster, we will use the :class:`OpenAILLMClient` as the llm_client.
|
||||
"""
|
||||
|
||||
# AWEL flow view metadata for the streaming variant: same shape as the
# non-streaming LLM operator, but marked STREAMIFY and with a list-typed
# output (one ModelOutput per streamed chunk).
metadata = ViewMetadata(
    label="Streaming LLM Operator",
    name="streaming_llm_operator",
    # Marks this node as a streaming (streamify) operator in the flow engine.
    operator_type=OperatorType.STREAMIFY,
    category=OperatorCategory.LLM,
    description="The streaming LLM operator.",
    parameters=[
        # Optional LLM client; defaults to None. Per the class docstring,
        # a fallback client is resolved when none is supplied — confirm
        # against MixinLLMOperator.
        Parameter.build_from(
            "LLM Client",
            "llm_client",
            LLMClient,
            optional=True,
            default=None,
            description="The LLM Client.",
        ),
    ],
    inputs=[
        # Single input port carrying the request to send to the model.
        IOField.build_from(
            "Model Request", "model_request", ModelRequest, "The model request."
        )
    ],
    outputs=[
        IOField.build_from(
            "Model Output",
            "model_output",
            ModelOutput,
            description="The model output.",
            # Streaming: the port yields a sequence of outputs, not one.
            is_list=True,
        )
    ],
)
|
||||
|
||||
def __init__(self, llm_client: Optional[LLMClient] = None, **kwargs):
    """Create the streaming operator.

    Args:
        llm_client: Optional LLM client. When None, the class docstring
            states a fallback client is used (e.g. OpenAILLMClient when the
            model serving cluster is unreachable).
        **kwargs: Extra operator options forwarded to
            BaseStreamingLLMOperator.
    """
    # Initialize the mixin side of the MRO first; **kwargs is deliberately
    # NOT forwarded here.
    super().__init__(llm_client)
    # Explicitly initialize the streaming base with the same client plus the
    # operator kwargs; direct call because the two bases take different
    # argument sets.
    BaseStreamingLLMOperator.__init__(self, llm_client, **kwargs)
|
||||
|
Reference in New Issue
Block a user