Mirror of https://github.com/csunny/DB-GPT.git (synced 2025-09-13 13:10:29 +00:00)
feat(agent): More general ReAct Agent (#2556)
@@ -6,6 +6,7 @@ import sys
 from typing_extensions import Annotated, Doc
 
 from dbgpt.agent import AgentContext, AgentMemory, LLMConfig, UserProxyAgent
+from dbgpt.agent.expand.actions.react_action import ReActAction, Terminate
 from dbgpt.agent.expand.react_agent import ReActAgent
 from dbgpt.agent.resource import ToolPack, tool
 
@@ -16,17 +17,13 @@ logging.basicConfig(
 )
 
 
-@tool
-def terminate(
-    final_answer: Annotated[str, Doc("final literal answer about the goal")],
-) -> str:
-    """When the goal achieved, this tool must be called."""
-    return final_answer
-
-
 @tool
 def simple_calculator(first_number: int, second_number: int, operator: str) -> float:
-    """Simple calculator tool. Just support +, -, *, /."""
+    """Simple calculator tool. Just support +, -, *, /.
+    When users need to do numerical calculations, you must use this tool to calculate, \
+    and you are not allowed to directly infer calculation results from user input or \
+    external observations.
+    """
     if isinstance(first_number, str):
         first_number = int(first_number)
     if isinstance(second_number, str):
@@ -52,22 +49,28 @@ def count_directory_files(path: Annotated[str, Doc("The directory path")]) -> int
 
 
 async def main():
-    from dbgpt.model.proxy.llms.siliconflow import SiliconFlowLLMClient
+    from dbgpt.model import AutoLLMClient
 
-    llm_client = SiliconFlowLLMClient(
-        model_alias="Qwen/Qwen2-7B-Instruct",
+    llm_client = AutoLLMClient(
+        # provider=os.getenv("LLM_PROVIDER", "proxy/deepseek"),
+        # name=os.getenv("LLM_MODEL_NAME", "deepseek-chat"),
+        provider=os.getenv("LLM_PROVIDER", "proxy/siliconflow"),
+        name=os.getenv("LLM_MODEL_NAME", "Qwen/Qwen2.5-Coder-32B-Instruct"),
     )
     agent_memory = AgentMemory()
     agent_memory.gpts_memory.init(conv_id="test456")
 
-    context: AgentContext = AgentContext(conv_id="test456", gpts_app_name="ReAct")
+    # It is important to set the temperature to a low value to get a better result
+    context: AgentContext = AgentContext(
+        conv_id="test456", gpts_app_name="ReAct", temperature=0.01
+    )
 
-    tools = ToolPack([simple_calculator, count_directory_files, terminate])
+    tools = ToolPack([simple_calculator, count_directory_files, Terminate()])
 
     user_proxy = await UserProxyAgent().bind(agent_memory).bind(context).build()
 
     tool_engineer = (
-        await ReActAgent(end_action_name="terminate", max_steps=10)
+        await ReActAgent(max_retry_count=10)
         .bind(context)
         .bind(LLMConfig(llm_client=llm_client))
         .bind(agent_memory)
@@ -78,7 +81,9 @@ async def main():
     await user_proxy.initiate_chat(
         recipient=tool_engineer,
         reviewer=user_proxy,
-        message="Calculate the product of 10 and 99, Count the number of files in /tmp, answer in Chinese.",
+        message="Calculate the product of 10 and 99, then count the number of files in /tmp",
+        # message="Calculate the product of 10 and 99",
+        # message="Count the number of files in /tmp",
     )
 
     # dbgpt-vis message infos
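
For context on the pattern this commit generalizes: the sketch below is a minimal, generic ReAct loop in plain Python, not DB-GPT's ReActAgent implementation. Here fake_llm, TOOLS, and react_loop are illustrative stand-ins for the model call, the ToolPack, and the agent's step loop; the "terminate" entry mirrors the role the Terminate action plays in the updated example, which otherwise reads its model from the LLM_PROVIDER and LLM_MODEL_NAME environment variables (defaulting to proxy/siliconflow and Qwen/Qwen2.5-Coder-32B-Instruct).

# Minimal, generic ReAct loop: reason -> act with a tool -> observe -> repeat,
# until a terminate action returns the final answer.  Everything here is an
# illustrative stand-in, not DB-GPT code.
import operator
from typing import Callable, Dict, Tuple

# Tool registry; "terminate" plays the role of the Terminate action: it ends
# the loop and carries the final answer.
TOOLS: Dict[str, Callable[..., str]] = {
    "calculator": lambda a, b, op: str(
        {"+": operator.add, "-": operator.sub,
         "*": operator.mul, "/": operator.truediv}[op](a, b)
    ),
    "terminate": lambda final_answer: final_answer,
}


def fake_llm(history: str) -> Tuple[str, tuple]:
    """Stand-in for the LLM: pick the next (action, args) from the history."""
    if "Observation: 990" in history:
        return "terminate", ("The product of 10 and 99 is 990",)
    return "calculator", (10, 99, "*")


def react_loop(goal: str, max_steps: int = 10) -> str:
    """Run the thought/action/observation loop until terminate or max_steps."""
    history = f"Goal: {goal}"
    for _ in range(max_steps):
        action, args = fake_llm(history)    # reason about the next step
        observation = TOOLS[action](*args)  # act by calling a tool
        history += f"\nAction: {action}{args}\nObservation: {observation}"
        if action == "terminate":           # end action reached
            return observation
    return "No answer within max_steps"


if __name__ == "__main__":
    print(react_loop("Calculate the product of 10 and 99"))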