fix: agent examples

This commit is contained in:
csunny 2024-12-27 17:04:01 +08:00
parent bbf043155a
commit 64d9e5e424
9 changed files with 119 additions and 75 deletions

View File

@ -5,9 +5,9 @@
Execute the following command in the terminal:
Set env params.
.. code-block:: shell
export OPENAI_API_KEY=sk-xx
export OPENAI_API_BASE=https://xx:80/v1
export SILICONFLOW_API_KEY=sk-xx
export SILICONFLOW_API_BASE=https://xx:80/v1
run example.
.. code-block:: shell
@ -15,6 +15,7 @@
"""
import asyncio
import os
from dbgpt.agent import (
AgentContext,
@ -32,12 +33,14 @@ initialize_tracer(
async def main():
from dbgpt.model.proxy import OpenAILLMClient
from dbgpt.model.proxy.llms.siliconflow import SiliconFlowLLMClient
agent_memory = AgentMemory()
llm_client = OpenAILLMClient(
model_alias="gpt-4o",
llm_client = SiliconFlowLLMClient(
model_alias=os.getenv(
"SILICONFLOW_MODEL_VERSION", "Qwen/Qwen2.5-Coder-32B-Instruct"
),
)
context: AgentContext = AgentContext(

View File

@ -6,14 +6,13 @@
Set env params.
.. code-block:: shell
export OPENAI_API_KEY=sk-xx
export OPENAI_API_BASE=https://xx:80/v1
export SILICONFLOW_API_KEY=sk-xx
export SILICONFLOW_API_BASE=https://xx:80/v1
run example.
.. code-block:: shell
python examples/agents/awel_layout_agents_chat_examples.py
"""
import asyncio
import os
@ -34,15 +33,16 @@ initialize_tracer("/tmp/agent_trace.jsonl", create_system_app=True)
async def main():
from dbgpt.model.proxy import OpenAILLMClient
agent_memory = AgentMemory()
agent_memory.gpts_memory.init(conv_id="test456")
try:
from dbgpt.model.proxy.llms.tongyi import TongyiLLMClient
from dbgpt.model.proxy.llms.siliconflow import SiliconFlowLLMClient
llm_client = TongyiLLMClient(
model_alias="qwen2-72b-instruct",
llm_client = SiliconFlowLLMClient(
model_alias=os.getenv(
"SILICONFLOW_MODEL_VERSION", "Qwen/Qwen2.5-Coder-32B-Instruct"
),
)
context: AgentContext = AgentContext(conv_id="test456", gpts_app_name="信息析助手")

View File

@ -44,12 +44,17 @@ def count_directory_files(path: Annotated[str, Doc("The directory path")]) -> in
async def main():
from dbgpt.model.proxy import OpenAILLMClient
llm_client = OpenAILLMClient(model_alias="gpt-3.5-turbo")
context: AgentContext = AgentContext(conv_id="test456")
from dbgpt.model.proxy.llms.siliconflow import SiliconFlowLLMClient
llm_client = SiliconFlowLLMClient(
model_alias=os.getenv(
"SILICONFLOW_MODEL_VERSION", "Qwen/Qwen2.5-Coder-32B-Instruct"
),
)
agent_memory = AgentMemory()
agent_memory.gpts_memory.init(conv_id="test456")
context: AgentContext = AgentContext(conv_id="test456", gpts_app_name="工具助手")
tools = ToolPack([simple_calculator, count_directory_files])
@ -77,7 +82,7 @@ async def main():
)
# dbgpt-vis message infos
print(await agent_memory.gpts_memory.one_chat_completions("test456"))
print(await agent_memory.gpts_memory.app_link_chat_message("test456"))
if __name__ == "__main__":

View File

@ -6,8 +6,8 @@
Set env params.
.. code-block:: shell
export OPENAI_API_KEY=sk-xx
export OPENAI_API_BASE=https://xx:80/v1
export SILICONFLOW_API_KEY=sk-xx
export SILICONFLOW_API_BASE=https://xx:80/v1
run example.
.. code-block:: shell
@ -20,19 +20,24 @@ import os
from dbgpt.agent import AgentContext, AgentMemory, LLMConfig, UserProxyAgent
from dbgpt.agent.expand.tool_assistant_agent import ToolAssistantAgent
from dbgpt.agent.resource import AutoGPTPluginToolPack
from dbgpt.configs.model_config import ROOT_PATH
current_dir = os.getcwd()
parent_dir = os.path.dirname(current_dir)
test_plugin_dir = os.path.join(parent_dir, "test_files/plugins")
test_plugin_dir = os.path.join(ROOT_PATH, "examples/test_files/plugins")
async def main():
from dbgpt.model.proxy import OpenAILLMClient
from dbgpt.model.proxy.llms.siliconflow import SiliconFlowLLMClient
llm_client = OpenAILLMClient(model_alias="gpt-3.5-turbo")
context: AgentContext = AgentContext(conv_id="test456")
llm_client = SiliconFlowLLMClient(
model_alias=os.getenv(
"SILICONFLOW_MODEL_VERSION", "Qwen/Qwen2.5-Coder-32B-Instruct"
),
)
agent_memory = AgentMemory()
agent_memory.gpts_memory.init(conv_id="test456")
context: AgentContext = AgentContext(conv_id="test456", gpts_app_name="插件对话助手")
tools = AutoGPTPluginToolPack(test_plugin_dir)
@ -54,7 +59,7 @@ async def main():
)
# dbgpt-vis message infos
print(await agent_memory.gpts_memory.one_chat_completions("test456"))
print(await agent_memory.gpts_memory.app_link_chat_message("test456"))
if __name__ == "__main__":

View File

@ -6,8 +6,8 @@
Set env params.
.. code-block:: shell
export OPENAI_API_KEY=sk-xx
export OPENAI_API_BASE=https://xx:80/v1
export SILICONFLOW_API_KEY=sk-xx
export SILICONFLOW_API_BASE=https://xx:80/v1
run example.
.. code-block:: shell
@ -18,20 +18,27 @@ import asyncio
import os
from dbgpt.agent import AgentContext, AgentMemory, LLMConfig, UserProxyAgent
from dbgpt.agent.expand.retrieve_summary_assistant_agent import (
RetrieveSummaryAssistantAgent,
)
from dbgpt.agent.expand.summary_assistant_agent import SummaryAssistantAgent
from dbgpt.configs.model_config import ROOT_PATH
async def summary_example_with_success():
from dbgpt.model.proxy import OpenAILLMClient
async def main():
from dbgpt.model.proxy.llms.siliconflow import SiliconFlowLLMClient
llm_client = SiliconFlowLLMClient(
model_alias=os.getenv(
"SILICONFLOW_MODEL_VERSION", "Qwen/Qwen2.5-Coder-32B-Instruct"
),
)
context: AgentContext = AgentContext(
conv_id="retrieve_summarize", gpts_app_name="Summary Assistant"
)
llm_client = OpenAILLMClient(model_alias="gpt-3.5-turbo-16k")
context: AgentContext = AgentContext(conv_id="retrieve_summarize")
agent_memory = AgentMemory()
agent_memory.gpts_memory.init(conv_id="retrieve_summarize")
summarizer = (
await RetrieveSummaryAssistantAgent()
await SummaryAssistantAgent()
.bind(context)
.bind(LLMConfig(llm_client=llm_client))
.bind(agent_memory)
@ -47,6 +54,7 @@ async def summary_example_with_success():
"https://en.wikipedia.org/wiki/Chernobyl_disaster",
]
# TODO add a tool to load the pdf and internet files
await user_proxy.initiate_chat(
recipient=summarizer,
reviewer=user_proxy,
@ -55,11 +63,8 @@ async def summary_example_with_success():
)
# dbgpt-vis message infos
print(await agent_memory.gpts_memory.one_chat_completions("retrieve_summarize"))
print(await agent_memory.gpts_memory.app_link_chat_message("retrieve_summarize"))
if __name__ == "__main__":
asyncio.run(summary_example_with_success())
print(
"\033[92m=======================The Summary Assistant with Successful Results Ended==================\n\n\033[91m"
)
asyncio.run(main())

View File

@ -10,9 +10,9 @@ You can limit the memory and file system resources available to the code executi
environment. The code execution environment is isolated from the host system,
preventing access to the internet and other external resources.
"""
import asyncio
import logging
import os
from typing import Optional, Tuple
from dbgpt.agent import (
@ -270,11 +270,28 @@ class SandboxCodeAssistantAgent(ConversableAgent):
async def main():
from dbgpt.model.proxy import OpenAILLMClient
from dbgpt.model.proxy.llms.siliconflow import SiliconFlowLLMClient
llm_client = OpenAILLMClient(model_alias="gpt-4o-mini")
llm_client = SiliconFlowLLMClient(
model_alias=os.getenv(
"SILICONFLOW_MODEL_VERSION", "Qwen/Qwen2.5-Coder-32B-Instruct"
),
)
context: AgentContext = AgentContext(conv_id="test123")
agent_memory = AgentMemory(HybridMemory[AgentMemoryFragment].from_chroma())
# TODO Embedding and Rerank model refactor
from dbgpt.rag.embedding import OpenAPIEmbeddings
silicon_embeddings = OpenAPIEmbeddings(
api_url=os.getenv("SILICONFLOW_API_BASE") + "/embeddings",
api_key=os.getenv("SILICONFLOW_API_KEY"),
model_name="BAAI/bge-large-zh-v1.5",
)
agent_memory = AgentMemory(
HybridMemory[AgentMemoryFragment].from_chroma(
embeddings=silicon_embeddings,
)
)
agent_memory.gpts_memory.init("test123")
coder = (

View File

@ -17,28 +17,21 @@
import asyncio
import os
from dbgpt.agent import (
AgentContext,
AgentMemory,
AgentMemoryFragment,
HybridMemory,
LLMConfig,
UserProxyAgent,
)
from dbgpt.agent import AgentContext, AgentMemory, LLMConfig, UserProxyAgent
from dbgpt.agent.expand.code_assistant_agent import CodeAssistantAgent
async def main():
from dbgpt.model.proxy import OpenAILLMClient
from dbgpt.model.proxy.llms.siliconflow import SiliconFlowLLMClient
# llm_client = OpenAILLMClient(model_alias="gpt-3.5-turbo")
from dbgpt.model.proxy.llms.tongyi import TongyiLLMClient
llm_client = TongyiLLMClient(
model_alias="qwen2-72b-instruct",
llm_client = SiliconFlowLLMClient(
model_alias=os.getenv(
"SILICONFLOW_MODEL_VERSION", "Qwen/Qwen2.5-Coder-32B-Instruct"
),
)
context: AgentContext = AgentContext(conv_id="test123")
context: AgentContext = AgentContext(conv_id="test123", gpts_app_name="代码助手")
agent_memory = AgentMemory()
agent_memory.gpts_memory.init(conv_id="test123")
try:

View File

@ -15,18 +15,25 @@
"""
import asyncio
import os
from dbgpt.agent import AgentContext, AgentMemory, LLMConfig, UserProxyAgent
from dbgpt.agent.expand.summary_assistant_agent import SummaryAssistantAgent
async def summary_example_with_success():
from dbgpt.model.proxy import OpenAILLMClient
from dbgpt.model.proxy.llms.siliconflow import SiliconFlowLLMClient
llm_client = OpenAILLMClient(model_alias="gpt-3.5-turbo")
llm_client = SiliconFlowLLMClient(
model_alias=os.getenv(
"SILICONFLOW_MODEL_VERSION", "Qwen/Qwen2.5-Coder-32B-Instruct"
),
)
context: AgentContext = AgentContext(conv_id="summarize")
agent_memory = AgentMemory()
agent_memory.gpts_memory.init(conv_id="summarize")
summarizer = (
await SummaryAssistantAgent()
.bind(context)
@ -71,16 +78,22 @@ async def summary_example_with_success():
)
# dbgpt-vis message infos
print(await agent_memory.gpts_memory.one_chat_completions("summarize"))
print(await agent_memory.gpts_memory.app_link_chat_message("summarize"))
async def summary_example_with_faliure():
from dbgpt.model.proxy import OpenAILLMClient
from dbgpt.model.proxy.llms.siliconflow import SiliconFlowLLMClient
llm_client = OpenAILLMClient(model_alias="gpt-3.5-turbo")
llm_client = SiliconFlowLLMClient(
model_alias=os.getenv(
"SILICONFLOW_MODEL_VERSION", "Qwen/Qwen2.5-Coder-32B-Instruct"
),
)
context: AgentContext = AgentContext(conv_id="summarize")
agent_memory = AgentMemory()
agent_memory.gpts_memory.init(conv_id="summarize")
summarizer = (
await SummaryAssistantAgent()
.bind(context)
@ -92,7 +105,6 @@ async def summary_example_with_faliure():
user_proxy = await UserProxyAgent().bind(agent_memory).bind(context).build()
# Test the failure example
await user_proxy.initiate_chat(
recipient=summarizer,
reviewer=user_proxy,
@ -110,7 +122,7 @@ async def summary_example_with_faliure():
""",
)
print(await agent_memory.gpts_memory.one_chat_completions("summarize"))
print(await agent_memory.gpts_memory.app_link_chat_message("summarize"))
if __name__ == "__main__":

View File

@ -20,24 +20,28 @@ import os
from dbgpt.agent import AgentContext, AgentMemory, LLMConfig, UserProxyAgent
from dbgpt.agent.expand.data_scientist_agent import DataScientistAgent
from dbgpt.agent.resource import SQLiteDBResource
from dbgpt.configs.model_config import ROOT_PATH
from dbgpt.util.tracer import initialize_tracer
current_dir = os.getcwd()
parent_dir = os.path.dirname(current_dir)
test_plugin_dir = os.path.join(parent_dir, "test_files")
test_plugin_dir = os.path.join(ROOT_PATH, "test_files")
initialize_tracer("/tmp/agent_trace.jsonl", create_system_app=True)
async def main():
from dbgpt.model.proxy import OpenAILLMClient
from dbgpt.model.proxy.llms.siliconflow import SiliconFlowLLMClient
llm_client = OpenAILLMClient(model_alias="gpt-3.5-turbo")
llm_client = SiliconFlowLLMClient(
model_alias=os.getenv(
"SILICONFLOW_MODEL_VERSION", "Qwen/Qwen2.5-Coder-32B-Instruct"
),
)
context: AgentContext = AgentContext(conv_id="test456")
agent_memory = AgentMemory()
agent_memory.gpts_memory.init(conv_id="test456")
sqlite_resource = SQLiteDBResource("SQLite Database", f"{test_plugin_dir}/dbgpt.db")
user_proxy = await UserProxyAgent().bind(agent_memory).bind(context).build()
sql_boy = (
@ -56,7 +60,7 @@ async def main():
)
## dbgpt-vis message infos
print(await agent_memory.gpts_memory.one_chat_completions("test456"))
print(await agent_memory.gpts_memory.app_link_chat_message("test456"))
if __name__ == "__main__":