{
 "cells": [
  {
   "cell_type": "code",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2024-04-10T04:38:14.228948Z",
     "start_time": "2024-04-10T04:38:14.224972Z"
    }
   },
   "source": [
    "import nest_asyncio\n",
    "from dbgpt.agent import (\n",
    "    AgentContext,\n",
    "    GptsMemory,\n",
    "    LLMConfig,\n",
    "    ResourceLoader,\n",
    "    UserProxyAgent,\n",
    ")\n",
    "from dbgpt.agent.expand.code_assistant_agent import CodeAssistantAgent\n",
    "from dbgpt.agent.plan import AutoPlanChatManager\n",
    "from dbgpt.model.proxy import OpenAILLMClient\n",
    "\n",
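    "# Patch asyncio so nested event loops work inside the notebook\n",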
"nest_asyncio.apply()"
|
|
],
|
|
"execution_count": 7,
|
|
"outputs": []
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"execution_count": null,
|
|
"metadata": {
|
|
"is_executing": true
|
|
},
|
|
"source": [
|
|
"# Set your api key and api base url\n",
|
|
"# os.environ[\"OPENAI_API_KEY\"] = \"Your API\"\n",
|
|
"# os.environ[\"OPENAI_API_BASE\"] = \"https://api.openai.com/v1\""
|
|
   ],
   "outputs": []
  },
  {
   "cell_type": "code",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2024-04-10T04:19:47.838081Z",
     "start_time": "2024-04-10T04:17:54.465616Z"
    }
   },
   "source": [
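    "# LLM client, conversation context, shared memory, and resource loader used by the agents below\n",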
"llm_client = OpenAILLMClient(model_alias=\"gpt-4\")\n",
|
|
"context: AgentContext = AgentContext(conv_id=\"test456\", gpts_app_name=\"代码分析助手\")\n",
|
|
"\n",
|
|
"default_memory = GptsMemory()\n",
|
|
"\n",
|
|
"resource_loader = ResourceLoader()\n",
|
|
"\n",
|
|
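    "# Code assistant agent that writes and executes code for the sub-tasks it receives\n",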
"coder = (\n",
|
|
" await CodeAssistantAgent()\n",
|
|
" .bind(context)\n",
|
|
" .bind(LLMConfig(llm_client=llm_client))\n",
|
|
" .bind(default_memory)\n",
|
|
" .bind(resource_loader)\n",
|
|
" .build()\n",
|
|
")\n",
|
|
"\n",
|
|
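    "# Manager that automatically plans the task and coordinates the agents it hires\n",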
"manager = (\n",
|
|
" await AutoPlanChatManager()\n",
|
|
" .bind(context)\n",
|
|
" .bind(default_memory)\n",
|
|
" .bind(LLMConfig(llm_client=llm_client))\n",
|
|
" .build()\n",
|
|
")\n",
|
|
"manager.hire([coder])\n",
|
|
"\n",
|
|
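    "# User proxy that acts on behalf of the human user\n",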
"user_proxy = await UserProxyAgent().bind(context).bind(default_memory).build()\n",
|
|
"\n",
|
|
"\n",
|
|
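    "# Start the conversation: the user proxy sends the task to the manager for planning and execution\n",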
"await user_proxy.initiate_chat(\n",
|
|
" recipient=manager,\n",
|
|
" reviewer=user_proxy,\n",
|
|
" message=\"Obtain simple information about issues in the repository 'eosphoros-ai/DB-GPT' in the past three days and analyze the data. Create a Markdown table grouped by day and status.\",\n",
|
|
" # message=\"Find papers on gpt-4 in the past three weeks on arxiv, and organize their titles, authors, and links into a markdown table\",\n",
|
|
" # message=\"find papers on LLM applications from arxiv in the last month, create a markdown table of different domains.\",\n",
|
|
")"
|
|
   ],
   "execution_count": 4,
   "outputs": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "dbgpt_env",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.10.13"
  },
  "orig_nbformat": 4,
  "vscode": {
   "interpreter": {
    "hash": "f8b6b0e04f284afd2fbb5e4163e7d03bbdc845eaeb6e8c78fae04fce6b51dae6"
   }
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}