Mirror of https://github.com/csunny/DB-GPT.git (synced 2025-07-30 15:21:02 +00:00)
bugfix(ChatData): ChatData Use AntV Table

1. Merge ChatData and ChatDB

This commit is contained in:
parent 940edcdab1
commit 8df671c6db
64  pilot/scene/chat_agent/auto_gen.py  Normal file
@@ -0,0 +1,64 @@
import autogen
from autogen import oai, AssistantAgent, UserProxyAgent, config_list_from_json
from openai.openai_object import OpenAIObject

config_list = [
    {
        "model": "gpt-3.5-turbo",
        "api_base": "http://43.156.9.162:3001/api/openai/v1",
        "api_type": "open_ai",
        "api_key": "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",
    }
]
llm_config = {
    "request_timeout": 600,
    "seed": 45,  # change the seed for different trials
    "config_list": config_list,
    "temperature": 0,
    "max_tokens": 3000,
}

# assistant = AssistantAgent("assistant", llm_config=llm_config)
# user_proxy = UserProxyAgent("user_proxy", code_execution_config={"work_dir": "coding"})
#

assistant = autogen.AssistantAgent(
    name="assistant",
    llm_config=llm_config,
    is_termination_msg=lambda x: True if "TERMINATE" in x.get("content") else False,
)

# Create a user proxy agent instance named "user_proxy"; configured here to allow human intervention.
user_proxy = autogen.UserProxyAgent(
    name="user_proxy",
    human_input_mode="TERMINATE",
    max_consecutive_auto_reply=1,
    is_termination_msg=lambda x: x.get("content", "").rstrip().endswith("TERMINATE"),
    code_execution_config={"work_dir": "web"},
    llm_config=llm_config,
    system_message="""Reply TERMINATE if the task has been solved at full satisfaction.
Otherwise, reply CONTINUE, or the reason why the task is not solved yet.""",
)

task1 = """What day of the week is it today, and how many days are left until the weekend? Please tell me the answer."""

if __name__ == "__main__":
    user_proxy.initiate_chat(assistant, message=task1)
    # user_proxy.initiate_chat(assistant, message="Plot a chart of NVDA and TESLA stock price change YTD.")

# response: OpenAIObject = oai.Completion.create(
#     config_list=[
#         {
#             "model": "gpt-3.5-turbo",
#             "api_base": "http://43.156.9.162:3001/api/openai/v1",
#             "api_type": "open_ai",
#             "api_key": "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",
#         }
#     ],
#     prompt="Hello there!",
# )
#
# print(response)
#
# text = response.get("choices")[0].get("text")
# print(text)
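A side note on configuration: auto_gen.py imports config_list_from_json but then hard-codes the endpoint and API key in source. Below is a minimal sketch of loading the same settings through that helper instead, assuming the credentials are stored in autogen's OAI_CONFIG_LIST environment variable or JSON file; the filter_dict value is illustrative and not part of this commit.

import autogen

# Load OpenAI-compatible endpoint settings from the OAI_CONFIG_LIST env var or JSON file
# instead of hard-coding the api_key in source; filter to the model used above.
config_list = autogen.config_list_from_json(
    env_or_file="OAI_CONFIG_LIST",
    filter_dict={"model": ["gpt-3.5-turbo"]},
)

llm_config = {
    "config_list": config_list,
    "request_timeout": 600,
    "seed": 45,
    "temperature": 0,
    "max_tokens": 3000,
}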
94  pilot/scene/chat_agent/auto_gen2.py  Normal file
@@ -0,0 +1,94 @@
import autogen

config_list = [
    {
        "model": "gpt-3.5-turbo",
        "api_base": "http://43.156.9.162:3001/api/openai/v1",
        "api_type": "open_ai",
        "api_key": "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",
    }
]
llm_config = {
    "functions": [
        {
            "name": "python",
            "description": "run cell in ipython and return the execution result.",
            "parameters": {
                "type": "object",
                "properties": {
                    "cell": {
                        "type": "string",
                        "description": "Valid Python cell to execute.",
                    }
                },
                "required": ["cell"],
            },
        },
        {
            "name": "sh",
            "description": "run a shell script and return the execution result.",
            "parameters": {
                "type": "object",
                "properties": {
                    "script": {
                        "type": "string",
                        "description": "Valid shell script to execute.",
                    }
                },
                "required": ["script"],
            },
        },
    ],
    "config_list": config_list,
    "request_timeout": 120,
    "max_tokens": 3000,
}
chatbot = autogen.AssistantAgent(
    name="chatbot",
    system_message="For coding tasks, only use the functions you have been provided with. Reply TERMINATE when the task is done.",
    llm_config=llm_config,
)

# create a UserProxyAgent instance named "user_proxy"
user_proxy = autogen.UserProxyAgent(
    name="user_proxy",
    is_termination_msg=lambda x: x.get("content", "")
    and x.get("content", "").rstrip().endswith("TERMINATE"),
    human_input_mode="NEVER",
    max_consecutive_auto_reply=10,
    code_execution_config={"work_dir": "coding"},
)

# define functions according to the function description
from IPython import get_ipython


def exec_python(cell):
    ipython = get_ipython()
    result = ipython.run_cell(cell)
    log = str(result.result)
    if result.error_before_exec is not None:
        log += f"\n{result.error_before_exec}"
    if result.error_in_exec is not None:
        log += f"\n{result.error_in_exec}"
    return log


def exec_sh(script):
    return user_proxy.execute_code_blocks([("sh", script)])


# register the functions
user_proxy.register_function(
    function_map={
        "python": exec_python,
        "sh": exec_sh,
    }
)

if __name__ == "__main__":
    # start the conversation
    user_proxy.initiate_chat(
        chatbot,
        message="Draw two agents chatting with each other with an example dialog. Don't add plt.show().",
    )
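On the wiring above: llm_config["functions"] advertises the "python" and "sh" schemas to the model, and user_proxy.register_function maps those names onto exec_python and exec_sh, which user_proxy runs whenever the assistant emits a matching function call. Below is a minimal sketch, not part of this commit, of exercising the two tools directly, assuming it is appended to auto_gen2.py after the register_function call; the smoke_test_tools name is hypothetical. Note that IPython's get_ipython() returns None outside an IPython/Jupyter session, so the python tool is only usable there.

def smoke_test_tools():
    # Hypothetical local check (not part of this commit): call the registered tools
    # directly instead of going through an LLM round trip.
    if get_ipython() is not None:
        # Only meaningful inside IPython; run_cell needs a live interactive shell.
        print(exec_python("print(21 * 2)"))
    # exec_sh delegates to user_proxy.execute_code_blocks, which runs the script in
    # work_dir="coding" and returns the execution result.
    print(exec_sh("echo hello from the sh tool"))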
File diff suppressed because one or more lines are too long (notice repeated for 14 files).
@@ -0,0 +1 @@
self.__BUILD_MANIFEST=function(s,c,e,a,t,n,b,d,k,h,i,u){return{__rewrites:{beforeFiles:[],afterFiles:[],fallback:[]},"/":[b,s,c,e,n,d,"static/chunks/539-dcd22f1f6b99ebee.js","static/chunks/pages/index-2d0eeb81ae5a56b6.js"],"/_error":["static/chunks/pages/_error-dee72aff9b2e2c12.js"],"/agent":[s,c,a,k,t,"static/chunks/pages/agent-ff3852522fea07c6.js"],"/chat":["static/chunks/pages/chat-be738dbebc61501d.js"],"/chat/[scene]/[id]":["static/chunks/pages/chat/[scene]/[id]-868f396254bd78ec.js"],"/database":[s,c,e,a,t,n,h,"static/chunks/643-d8f53f40dd3c5b40.js","static/chunks/pages/database-51b362b90c1b0be9.js"],"/knowledge":[i,s,c,a,k,t,n,"static/chunks/63-d9f1013be8e4599a.js","static/chunks/pages/knowledge-7a8d4a6321e572be.js"],"/knowledge/chunk":[a,t,"static/chunks/pages/knowledge/chunk-e27c2e349b868b28.js"],"/models":[i,s,c,e,u,h,"static/chunks/pages/models-97ac98a0a4bed459.js"],"/prompt":[b,s,c,e,u,"static/chunks/837-e6d4d1eb9e057050.js",d,"static/chunks/607-b224c640f6907e4b.js","static/chunks/pages/prompt-b28755caf89f9c30.js"],sortedPages:["/","/_app","/_error","/agent","/chat","/chat/[scene]/[id]","/database","/knowledge","/knowledge/chunk","/models","/prompt"]}}("static/chunks/64-91b49d45b9846775.js","static/chunks/479-b20198841f9a6a1e.js","static/chunks/9-bb2c54d5c06ba4bf.js","static/chunks/442-197e6cbc1e54109a.js","static/chunks/813-cce9482e33f2430c.js","static/chunks/411-d9eba2657c72f766.js","static/chunks/29107295-90b90cb30c825230.js","static/chunks/719-5a18c3c696beda6f.js","static/chunks/365-2cad3676ccbb1b1a.js","static/chunks/928-74244889bd7f2699.js","static/chunks/75fc9c18-a784766a129ec5fb.js","static/chunks/947-5980a3ff49069ddd.js"),self.__BUILD_MANIFEST_CB&&self.__BUILD_MANIFEST_CB();
@@ -0,0 +1 @@
self.__SSG_MANIFEST=new Set([]);self.__SSG_MANIFEST_CB&&self.__SSG_MANIFEST_CB()
0  pilot/test/test.db  Normal file