Mirror of https://github.com/csunny/DB-GPT.git (synced 2025-09-10 05:19:44 +00:00)
feat:deepsearch v1
@@ -55,8 +55,8 @@ class ReflectionModel(BaseModel):
         default_factory=list,
         description="List of tools to be used in the action.",
     )
-    thought: str = Field(
-        ...,
+    thought: Optional[str] = Field(
+        None,
         description="The thought of the current action, describing what you want to achieve.",
     )
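This hunk relaxes the ReflectionModel schema: thought becomes Optional[str] with a None default, so an LLM reply that omits the field no longer fails validation. Below is a minimal sketch of the effect, assuming Pydantic v2; ReflectionSketch is an illustrative stand-in, not the repo's model.

# Sketch only (not part of the diff): Optional[str] + None default tolerates a missing "thought".
from typing import List, Optional

from pydantic import BaseModel, Field


class ReflectionSketch(BaseModel):
    tools: List[dict] = Field(default_factory=list, description="Tools to be used.")
    thought: Optional[str] = Field(None, description="Thought of the current action.")


# A payload without "thought" now parses instead of raising a ValidationError.
print(ReflectionSketch.model_validate({"tools": []}).thought)  # -> None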
@@ -102,7 +102,12 @@ class DeepSearchAction(ToolAction):
     ) -> ActionOutput:
         """Perform the action."""
         try:
-            # state = "split_query"
+            if self.state == "summarize":
+                return ActionOutput(
+                    is_exe_success=True,
+                    content=ai_message,
+                    view=ai_message,
+                )
             action_param: ReflectionModel = self._input_convert(
                 ai_message, ReflectionModel
             )
@@ -115,6 +120,7 @@ class DeepSearchAction(ToolAction):

             sub_queries = action_param.sub_queries
             if action_param.status == "summarize":
+                self.state = "summarize"
                 return ActionOutput(
                     is_exe_success=True,
                     content=action_param.thought,
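Taken together, the two DeepSearchAction hunks add a small two-pass state machine: when the parsed ReflectionModel reports status "summarize", the action stores state = "summarize", and on the next call it returns the raw ai_message as the final answer instead of converting it again. A compact sketch of that control flow follows, under simplified assumptions; SketchAction and SketchOutput are stand-ins, not DB-GPT classes.

# Sketch only (not part of the diff): two-pass flow of the summarize state.
from dataclasses import dataclass


@dataclass
class SketchOutput:
    is_exe_success: bool
    content: str


class SketchAction:
    def __init__(self) -> None:
        self.state = "split_query"

    def run(self, ai_message: str, status: str) -> SketchOutput:
        if self.state == "summarize":
            # Second pass: the message already is the final summary, pass it through.
            return SketchOutput(True, ai_message)
        if status == "summarize":
            # First pass decided the research is done: flip state for the next call.
            self.state = "summarize"
        return SketchOutput(True, "reflection for: " + ai_message)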
@@ -13,7 +13,7 @@ from dbgpt.agent import (
     ProfileConfig,
     Resource,
     ResourceType,
-    StructuredAgentMemoryFragment,
+    StructuredAgentMemoryFragment, BlankAction,
 )
 from dbgpt.agent.core.role import AgentRunMode
 from dbgpt.agent.resource import BaseTool, ResourcePack, ToolPack
@@ -214,7 +214,7 @@ class DeepSearchAgent(ConversableAgent):
         """Init indicator AssistantAgent."""
         super().__init__(**kwargs)

-        self._init_actions([DeepSearchAction])
+        self._init_actions([DeepSearchAction, BlankAction])

     async def preload_resource(self) -> None:
         await super().preload_resource()
@@ -294,41 +294,41 @@ class DeepSearchAgent(ConversableAgent):

         return json.dumps(abilities, ensure_ascii=False), []

-    async def build_system_prompt(
-        self,
-        question: Optional[str] = None,
-        most_recent_memories: Optional[str] = None,
-        resource_vars: Optional[Dict] = None,
-        context: Optional[Dict[str, Any]] = None,
-        is_retry_chat: bool = False,
-    ):
-        """Build system prompt."""
-        system_prompt = None
-        if self.bind_prompt:
-            prompt_param = {}
-            if resource_vars:
-                prompt_param.update(resource_vars)
-            if context:
-                prompt_param.update(context)
-            if self.bind_prompt.template_format == "f-string":
-                system_prompt = self.bind_prompt.template.format(
-                    **prompt_param,
-                )
-            elif self.bind_prompt.template_format == "jinja2":
-                system_prompt = Template(self.bind_prompt.template).render(prompt_param)
-            else:
-                logger.warning("Bind prompt template not exsit or format not support!")
-        if not system_prompt:
-            param: Dict = context if context else {}
-            system_prompt = await self.build_prompt(
-                question=question,
-                is_system=True,
-                most_recent_memories=most_recent_memories,
-                resource_vars=resource_vars,
-                is_retry_chat=is_retry_chat,
-                **param,
-            )
-        return system_prompt
+    # async def build_system_prompt(
+    #     self,
+    #     question: Optional[str] = None,
+    #     most_recent_memories: Optional[str] = None,
+    #     resource_vars: Optional[Dict] = None,
+    #     context: Optional[Dict[str, Any]] = None,
+    #     is_retry_chat: bool = False,
+    # ):
+    #     """Build system prompt."""
+    #     system_prompt = None
+    #     if self.bind_prompt:
+    #         prompt_param = {}
+    #         if resource_vars:
+    #             prompt_param.update(resource_vars)
+    #         if context:
+    #             prompt_param.update(context)
+    #         if self.bind_prompt.template_format == "f-string":
+    #             system_prompt = self.bind_prompt.template.format(
+    #                 **prompt_param,
+    #             )
+    #         elif self.bind_prompt.template_format == "jinja2":
+    #             system_prompt = Template(self.bind_prompt.template).render(prompt_param)
+    #         else:
+    #             logger.warning("Bind prompt template not exsit or format not support!")
+    #     if not system_prompt:
+    #         param: Dict = context if context else {}
+    #         system_prompt = await self.build_prompt(
+    #             question=question,
+    #             is_system=True,
+    #             most_recent_memories=most_recent_memories,
+    #             resource_vars=resource_vars,
+    #             is_retry_chat=is_retry_chat,
+    #             **param,
+    #         )
+    #     return system_prompt

     def prepare_act_param(
         self,
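Commenting out the build_system_prompt override means method resolution falls back to the implementation inherited from ConversableAgent, so the agent picks up whatever prompt handling the base class provides instead of this local copy. A tiny illustration of the fallback; the class names below are illustrative, not DB-GPT's.

# Sketch only (not part of the diff): with no override, the base implementation is used.
class BaseAgentSketch:
    async def build_system_prompt(self, question=None, **kwargs):
        return f"base prompt for: {question}"


class DeepSearchAgentSketch(BaseAgentSketch):
    # No build_system_prompt here -> BaseAgentSketch.build_system_prompt is resolved.
    pass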
@@ -376,6 +376,12 @@ class DeepSearchAgent(ConversableAgent):
                 # )
                 # if real_action is None:
                 #     continue
+                final_summarize = False
+                if self.profile.system_prompt_template == _DEEPSEARCH_FINIAL_SUMMARY_TEMPLATE:
+                    if isinstance(action, DeepSearchAction):
+                        continue
+                    else:
+                        final_summarize = True

                 last_out = await action.run(
                     ai_message=message.content if message.content else "",
@@ -383,6 +389,7 @@ class DeepSearchAgent(ConversableAgent):
                     rely_action_out=last_out,
                     **kwargs,
                 )
+                last_out.terminate = final_summarize
                 if not last_out.terminate:
                     self.profile.system_prompt_template = _DEEPSEARCH_FINIAL_SUMMARY_TEMPLATE
             span.metadata["action_out"] = last_out.to_dict() if last_out else None
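These act-loop hunks, combined with registering BlankAction in _init_actions, implement a two-round flow: while the research template is active, DeepSearchAction runs and, if its output does not terminate, the system prompt is switched to _DEEPSEARCH_FINIAL_SUMMARY_TEMPLATE; in the following round that template is detected, DeepSearchAction is skipped, and the remaining action produces the final answer with terminate forced to True through final_summarize. A compact sketch of one pass over the actions follows; all names ending in Sketch are stand-ins, not DB-GPT's API.

# Sketch only (not part of the diff): control flow of one act round after this change.
from dataclasses import dataclass


@dataclass
class OutSketch:
    content: str
    terminate: bool = False


class ActionSketch:
    def __init__(self, name: str) -> None:
        self.name = name

    def run(self, message: str) -> OutSketch:
        return OutSketch(content=f"{self.name}: {message}")


FINAL_TEMPLATE = "final-summary-template"  # stand-in for _DEEPSEARCH_FINIAL_SUMMARY_TEMPLATE


def act_round(profile_template: str, actions: list, message: str):
    last_out = None
    for action in actions:
        final_summarize = False
        if profile_template == FINAL_TEMPLATE:
            if action.name == "deep_search":
                continue  # research action is skipped in the summary round
            final_summarize = True  # the remaining (blank) action carries the final answer
        last_out = action.run(message)
        last_out.terminate = final_summarize
        if not last_out.terminate:
            profile_template = FINAL_TEMPLATE  # next round will summarize
    return profile_template, last_out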
@@ -459,5 +466,6 @@ class DeepSearchAgent(ConversableAgent):
         # )
         # )
         return "\n".join([
-            mem_dict.get("observation") for mem_dict in structured_memories
+            mem_dict.get("observation") for mem_dict in structured_memories if mem_dict.get("observation")
         ])
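The final hunk guards the memory rendering: str.join raises a TypeError as soon as one structured memory entry has no "observation", so filtering the generator first keeps the summary robust against partial records. A short sketch of the failure mode and the fix, with illustrative sample data.

# Sketch only (not part of the diff): why the extra "if" guard matters.
structured_memories = [{"observation": "step 1 result"}, {"note": "entry without observation"}]

# "\n".join(m.get("observation") for m in structured_memories)  # TypeError: expected str, got NoneType
joined = "\n".join(
    m.get("observation") for m in structured_memories if m.get("observation")
)
print(joined)  # -> step 1 result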