Mirror of https://github.com/hwchase17/langchain.git (synced 2025-07-16 09:48:04 +00:00)
add handling on error (#541)
commit 5aefc2b7ce
parent 1631981f84
@@ -272,9 +272,14 @@ class AgentExecutor(Chain, BaseModel):
                 self.callback_manager.on_tool_start(
                     {"name": str(chain)[:60] + "..."}, output, color="green"
                 )
-                # We then call the tool on the tool input to get an observation
-                observation = chain(output.tool_input)
-                color = color_mapping[output.tool]
+                try:
+                    # We then call the tool on the tool input to get an observation
+                    observation = chain(output.tool_input)
+                    color = color_mapping[output.tool]
+                except Exception as e:
+                    if self.verbose:
+                        self.callback_manager.on_tool_error(e)
+                    raise e
             else:
                 if self.verbose:
                     self.callback_manager.on_tool_start(
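The hunk above wraps the tool invocation in a try/except so that a failing tool is reported through the callback manager before the exception propagates. Below is a minimal, self-contained sketch of that pattern; the names (ConsoleCallbacks, run_tool, flaky_search) are illustrative stand-ins, not langchain APIs.

from typing import Callable


class ConsoleCallbacks:
    """Hypothetical handler that just prints the events it observes."""

    def on_tool_start(self, serialized: dict, tool_input: str) -> None:
        print(f"tool start: {serialized['name']} with {tool_input!r}")

    def on_tool_error(self, error: Exception) -> None:
        print(f"tool failed: {error!r}")


def run_tool(
    tool: Callable[[str], str],
    tool_input: str,
    callbacks: ConsoleCallbacks,
    verbose: bool = True,
) -> str:
    if verbose:
        callbacks.on_tool_start({"name": getattr(tool, "__name__", "tool")}, tool_input)
    try:
        # Mirrors the diff: the tool call itself is the only thing in the try block.
        return tool(tool_input)
    except Exception as e:
        # Report the failure to the handler, then re-raise so the caller
        # still sees the original exception (the callback only observes).
        if verbose:
            callbacks.on_tool_error(e)
        raise


def flaky_search(query: str) -> str:
    raise RuntimeError("search backend unavailable")


if __name__ == "__main__":
    try:
        run_tool(flaky_search, "weather in SF", ConsoleCallbacks())
    except RuntimeError:
        print("caller still receives the exception")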
@@ -138,7 +138,12 @@ class Chain(BaseModel, ABC):
             self.callback_manager.on_chain_start(
                 {"name": self.__class__.__name__}, inputs
             )
-        outputs = self._call(inputs)
+        try:
+            outputs = self._call(inputs)
+        except Exception as e:
+            if self.verbose:
+                self.callback_manager.on_chain_error(e)
+            raise e
         if self.verbose:
             self.callback_manager.on_chain_end(outputs)
         self._validate_outputs(outputs)
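The same notify-then-re-raise pattern lands in Chain.__call__: on_chain_error fires only when verbose is set, and the exception is re-raised unchanged, so callers fail exactly as they did before this commit. A small self-check of that contract, using made-up names (RecordingCallbacks and call_chain are not from the repo):

class RecordingCallbacks:
    """Hypothetical handler that records errors instead of printing them."""

    def __init__(self) -> None:
        self.errors = []

    def on_chain_error(self, error: Exception) -> None:
        self.errors.append(error)


def call_chain(callbacks: RecordingCallbacks, verbose: bool = True) -> dict:
    try:
        raise ValueError("missing prompt variable")  # stand-in for self._call(inputs)
    except Exception as e:
        if verbose:
            callbacks.on_chain_error(e)
        raise e  # same form as the diff; the caller still gets the ValueError


callbacks = RecordingCallbacks()
try:
    call_chain(callbacks)
except ValueError:
    pass
assert len(callbacks.errors) == 1  # the handler observed the failure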
@@ -73,7 +73,12 @@ class BaseLLM(BaseModel, ABC):
                 self.callback_manager.on_llm_start(
                     {"name": self.__class__.__name__}, prompts
                 )
-            output = self._generate(prompts, stop=stop)
+            try:
+                output = self._generate(prompts, stop=stop)
+            except Exception as e:
+                if self.verbose:
+                    self.callback_manager.on_llm_error(e)
+                raise e
             if self.verbose:
                 self.callback_manager.on_llm_end(output)
             return output
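With this hunk, a verbose LLM call emits on_llm_start followed by either on_llm_end (success) or on_llm_error (failure); on the error path on_llm_end is never reached. A handler can rely on that pairing, for example to time calls. This is a hedged sketch with invented names (TimingCallbacks is not part of the library):

import time
from typing import List, Optional


class TimingCallbacks:
    """Hypothetical handler that pairs each start with an end or an error."""

    def __init__(self) -> None:
        self._started_at: Optional[float] = None

    def on_llm_start(self, serialized: dict, prompts: List[str]) -> None:
        self._started_at = time.monotonic()

    def _elapsed(self) -> float:
        return time.monotonic() - (self._started_at or time.monotonic())

    def on_llm_end(self, output: object) -> None:
        print(f"llm succeeded after {self._elapsed():.3f}s")

    def on_llm_error(self, error: Exception) -> None:
        # Terminal event when _generate raises; on_llm_end will not follow.
        print(f"llm failed after {self._elapsed():.3f}s: {error!r}")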
@@ -90,11 +95,18 @@ class BaseLLM(BaseModel, ABC):
             else:
                 missing_prompts.append(prompt)
                 missing_prompt_idxs.append(i)
-        self.callback_manager.on_llm_start(
-            {"name": self.__class__.__name__}, missing_prompts
-        )
-        new_results = self._generate(missing_prompts, stop=stop)
-        self.callback_manager.on_llm_end(new_results)
+        if self.verbose:
+            self.callback_manager.on_llm_start(
+                {"name": self.__class__.__name__}, missing_prompts
+            )
+        try:
+            new_results = self._generate(missing_prompts, stop=stop)
+        except Exception as e:
+            if self.verbose:
+                self.callback_manager.on_llm_error(e)
+            raise e
+        if self.verbose:
+            self.callback_manager.on_llm_end(new_results)
         for i, result in enumerate(new_results.generations):
             existing_prompts[missing_prompt_idxs[i]] = result
             prompt = prompts[i]
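Beyond adding the try/except, this last hunk also puts on_llm_start and on_llm_end on the cache-miss path behind if self.verbose, where they previously fired unconditionally. A compressed sketch of that control flow follows, using hypothetical helpers (generate_with_cache, the cache dict, and the generate callable are stand-ins, not repo code):

def generate_with_cache(prompts, cache, generate, callbacks, verbose=True):
    """Return one result per prompt, generating only the cache misses."""
    results, missing, missing_idxs = {}, [], []
    for i, prompt in enumerate(prompts):
        if prompt in cache:
            results[i] = cache[prompt]
        else:
            missing.append(prompt)
            missing_idxs.append(i)
    if missing:
        if verbose:
            callbacks.on_llm_start({"name": "FakeLLM"}, missing)
        try:
            new_results = generate(missing)
        except Exception as e:
            if verbose:
                callbacks.on_llm_error(e)
            raise
        if verbose:
            callbacks.on_llm_end(new_results)
        for idx, prompt, result in zip(missing_idxs, missing, new_results):
            results[idx] = result
            cache[prompt] = result
    return [results[i] for i in range(len(prompts))]

Because the error is reported before the re-raise, nothing is written back to the cache for a failing batch, which matches the behavior of the diff above.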