From 5a920e14c06735441a9ea28c1313f8bd433dc721 Mon Sep 17 00:00:00 2001
From: Krish Dholakia
Date: Mon, 13 Nov 2023 17:55:10 -0800
Subject: [PATCH] fix litellm openai imports (#13307)

---
 libs/langchain/langchain/chat_models/litellm.py | 11 +++++------
 1 file changed, 5 insertions(+), 6 deletions(-)

diff --git a/libs/langchain/langchain/chat_models/litellm.py b/libs/langchain/langchain/chat_models/litellm.py
index e06f836b5ea..ac1c1c9c7c8 100644
--- a/libs/langchain/langchain/chat_models/litellm.py
+++ b/libs/langchain/langchain/chat_models/litellm.py
@@ -62,14 +62,13 @@ def _create_retry_decorator(
     ] = None,
 ) -> Callable[[Any], Any]:
     """Returns a tenacity retry decorator, preconfigured to handle PaLM exceptions"""
-    import openai
+    import litellm
 
     errors = [
-        openai.error.Timeout,
-        openai.error.APIError,
-        openai.error.APIConnectionError,
-        openai.error.RateLimitError,
-        openai.error.ServiceUnavailableError,
+        litellm.Timeout,
+        litellm.APIError,
+        litellm.APIConnectionError,
+        litellm.RateLimitError,
     ]
     return create_base_retry_decorator(
         error_types=errors, max_retries=llm.max_retries, run_manager=run_manager
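
Note (not part of the patch): as a rough illustration of the retry behavior this change wires up, the sketch below builds an equivalent retry policy with tenacity directly against the litellm exception classes. It is an assumption-laden example, not the langchain implementation; the attempt count and wait bounds are made-up values, whereas the real code derives them from llm.max_retries inside create_base_retry_decorator.

    # Illustrative sketch only: approximates the retry policy the patched
    # _create_retry_decorator produces, using tenacity directly.
    import litellm
    from tenacity import (
        retry,
        retry_if_exception_type,
        stop_after_attempt,
        wait_exponential,
    )

    # The same litellm exception types the patch registers as retryable.
    retryable_errors = (
        litellm.Timeout,
        litellm.APIError,
        litellm.APIConnectionError,
        litellm.RateLimitError,
    )

    @retry(
        reraise=True,
        stop=stop_after_attempt(6),  # assumed; the real value comes from llm.max_retries
        wait=wait_exponential(multiplier=1, min=4, max=10),  # assumed backoff bounds
        retry=retry_if_exception_type(retryable_errors),
    )
    def completion_with_retry(**kwargs):
        # Any of the litellm exception types above triggers another attempt;
        # other exceptions propagate immediately.
        return litellm.completion(**kwargs)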