all: test 3.13 ci (#27197)

Co-authored-by: Bagatur <baskaryan@gmail.com>
Co-authored-by: Bagatur <22008038+baskaryan@users.noreply.github.com>
Erick Friis, 2024-10-25 12:56:58 -07:00, committed by GitHub
parent 06df15c9c0
commit 600b7bdd61
237 changed files with 3668 additions and 4656 deletions
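The hunks shown below all come from the community LocalAI embeddings module. Each change appends `# type: ignore[attr-defined]` to an `openai.error` lookup: that submodule only exists in the pre-1.0 `openai` SDK, so when mypy resolves an installed `openai>=1` package the attributes are reported as undefined, even though the code still runs for users pinned to `openai<1`. For context, here is a sketch of a version-tolerant way to pick the same exception classes. This is not what the commit does (it keeps the legacy attributes and suppresses the errors); the `openai>=1` names are that SDK's top-level exception exports.

# Sketch only: select retryable exception classes by installed SDK generation,
# instead of relying on the legacy `openai.error` attributes.
from importlib import metadata

import openai

if metadata.version("openai").startswith("0."):
    # openai < 1.0: exceptions live in the `error` submodule.
    RETRYABLE_ERRORS = (
        openai.error.Timeout,
        openai.error.APIError,
        openai.error.APIConnectionError,
        openai.error.RateLimitError,
        openai.error.ServiceUnavailableError,
    )
else:
    # openai >= 1.0: exceptions are top-level; Timeout and
    # ServiceUnavailableError correspond to APITimeoutError and
    # InternalServerError respectively.
    RETRYABLE_ERRORS = (
        openai.APITimeoutError,
        openai.APIError,
        openai.APIConnectionError,
        openai.RateLimitError,
        openai.InternalServerError,
    )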


@@ -46,11 +46,11 @@ def _create_retry_decorator(embeddings: LocalAIEmbeddings) -> Callable[[Any], Any]:
         stop=stop_after_attempt(embeddings.max_retries),
         wait=wait_exponential(multiplier=1, min=min_seconds, max=max_seconds),
         retry=(
-            retry_if_exception_type(openai.error.Timeout)
-            | retry_if_exception_type(openai.error.APIError)
-            | retry_if_exception_type(openai.error.APIConnectionError)
-            | retry_if_exception_type(openai.error.RateLimitError)
-            | retry_if_exception_type(openai.error.ServiceUnavailableError)
+            retry_if_exception_type(openai.error.Timeout)  # type: ignore[attr-defined]
+            | retry_if_exception_type(openai.error.APIError)  # type: ignore[attr-defined]
+            | retry_if_exception_type(openai.error.APIConnectionError)  # type: ignore[attr-defined]
+            | retry_if_exception_type(openai.error.RateLimitError)  # type: ignore[attr-defined]
+            | retry_if_exception_type(openai.error.ServiceUnavailableError)  # type: ignore[attr-defined]
         ),
         before_sleep=before_sleep_log(logger, logging.WARNING),
     )
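The decorator built in this hunk is plain tenacity: each `retry_if_exception_type(...)` is a predicate, `|` combines them into retry-on-any, and `retry(...)` returns a decorator applied to the embedding request. A minimal self-contained sketch of the same pattern; the `flaky_call` function, the 3-attempt limit, and the built-in exception types are illustrative, not from the commit.

import logging

from tenacity import (
    before_sleep_log,
    retry,
    retry_if_exception_type,
    stop_after_attempt,
    wait_exponential,
)

logger = logging.getLogger(__name__)


@retry(
    reraise=True,
    stop=stop_after_attempt(3),
    # 2^x seconds between attempts, clamped to the same 4..10 second
    # window the LocalAI decorator uses.
    wait=wait_exponential(multiplier=1, min=4, max=10),
    # Predicates OR together with `|`, exactly as in the hunk above.
    retry=retry_if_exception_type(TimeoutError)
    | retry_if_exception_type(ConnectionError),
    before_sleep=before_sleep_log(logger, logging.WARNING),
)
def flaky_call() -> list[float]:
    ...  # stand-in for the actual embedding request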
@@ -68,11 +68,11 @@ def _async_retry_decorator(embeddings: LocalAIEmbeddings) -> Any:
         stop=stop_after_attempt(embeddings.max_retries),
         wait=wait_exponential(multiplier=1, min=min_seconds, max=max_seconds),
         retry=(
-            retry_if_exception_type(openai.error.Timeout)
-            | retry_if_exception_type(openai.error.APIError)
-            | retry_if_exception_type(openai.error.APIConnectionError)
-            | retry_if_exception_type(openai.error.RateLimitError)
-            | retry_if_exception_type(openai.error.ServiceUnavailableError)
+            retry_if_exception_type(openai.error.Timeout)  # type: ignore[attr-defined]
+            | retry_if_exception_type(openai.error.APIError)  # type: ignore[attr-defined]
+            | retry_if_exception_type(openai.error.APIConnectionError)  # type: ignore[attr-defined]
+            | retry_if_exception_type(openai.error.RateLimitError)  # type: ignore[attr-defined]
+            | retry_if_exception_type(openai.error.ServiceUnavailableError)  # type: ignore[attr-defined]
         ),
         before_sleep=before_sleep_log(logger, logging.WARNING),
     )
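This second hunk is the async counterpart. With tenacity, the usual way to retry a coroutine is `AsyncRetrying`, consumed with `async for`. A hedged sketch of that pattern follows; the `acall` argument and the parameter values are illustrative, and this is not the wrapper the module itself defines.

from tenacity import (
    AsyncRetrying,
    retry_if_exception_type,
    stop_after_attempt,
    wait_exponential,
)


async def call_with_retries(acall, max_retries: int = 3):
    # Each yielded `attempt` is a context manager: an exception raised inside
    # it marks the attempt as failed (and may trigger another iteration),
    # while returning inside it ends the retry loop successfully.
    async for attempt in AsyncRetrying(
        reraise=True,
        stop=stop_after_attempt(max_retries),
        wait=wait_exponential(multiplier=1, min=4, max=10),
        retry=retry_if_exception_type(TimeoutError),
    ):
        with attempt:
            return await acall()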
@@ -93,7 +93,7 @@ def _check_response(response: dict) -> dict:
     if any(len(d["embedding"]) == 1 for d in response["data"]):
         import openai
 
-        raise openai.error.APIError("LocalAI API returned an empty embedding")
+        raise openai.error.APIError("LocalAI API returned an empty embedding")  # type: ignore[attr-defined]
     return response
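`_check_response` treats a length-1 embedding vector as LocalAI's way of signalling an empty result and converts it into an API error. A usage sketch with purely illustrative inputs:

good = {"data": [{"embedding": [0.1, 0.2, 0.3]}]}
bad = {"data": [{"embedding": [0.0]}]}  # length-1 vector == "empty" sentinel

_check_response(good)  # returns the dict unchanged
_check_response(bad)   # raises openai.error.APIError (pre-1.0 openai)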
@@ -230,7 +230,7 @@ class LocalAIEmbeddings(BaseModel, Embeddings):
         try:
             import openai
 
-            values["client"] = openai.Embedding
+            values["client"] = openai.Embedding  # type: ignore[attr-defined]
         except ImportError:
             raise ImportError(
                 "Could not import openai python package. "
@@ -253,7 +253,7 @@ class LocalAIEmbeddings(BaseModel, Embeddings):
         if self.openai_proxy:
             import openai
 
-            openai.proxy = {
+            openai.proxy = {  # type: ignore[attr-defined]
                 "http": self.openai_proxy,
                 "https": self.openai_proxy,
             }  # type: ignore[assignment]
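The last two hunks both touch pre-1.0 module-level state: `openai.Embedding` as the bound client and `openai.proxy` as a process-wide proxy setting, neither of which exists in `openai>=1`. A usage sketch under the pre-1.0 assumption; the model name, endpoint, and proxy URL are illustrative, not from the commit.

import openai

# Process-wide proxy, as assigned in the last hunk (openai < 1.0 only).
openai.proxy = {
    "http": "http://proxy.internal:3128",
    "https": "http://proxy.internal:3128",
}

# The client bound at line 230, values["client"] = openai.Embedding:
response = openai.Embedding.create(
    model="text-embedding-ada-002",        # illustrative model name
    input=["hello world"],
    api_base="http://localhost:8080/v1",   # illustrative LocalAI endpoint
)

# openai >= 1.0 has neither attribute; the equivalent goes through a client object:
# client = openai.OpenAI(base_url="http://localhost:8080/v1", api_key="not-needed")
# client.embeddings.create(model="text-embedding-ada-002", input=["hello world"])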