changed ValueError to ImportError (#5103)
# changed ValueError to ImportError

Code cleanup. Fixed inconsistencies in ImportError handling: sometimes a failed import raised ImportError and sometimes ValueError. All such cases now `raise ImportError`.

Also:
- added installation instructions to error messages where they were missing;
- fixed several installation instructions in the error messages;
- fixed several other issues in the ImportError handling.
This commit is contained in:
parent 5e47c648ed
commit c28cc0f1ac
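For reference, a minimal sketch of the pattern this commit standardizes across the codebase: guard the optional import and raise `ImportError` (rather than `ValueError`) with an installation hint. `somepackage` is a placeholder used only for illustration, not one of the modules touched by this commit.

try:
    import somepackage  # noqa: F401  # optional dependency, only needed for this feature
except ImportError:
    # Raise ImportError (not ValueError) so callers can handle missing optional
    # dependencies consistently, and tell the user how to install the package.
    raise ImportError(
        "Could not import somepackage python package. "
        "Please install it with `pip install somepackage`."
    )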
@@ -34,7 +34,7 @@ def create_pandas_dataframe_agent(
     try:
         import pandas as pd
     except ImportError:
-        raise ValueError(
+        raise ImportError(
             "pandas package not found, please install with `pip install pandas`"
         )
@@ -313,7 +313,7 @@ class GPTCache(BaseCache):
         try:
             import gptcache  # noqa: F401
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "Could not import gptcache python package. "
                 "Please install it with `pip install gptcache`."
             )
@@ -54,7 +54,7 @@ class OpenAIModerationChain(Chain):
                 openai.organization = openai_organization
             values["client"] = openai.Moderation
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "Could not import openai python package. "
                 "Please install it with `pip install openai`."
             )
@@ -86,7 +86,7 @@ class AzureChatOpenAI(ChatOpenAI):
             if openai_organization:
                 openai.organization = openai_organization
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "Could not import openai python package. "
                 "Please install it with `pip install openai`."
             )
@@ -41,7 +41,7 @@ class ApifyDatasetLoader(BaseLoader, BaseModel):

             values["apify_client"] = ApifyClient()
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "Could not import apify-client Python package. "
                 "Please install it with `pip install apify-client`."
             )
@@ -63,7 +63,7 @@ class DocugamiLoader(BaseLoader, BaseModel):
         try:
             from lxml import etree
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "Could not import lxml python package. "
                 "Please install it with `pip install lxml`."
             )
@@ -259,7 +259,7 @@ class DocugamiLoader(BaseLoader, BaseModel):
         try:
             from lxml import etree
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "Could not import lxml python package. "
                 "Please install it with `pip install lxml`."
             )
@@ -33,7 +33,7 @@ class DuckDBLoader(BaseLoader):
         try:
             import duckdb
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "Could not import duckdb python package. "
                 "Please install it with `pip install duckdb`."
             )
@@ -39,7 +39,7 @@ class ImageCaptionLoader(BaseLoader):
         try:
             from transformers import BlipForConditionalGeneration, BlipProcessor
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "`transformers` package not found, please install with "
                 "`pip install transformers`."
             )
@@ -66,7 +66,7 @@ class ImageCaptionLoader(BaseLoader):
         try:
             from PIL import Image
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "`PIL` package not found, please install with `pip install pillow`"
             )
@@ -42,7 +42,7 @@ class JSONLoader(BaseLoader):
         try:
             import jq  # noqa:F401
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "jq package not found, please install it with `pip install jq`"
             )
@@ -83,7 +83,7 @@ class NotebookLoader(BaseLoader):
         try:
             import pandas as pd
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "pandas is needed for Notebook Loader, "
                 "please install with `pip install pandas`"
             )
@@ -77,7 +77,7 @@ class OneDriveLoader(BaseLoader, BaseModel):
         try:
             from O365 import FileSystemTokenBackend
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "O365 package not found, please install it with `pip install o365`"
             )
         if self.auth_with_token:
@@ -103,7 +103,7 @@ class PyPDFLoader(BasePDFLoader):
         try:
             import pypdf  # noqa:F401
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "pypdf package not found, please install it with " "`pip install pypdf`"
             )
         self.parser = PyPDFParser()
@@ -194,7 +194,7 @@ class PDFMinerLoader(BasePDFLoader):
         try:
             from pdfminer.high_level import extract_text  # noqa:F401
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "`pdfminer` package not found, please install it with "
                 "`pip install pdfminer.six`"
             )
@@ -222,7 +222,7 @@ class PDFMinerPDFasHTMLLoader(BasePDFLoader):
         try:
             from pdfminer.high_level import extract_text_to_fp  # noqa:F401
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "`pdfminer` package not found, please install it with "
                 "`pip install pdfminer.six`"
             )
@@ -256,7 +256,7 @@ class PyMuPDFLoader(BasePDFLoader):
         try:
             import fitz  # noqa:F401
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "`PyMuPDF` package not found, please install it with "
                 "`pip install pymupdf`"
             )
@@ -375,7 +375,7 @@ class PDFPlumberLoader(BasePDFLoader):
         try:
             import pdfplumber  # noqa:F401
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "pdfplumber package not found, please install it with "
                 "`pip install pdfplumber`"
             )
@@ -19,9 +19,8 @@ class ReadTheDocsLoader(BaseLoader):
         """Initialize path."""
         try:
             from bs4 import BeautifulSoup
-
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "Could not import python packages. "
                 "Please install it with `pip install beautifulsoup4`. "
             )
@@ -19,7 +19,7 @@ class S3DirectoryLoader(BaseLoader):
         try:
             import boto3
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "Could not import boto3 python package. "
                 "Please install it with `pip install boto3`."
             )
@@ -21,7 +21,7 @@ class S3FileLoader(BaseLoader):
         try:
             import boto3
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "Could not import `boto3` python package. "
                 "Please install it with `pip install boto3`."
             )
@@ -58,7 +58,7 @@ class SitemapLoader(WebBaseLoader):
         try:
             import lxml  # noqa:F401
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "lxml package not found, please install it with " "`pip install lxml`"
             )
@@ -107,8 +107,9 @@ class SitemapLoader(WebBaseLoader):
         try:
             import bs4
         except ImportError:
-            raise ValueError(
-                "bs4 package not found, please install it with " "`pip install bs4`"
+            raise ImportError(
+                "beautifulsoup4 package not found, please install it"
+                " with `pip install beautifulsoup4`"
             )
         fp = open(self.web_path)
         soup = bs4.BeautifulSoup(fp, "xml")
@@ -13,8 +13,8 @@ class SRTLoader(BaseLoader):
         try:
             import pysrt  # noqa:F401
         except ImportError:
-            raise ValueError(
-                "package `pysrt` not found, please install it with `pysrt`"
+            raise ImportError(
+                "package `pysrt` not found, please install it with `pip install pysrt`"
             )
         self.file_path = file_path
@@ -226,7 +226,7 @@ class TelegramChatApiLoader(BaseLoader):
             nest_asyncio.apply()
             asyncio.run(self.fetch_data_from_telegram())
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 """`nest_asyncio` package not found.
                 please install with `pip install nest_asyncio`
                 """
@@ -239,7 +239,7 @@ class TelegramChatApiLoader(BaseLoader):
         try:
             import pandas as pd
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 """`pandas` package not found.
                 please install with `pip install pandas`
                 """
@@ -15,7 +15,7 @@ def _dependable_tweepy_import() -> tweepy:
     try:
         import tweepy
     except ImportError:
-        raise ValueError(
+        raise ImportError(
             "tweepy package not found, please install it with `pip install tweepy`"
         )
     return tweepy
@@ -30,7 +30,7 @@ class PlaywrightURLLoader(BaseLoader):
         try:
             import playwright  # noqa:F401
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "playwright package not found, please install it with "
                 "`pip install playwright`"
             )
@@ -40,7 +40,7 @@ class SeleniumURLLoader(BaseLoader):
         try:
             import selenium  # noqa:F401
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "selenium package not found, please install it with "
                 "`pip install selenium`"
             )
@@ -48,7 +48,7 @@ class SeleniumURLLoader(BaseLoader):
         try:
             import unstructured  # noqa:F401
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "unstructured package not found, please install it with "
                 "`pip install unstructured`"
             )
@@ -48,7 +48,7 @@ class CohereEmbeddings(BaseModel, Embeddings):

             values["client"] = cohere.Client(cohere_api_key)
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "Could not import cohere python package. "
                 "Please install it with `pip install cohere`."
             )
@@ -46,7 +46,7 @@ class HuggingFaceEmbeddings(BaseModel, Embeddings):
             import sentence_transformers

         except ImportError as exc:
-            raise ValueError(
+            raise ImportError(
                 "Could not import sentence_transformers python package. "
                 "Please install it with `pip install sentence_transformers`."
             ) from exc
@@ -34,7 +34,7 @@ class JinaEmbeddings(BaseModel, Embeddings):
         try:
             import jina
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "Could not import `jina` python package. "
                 "Please install it with `pip install jina`."
             )
@@ -178,7 +178,7 @@ class OpenAIEmbeddings(BaseModel, Embeddings):
             openai.api_type = openai_api_type
             values["client"] = openai.Embedding
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "Could not import openai python package. "
                 "Please install it with `pip install openai`."
             )
@@ -192,67 +192,64 @@ class OpenAIEmbeddings(BaseModel, Embeddings):
         embeddings: List[List[float]] = [[] for _ in range(len(texts))]
         try:
             import tiktoken
-
-            tokens = []
-            indices = []
-            encoding = tiktoken.model.encoding_for_model(self.model)
-            for i, text in enumerate(texts):
-                if self.model.endswith("001"):
-                    # See: https://github.com/openai/openai-python/issues/418#issuecomment-1525939500
-                    # replace newlines, which can negatively affect performance.
-                    text = text.replace("\n", " ")
-                token = encoding.encode(
-                    text,
-                    allowed_special=self.allowed_special,
-                    disallowed_special=self.disallowed_special,
-                )
-                for j in range(0, len(token), self.embedding_ctx_length):
-                    tokens += [token[j : j + self.embedding_ctx_length]]
-                    indices += [i]
-
-            batched_embeddings = []
-            _chunk_size = chunk_size or self.chunk_size
-            for i in range(0, len(tokens), _chunk_size):
-                response = embed_with_retry(
-                    self,
-                    input=tokens[i : i + _chunk_size],
-                    engine=self.deployment,
-                    request_timeout=self.request_timeout,
-                    headers=self.headers,
-                )
-                batched_embeddings += [r["embedding"] for r in response["data"]]
-
-            results: List[List[List[float]]] = [[] for _ in range(len(texts))]
-            num_tokens_in_batch: List[List[int]] = [[] for _ in range(len(texts))]
-            for i in range(len(indices)):
-                results[indices[i]].append(batched_embeddings[i])
-                num_tokens_in_batch[indices[i]].append(len(tokens[i]))
-
-            for i in range(len(texts)):
-                _result = results[i]
-                if len(_result) == 0:
-                    average = embed_with_retry(
-                        self,
-                        input="",
-                        engine=self.deployment,
-                        request_timeout=self.request_timeout,
-                        headers=self.headers,
-                    )["data"][0]["embedding"]
-                else:
-                    average = np.average(
-                        _result, axis=0, weights=num_tokens_in_batch[i]
-                    )
-                embeddings[i] = (average / np.linalg.norm(average)).tolist()
-
-            return embeddings
-
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "Could not import tiktoken python package. "
                 "This is needed in order to for OpenAIEmbeddings. "
                 "Please install it with `pip install tiktoken`."
             )
+
+        tokens = []
+        indices = []
+        encoding = tiktoken.model.encoding_for_model(self.model)
+        for i, text in enumerate(texts):
+            if self.model.endswith("001"):
+                # See: https://github.com/openai/openai-python/issues/418#issuecomment-1525939500
+                # replace newlines, which can negatively affect performance.
+                text = text.replace("\n", " ")
+            token = encoding.encode(
+                text,
+                allowed_special=self.allowed_special,
+                disallowed_special=self.disallowed_special,
+            )
+            for j in range(0, len(token), self.embedding_ctx_length):
+                tokens += [token[j : j + self.embedding_ctx_length]]
+                indices += [i]
+
+        batched_embeddings = []
+        _chunk_size = chunk_size or self.chunk_size
+        for i in range(0, len(tokens), _chunk_size):
+            response = embed_with_retry(
+                self,
+                input=tokens[i : i + _chunk_size],
+                engine=self.deployment,
+                request_timeout=self.request_timeout,
+                headers=self.headers,
+            )
+            batched_embeddings += [r["embedding"] for r in response["data"]]
+
+        results: List[List[List[float]]] = [[] for _ in range(len(texts))]
+        num_tokens_in_batch: List[List[int]] = [[] for _ in range(len(texts))]
+        for i in range(len(indices)):
+            results[indices[i]].append(batched_embeddings[i])
+            num_tokens_in_batch[indices[i]].append(len(tokens[i]))
+
+        for i in range(len(texts)):
+            _result = results[i]
+            if len(_result) == 0:
+                average = embed_with_retry(
+                    self,
+                    input="",
+                    engine=self.deployment,
+                    request_timeout=self.request_timeout,
+                    headers=self.headers,
+                )["data"][0]["embedding"]
+            else:
+                average = np.average(_result, axis=0, weights=num_tokens_in_batch[i])
+            embeddings[i] = (average / np.linalg.norm(average)).tolist()
+
+        return embeddings

     def _embedding_func(self, text: str, *, engine: str) -> List[float]:
         """Call out to OpenAI's embedding endpoint."""
         # handle large input text
@@ -30,13 +30,20 @@ class TensorflowHubEmbeddings(BaseModel, Embeddings):
         super().__init__(**kwargs)
         try:
             import tensorflow_hub
+        except ImportError:
+            raise ImportError(
+                "Could not import tensorflow-hub python package. "
+                "Please install it with `pip install tensorflow-hub``."
+            )
+        try:
             import tensorflow_text  # noqa
+        except ImportError:
+            raise ImportError(
+                "Could not import tensorflow_text python package. "
+                "Please install it with `pip install tensorflow_text``."
+            )

-            self.embed = tensorflow_hub.load(self.model_url)
-        except ImportError as e:
-            raise ValueError(
-                "Could not import some python packages." "Please install them."
-            ) from e
+        self.embed = tensorflow_hub.load(self.model_url)

     class Config:
         """Configuration for this pydantic object."""
@@ -54,7 +54,7 @@ class NetworkxEntityGraph:
         try:
             import networkx as nx
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "Could not import networkx python package. "
                 "Please install it with `pip install networkx`."
             )
@@ -70,7 +70,7 @@ class NetworkxEntityGraph:
         try:
             import networkx as nx
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "Could not import networkx python package. "
                 "Please install it with `pip install networkx`."
             )
@@ -148,7 +148,7 @@ class AlephAlpha(LLM):

             values["client"] = aleph_alpha_client.Client(token=aleph_alpha_api_key)
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "Could not import aleph_alpha_client python package. "
                 "Please install it with `pip install aleph_alpha_client`."
             )
@@ -59,7 +59,7 @@ class _AnthropicCommon(BaseModel):
             values["AI_PROMPT"] = anthropic.AI_PROMPT
             values["count_tokens"] = anthropic.count_tokens
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "Could not import anthropic python package. "
                 "Please it install it with `pip install anthropic`."
             )
@@ -91,7 +91,7 @@ class Banana(LLM):
         try:
             import banana_dev as banana
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "Could not import banana-dev python package. "
                 "Please install it with `pip install banana-dev`."
             )
@@ -72,7 +72,7 @@ class Cohere(LLM):

             values["client"] = cohere.Client(cohere_api_key)
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "Could not import cohere python package. "
                 "Please install it with `pip install cohere`."
             )
@@ -29,7 +29,10 @@ def _create_retry_decorator() -> Callable[[Any], Any]:
     try:
         import google.api_core.exceptions
     except ImportError:
-        raise ImportError()
+        raise ImportError(
+            "Could not import google-api-core python package. "
+            "Please install it with `pip install google-api-core`."
+        )

     multiplier = 2
     min_seconds = 1
@@ -105,7 +108,10 @@ class GooglePalm(BaseLLM, BaseModel):

             genai.configure(api_key=google_api_key)
         except ImportError:
-            raise ImportError("Could not import google.generativeai python package.")
+            raise ImportError(
+                "Could not import google-generativeai python package. "
+                "Please install it with `pip install google-generativeai`."
+            )

         values["client"] = genai

@@ -100,7 +100,7 @@ class GooseAI(LLM):
             openai.api_base = "https://api.goose.ai/v1"
             values["client"] = openai.Completion
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "Could not import openai python package. "
                 "Please install it with `pip install openai`."
             )
@@ -97,7 +97,7 @@ class HuggingFaceTextGenInference(LLM):
                 values["inference_server_url"], timeout=values["timeout"]
             )
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "Could not import text_generation python package. "
                 "Please install it with `pip install text_generation`."
             )
@@ -75,7 +75,7 @@ class NLPCloud(LLM):
                 values["model_name"], nlpcloud_api_key, gpu=True, lang="en"
             )
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "Could not import nlpcloud python package. "
                 "Please install it with `pip install nlpcloud`."
             )
@@ -234,7 +234,7 @@ class BaseOpenAI(BaseLLM):
                 openai.organization = openai_organization
             values["client"] = openai.Completion
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "Could not import openai python package. "
                 "Please install it with `pip install openai`."
             )
@@ -462,7 +462,7 @@ class BaseOpenAI(BaseLLM):
         try:
             import tiktoken
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "Could not import tiktoken python package. "
                 "This is needed in order to calculate get_num_tokens. "
                 "Please install it with `pip install tiktoken`."
@@ -677,7 +677,7 @@ class OpenAIChat(BaseLLM):
             if openai_organization:
                 openai.organization = openai_organization
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "Could not import openai python package. "
                 "Please install it with `pip install openai`."
             )
@@ -807,7 +807,7 @@ class OpenAIChat(BaseLLM):
         try:
             import tiktoken
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "Could not import tiktoken python package. "
                 "This is needed in order to calculate get_num_tokens. "
                 "Please install it with `pip install tiktoken`."
@@ -50,7 +50,7 @@ class PredictionGuard(LLM):

             values["client"] = pg.Client(token=token)
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "Could not import predictionguard python package. "
                 "Please install it with `pip install predictionguard`."
             )
@@ -89,7 +89,7 @@ class Replicate(LLM):
         try:
             import replicate as replicate_python
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "Could not import replicate python package. "
                 "Please install it with `pip install replicate`."
             )
@@ -103,7 +103,7 @@ class RWKV(LLM, BaseModel):
         try:
             import tokenizers
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "Could not import tokenizers python package. "
                 "Please install it with `pip install tokenizers`."
             )
@@ -182,7 +182,7 @@ class SagemakerEndpoint(LLM):
                 ) from e

         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "Could not import boto3 python package. "
                 "Please install it with `pip install boto3`."
             )
@@ -155,7 +155,7 @@ class SelfHostedPipeline(LLM):
             import runhouse as rh

         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "Could not import runhouse python package. "
                 "Please install it with `pip install runhouse`."
             )
@@ -52,13 +52,11 @@ class FirestoreChatMessageHistory(BaseChatMessageHistory):
         try:
             import firebase_admin
             from firebase_admin import firestore
-        except ImportError as e:
-            logger.error(
-                "Failed to import Firebase and Firestore: %s. "
-                "Make sure to install the 'firebase-admin' module.",
-                e,
+        except ImportError:
+            raise ImportError(
+                "Could not import firebase-admin python package. "
+                "Please install it with `pip install firebase-admin`."
             )
-            raise e

         # For multiple instances, only initialize the app once.
         try:
@@ -25,7 +25,7 @@ class RedisChatMessageHistory(BaseChatMessageHistory):
         try:
             import redis
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "Could not import redis python package. "
                 "Please install it with `pip install redis`."
             )
@@ -91,7 +91,7 @@ class RedisEntityStore(BaseEntityStore):
         try:
             import redis
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "Could not import redis python package. "
                 "Please install it with `pip install redis`."
             )
@@ -41,7 +41,7 @@ class CohereRerank(BaseDocumentCompressor):

             values["client"] = cohere.Client(cohere_api_key)
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "Could not import cohere python package. "
                 "Please install it with `pip install cohere`."
             )
@@ -23,7 +23,7 @@ class WeaviateHybridSearchRetriever(BaseRetriever):
         try:
             import weaviate
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "Could not import weaviate python package. "
                 "Please install it with `pip install weaviate-client`."
             )
@@ -154,7 +154,7 @@ class TextSplitter(BaseDocumentTransformer, ABC):
         try:
             import tiktoken
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "Could not import tiktoken python package. "
                 "This is needed in order to calculate max_tokens_for_prompt. "
                 "Please install it with `pip install tiktoken`."
@@ -232,7 +232,7 @@ class TokenTextSplitter(TextSplitter):
         try:
             import tiktoken
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "Could not import tiktoken python package. "
                 "This is needed in order to for TokenTextSplitter. "
                 "Please install it with `pip install tiktoken`."
|
@ -35,7 +35,7 @@ class LambdaWrapper(BaseModel):
|
||||
|
||||
except ImportError:
|
||||
raise ImportError(
|
||||
"boto3 is not installed." "Please install it with `pip install boto3`"
|
||||
"boto3 is not installed. Please install it with `pip install boto3`"
|
||||
)
|
||||
|
||||
values["lambda_client"] = boto3.client("lambda")
|
||||
|
@ -51,8 +51,8 @@ class GooglePlacesAPIWrapper(BaseModel):
|
||||
|
||||
values["google_map_client"] = googlemaps.Client(gplaces_api_key)
|
||||
except ImportError:
|
||||
raise ValueError(
|
||||
"Could not import googlemaps python packge. "
|
||||
raise ImportError(
|
||||
"Could not import googlemaps python package. "
|
||||
"Please install it with `pip install googlemaps`."
|
||||
)
|
||||
return values
|
||||
|
@ -156,7 +156,7 @@ class JiraAPIWrapper(BaseModel):
|
||||
import json
|
||||
except ImportError:
|
||||
raise ImportError(
|
||||
"json is not installed. " "Please install it with `pip install json`"
|
||||
"json is not installed. Please install it with `pip install json`"
|
||||
)
|
||||
params = json.loads(query)
|
||||
return self.jira.issue_create(fields=dict(params))
|
||||
|
@ -38,7 +38,7 @@ class OpenWeatherMapAPIWrapper(BaseModel):
|
||||
|
||||
except ImportError:
|
||||
raise ImportError(
|
||||
"pyowm is not installed. " "Please install it with `pip install pyowm`"
|
||||
"pyowm is not installed. Please install it with `pip install pyowm`"
|
||||
)
|
||||
|
||||
owm = pyowm.OWM(openweathermap_api_key)
|
||||
|