Minor: Remove duplicated word in error message (#2706)
Removed the duplicated word "it" from the install-hint error messages, changing `Please it install it with xxx` to `Please install it with xxx`.
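For context, every touched message sits in the same optional-import guard: the package is imported inside a `try` block, and an `ImportError` is turned into a `ValueError` that carries the `pip install` hint. A minimal sketch of that pattern with the corrected wording follows; the helper name `_require_bs4` is illustrative only and is not part of this diff.

from typing import Any, Dict


def _require_bs4(values: Dict[str, Any]) -> Dict[str, Any]:
    """Illustrative validator-style check: fail with an install hint if bs4 is missing."""
    try:
        import bs4  # noqa: F401  # optional dependency, only needed at runtime
    except ImportError:
        raise ValueError(
            "Could not import bs4 python package. "
            "Please install it with `pip install bs4`."  # wording after this commit
        )
    return values

The same guard appears, with different package names, in every hunk below.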
parent 1e60e6e15b
commit 8073bc849f
@@ -57,7 +57,7 @@ class LLMRequestsChain(Chain):
         except ImportError:
             raise ValueError(
                 "Could not import bs4 python package. "
-                "Please it install it with `pip install bs4`."
+                "Please install it with `pip install bs4`."
             )
         return values

@@ -55,7 +55,7 @@ class OpenAIModerationChain(Chain):
         except ImportError:
             raise ValueError(
                 "Could not import openai python package. "
-                "Please it install it with `pip install openai`."
+                "Please install it with `pip install openai`."
             )
         return values

@@ -53,7 +53,7 @@ class Crawler:
         except ImportError:
             raise ValueError(
                 "Could not import playwright python package. "
-                "Please it install it with `pip install playwright`."
+                "Please install it with `pip install playwright`."
             )
         self.browser: Browser = (
             sync_playwright().start().chromium.launch(headless=False)
@@ -87,7 +87,7 @@ class AzureChatOpenAI(ChatOpenAI):
         except ImportError:
             raise ValueError(
                 "Could not import openai python package. "
-                "Please it install it with `pip install openai`."
+                "Please install it with `pip install openai`."
             )
         try:
             values["client"] = openai.ChatCompletion
@@ -167,7 +167,7 @@ class ChatOpenAI(BaseChatModel):
         except ImportError:
             raise ValueError(
                 "Could not import openai python package. "
-                "Please it install it with `pip install openai`."
+                "Please install it with `pip install openai`."
             )
         try:
             values["client"] = openai.ChatCompletion
@@ -336,7 +336,7 @@ class ChatOpenAI(BaseChatModel):
             raise ValueError(
                 "Could not import tiktoken python package. "
                 "This is needed in order to calculate get_num_tokens. "
-                "Please it install it with `pip install tiktoken`."
+                "Please install it with `pip install tiktoken`."
             )
         # create a GPT-3.5-Turbo encoder instance
         enc = tiktoken.encoding_for_model(self.model_name)
@@ -358,7 +358,7 @@ class ChatOpenAI(BaseChatModel):
             raise ValueError(
                 "Could not import tiktoken python package. "
                 "This is needed in order to calculate get_num_tokens. "
-                "Please it install it with `pip install tiktoken`."
+                "Please install it with `pip install tiktoken`."
             )

         model = self.model_name
@@ -24,7 +24,7 @@ class AzureBlobStorageContainerLoader(BaseLoader):
         except ImportError as exc:
             raise ValueError(
                 "Could not import azure storage blob python package. "
-                "Please it install it with `pip install azure-storage-blob`."
+                "Please install it with `pip install azure-storage-blob`."
             ) from exc

         container = ContainerClient.from_connection_string(
@@ -24,7 +24,7 @@ class AzureBlobStorageFileLoader(BaseLoader):
         except ImportError as exc:
             raise ValueError(
                 "Could not import azure storage blob python package. "
-                "Please it install it with `pip install azure-storage-blob`."
+                "Please install it with `pip install azure-storage-blob`."
             ) from exc

         client = BlobClient.from_connection_string(
@@ -35,7 +35,7 @@ class DuckDBLoader(BaseLoader):
         except ImportError:
             raise ValueError(
                 "Could not import duckdb python package. "
-                "Please it install it with `pip install duckdb`."
+                "Please install it with `pip install duckdb`."
             )

         docs = []
@@ -22,7 +22,7 @@ class GCSDirectoryLoader(BaseLoader):
         except ImportError:
             raise ValueError(
                 "Could not import google-cloud-storage python package. "
-                "Please it install it with `pip install google-cloud-storage`."
+                "Please install it with `pip install google-cloud-storage`."
             )
         client = storage.Client(project=self.project_name)
         docs = []
@@ -23,7 +23,7 @@ class GCSFileLoader(BaseLoader):
         except ImportError:
             raise ValueError(
                 "Could not import google-cloud-storage python package. "
-                "Please it install it with `pip install google-cloud-storage`."
+                "Please install it with `pip install google-cloud-storage`."
             )

         # Initialise a client
@@ -21,7 +21,7 @@ class S3DirectoryLoader(BaseLoader):
         except ImportError:
             raise ValueError(
                 "Could not import boto3 python package. "
-                "Please it install it with `pip install boto3`."
+                "Please install it with `pip install boto3`."
             )
         s3 = boto3.resource("s3")
         bucket = s3.Bucket(self.bucket)
@@ -23,7 +23,7 @@ class S3FileLoader(BaseLoader):
         except ImportError:
             raise ValueError(
                 "Could not import boto3 python package. "
-                "Please it install it with `pip install boto3`."
+                "Please install it with `pip install boto3`."
             )
         s3 = boto3.client("s3")
         with tempfile.TemporaryDirectory() as temp_dir:
@@ -118,7 +118,7 @@ class YoutubeLoader(BaseLoader):
         except ImportError:
             raise ImportError(
                 "Could not import youtube_transcript_api python package. "
-                "Please it install it with `pip install youtube-transcript-api`."
+                "Please install it with `pip install youtube-transcript-api`."
             )

         metadata = {"source": self.video_id}
@@ -159,7 +159,7 @@ class YoutubeLoader(BaseLoader):
         except ImportError:
             raise ImportError(
                 "Could not import pytube python package. "
-                "Please it install it with `pip install pytube`."
+                "Please install it with `pip install pytube`."
             )
         yt = YouTube(f"https://www.youtube.com/watch?v={self.video_id}")
         video_info = {
@@ -58,7 +58,7 @@ class AlephAlphaAsymmetricSemanticEmbedding(BaseModel, Embeddings):
         except ImportError:
             raise ValueError(
                 "Could not import aleph_alpha_client python package. "
-                "Please it install it with `pip install aleph_alpha_client`."
+                "Please install it with `pip install aleph_alpha_client`."
             )
         values["client"] = Client(token=aleph_alpha_api_key)
         return values
@@ -81,7 +81,7 @@ class AlephAlphaAsymmetricSemanticEmbedding(BaseModel, Embeddings):
         except ImportError:
             raise ValueError(
                 "Could not import aleph_alpha_client python package. "
-                "Please it install it with `pip install aleph_alpha_client`."
+                "Please install it with `pip install aleph_alpha_client`."
             )
         document_embeddings = []

@@ -121,7 +121,7 @@ class AlephAlphaAsymmetricSemanticEmbedding(BaseModel, Embeddings):
         except ImportError:
             raise ValueError(
                 "Could not import aleph_alpha_client python package. "
-                "Please it install it with `pip install aleph_alpha_client`."
+                "Please install it with `pip install aleph_alpha_client`."
             )
         symmetric_params = {
             "prompt": Prompt.from_text(text),
@@ -166,7 +166,7 @@ class AlephAlphaSymmetricSemanticEmbedding(AlephAlphaAsymmetricSemanticEmbedding
         except ImportError:
             raise ValueError(
                 "Could not import aleph_alpha_client python package. "
-                "Please it install it with `pip install aleph_alpha_client`."
+                "Please install it with `pip install aleph_alpha_client`."
             )
         query_params = {
             "prompt": Prompt.from_text(text),
@@ -48,7 +48,7 @@ class CohereEmbeddings(BaseModel, Embeddings):
         except ImportError:
             raise ValueError(
                 "Could not import cohere python package. "
-                "Please it install it with `pip install cohere`."
+                "Please install it with `pip install cohere`."
             )
         return values

@@ -73,7 +73,7 @@ class HuggingFaceHubEmbeddings(BaseModel, Embeddings):
         except ImportError:
             raise ValueError(
                 "Could not import huggingface_hub python package. "
-                "Please it install it with `pip install huggingface_hub`."
+                "Please install it with `pip install huggingface_hub`."
             )
         return values

@@ -36,7 +36,7 @@ class JinaEmbeddings(BaseModel, Embeddings):
         except ImportError:
             raise ValueError(
                 "Could not import `jina` python package. "
-                "Please it install it with `pip install jina`."
+                "Please install it with `pip install jina`."
             )

         # Setup client
@@ -188,7 +188,7 @@ class OpenAIEmbeddings(BaseModel, Embeddings):
         except ImportError:
             raise ValueError(
                 "Could not import openai python package. "
-                "Please it install it with `pip install openai`."
+                "Please install it with `pip install openai`."
             )
         return values

@@ -242,7 +242,7 @@ class OpenAIEmbeddings(BaseModel, Embeddings):
             raise ValueError(
                 "Could not import tiktoken python package. "
                 "This is needed in order to for OpenAIEmbeddings. "
-                "Please it install it with `pip install tiktoken`."
+                "Please install it with `pip install tiktoken`."
             )

     def _embedding_func(self, text: str, *, engine: str) -> List[float]:
@@ -131,7 +131,7 @@ class SagemakerEndpointEmbeddings(BaseModel, Embeddings):
         except ImportError:
             raise ValueError(
                 "Could not import boto3 python package. "
-                "Please it install it with `pip install boto3`."
+                "Please install it with `pip install boto3`."
             )
         return values

@@ -56,7 +56,7 @@ class NetworkxEntityGraph:
         except ImportError:
             raise ValueError(
                 "Could not import networkx python package. "
-                "Please it install it with `pip install networkx`."
+                "Please install it with `pip install networkx`."
             )
         if graph is not None:
             if not isinstance(graph, nx.DiGraph):
@@ -72,7 +72,7 @@ class NetworkxEntityGraph:
         except ImportError:
             raise ValueError(
                 "Could not import networkx python package. "
-                "Please it install it with `pip install networkx`."
+                "Please install it with `pip install networkx`."
             )
         graph = nx.read_gml(gml_path)
         return cls(graph)
@@ -149,7 +149,7 @@ class AlephAlpha(LLM):
         except ImportError:
             raise ValueError(
                 "Could not import aleph_alpha_client python package. "
-                "Please it install it with `pip install aleph_alpha_client`."
+                "Please install it with `pip install aleph_alpha_client`."
             )
         return values

@@ -76,7 +76,7 @@ class Anthropic(LLM):
         except ImportError:
             raise ValueError(
                 "Could not import anthropic python package. "
-                "Please it install it with `pip install anthropic`."
+                "Please install it with `pip install anthropic`."
             )
         return values

@@ -73,7 +73,7 @@ class Cohere(LLM):
         except ImportError:
             raise ValueError(
                 "Could not import cohere python package. "
-                "Please it install it with `pip install cohere`."
+                "Please install it with `pip install cohere`."
             )
         return values

@@ -70,7 +70,7 @@ class HuggingFaceEndpoint(LLM):
         except ImportError:
             raise ValueError(
                 "Could not import huggingface_hub python package. "
-                "Please it install it with `pip install huggingface_hub`."
+                "Please install it with `pip install huggingface_hub`."
             )
         return values

@@ -66,7 +66,7 @@ class HuggingFaceHub(LLM):
         except ImportError:
             raise ValueError(
                 "Could not import huggingface_hub python package. "
-                "Please it install it with `pip install huggingface_hub`."
+                "Please install it with `pip install huggingface_hub`."
             )
         return values

@@ -28,7 +28,7 @@ class ManifestWrapper(LLM):
         except ImportError:
             raise ValueError(
                 "Could not import manifest python package. "
-                "Please it install it with `pip install manifest-ml`."
+                "Please install it with `pip install manifest-ml`."
             )
         return values

@@ -76,7 +76,7 @@ class NLPCloud(LLM):
         except ImportError:
             raise ValueError(
                 "Could not import nlpcloud python package. "
-                "Please it install it with `pip install nlpcloud`."
+                "Please install it with `pip install nlpcloud`."
             )
         return values

@@ -446,7 +446,7 @@ class BaseOpenAI(BaseLLM):
             raise ValueError(
                 "Could not import tiktoken python package. "
                 "This is needed in order to calculate get_num_tokens. "
-                "Please it install it with `pip install tiktoken`."
+                "Please install it with `pip install tiktoken`."
             )
         encoder = "gpt2"
         if self.model_name in ("text-davinci-003", "text-davinci-002"):
@@ -611,7 +611,7 @@ class OpenAIChat(BaseLLM):
         except ImportError:
             raise ValueError(
                 "Could not import openai python package. "
-                "Please it install it with `pip install openai`."
+                "Please install it with `pip install openai`."
             )
         try:
             values["client"] = openai.ChatCompletion
@@ -742,7 +742,7 @@ class OpenAIChat(BaseLLM):
             raise ValueError(
                 "Could not import tiktoken python package. "
                 "This is needed in order to calculate get_num_tokens. "
-                "Please it install it with `pip install tiktoken`."
+                "Please install it with `pip install tiktoken`."
            )
         # create a GPT-3.5-Turbo encoder instance
         enc = tiktoken.encoding_for_model("gpt-3.5-turbo")
@@ -176,7 +176,7 @@ class SagemakerEndpoint(LLM):
         except ImportError:
             raise ValueError(
                 "Could not import boto3 python package. "
-                "Please it install it with `pip install boto3`."
+                "Please install it with `pip install boto3`."
             )
         return values

@@ -194,7 +194,7 @@ class BaseLanguageModel(BaseModel, ABC):
             raise ValueError(
                 "Could not import transformers python package. "
                 "This is needed in order to calculate get_num_tokens. "
-                "Please it install it with `pip install transformers`."
+                "Please install it with `pip install transformers`."
             )
         # create a GPT-3 tokenizer instance
         tokenizer = GPT2TokenizerFast.from_pretrained("gpt2")
@@ -131,7 +131,7 @@ class TextSplitter(ABC):
         except ImportError:
             raise ValueError(
                 "Could not import transformers python package. "
-                "Please it install it with `pip install transformers`."
+                "Please install it with `pip install transformers`."
             )
         return cls(length_function=_huggingface_tokenizer_length, **kwargs)

@@ -150,7 +150,7 @@ class TextSplitter(ABC):
             raise ValueError(
                 "Could not import tiktoken python package. "
                 "This is needed in order to calculate max_tokens_for_prompt. "
-                "Please it install it with `pip install tiktoken`."
+                "Please install it with `pip install tiktoken`."
             )

         # create a GPT-3 encoder instance
@@ -205,7 +205,7 @@ class TokenTextSplitter(TextSplitter):
             raise ValueError(
                 "Could not import tiktoken python package. "
                 "This is needed in order to for TokenTextSplitter. "
-                "Please it install it with `pip install tiktoken`."
+                "Please install it with `pip install tiktoken`."
             )
         # create a GPT-3 encoder instance
         self._tokenizer = tiktoken.get_encoding(encoding_name)
@@ -31,7 +31,7 @@ class WikipediaAPIWrapper(BaseModel):
         except ImportError:
             raise ValueError(
                 "Could not import wikipedia python package. "
-                "Please it install it with `pip install wikipedia`."
+                "Please install it with `pip install wikipedia`."
             )
         return values