Mirror of https://github.com/hwchase17/langchain.git (synced 2025-06-30 10:23:30 +00:00)
fix apparent spelling inconsistencies (#8574)
Use ImportErrors where appropriate
parent: 0ec020698f
commit: 465faab935
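The commit applies one pattern throughout the diff below: when an optional dependency fails to import, the guard re-raises an ImportError carrying an install hint, instead of a ValueError, so that `except ImportError` in calling code catches the right exception type. A minimal sketch of the pattern, using a hypothetical `example_pkg` dependency that is not part of this commit:

def _import_example_pkg():
    # Optional-dependency guard: keep the exception type an ImportError so
    # callers that catch ImportError still work, and point at the fix.
    try:
        import example_pkg  # hypothetical optional dependency
    except ImportError:
        raise ImportError(
            "example_pkg package not found, please install it with "
            "`pip install example_pkg`"
        )
    return example_pkg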
@@ -15,7 +15,7 @@ def create_csv_agent(
     try:
         import pandas as pd
     except ImportError:
-        raise ValueError(
+        raise ImportError(
             "pandas package not found, please install with `pip install pandas`"
         )
@@ -121,7 +121,7 @@ def _get_prompt_and_tools(

         pd.set_option("display.max_columns", None)
     except ImportError:
-        raise ValueError(
+        raise ImportError(
             "pandas package not found, please install with `pip install pandas`"
         )
@@ -232,7 +232,7 @@ def _get_functions_prompt_and_tools(

         pd.set_option("display.max_columns", None)
     except ImportError:
-        raise ValueError(
+        raise ImportError(
             "pandas package not found, please install with `pip install pandas`"
         )
     if input_variables is not None:
@@ -46,7 +46,7 @@ def create_spark_dataframe_agent(
     """Construct a Spark agent from an LLM and dataframe."""

     if not _validate_spark_df(df) and not _validate_spark_connect_df(df):
-        raise ValueError("Spark is not installed. run `pip install pyspark`.")
+        raise ImportError("Spark is not installed. run `pip install pyspark`.")

     if input_variables is None:
         input_variables = ["df", "input", "agent_scratchpad"]
@@ -21,7 +21,7 @@ class YoutubeAudioLoader(BlobLoader):
         try:
             import yt_dlp
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "yt_dlp package not found, please install it with "
                 "`pip install yt_dlp`"
             )
@@ -121,7 +121,11 @@ class DirectoryLoader(BaseLoader):
                 if self.silent_errors:
                     logger.warning(e)
                 else:
-                    raise e
+                    raise ImportError(
+                        "To log the progress of DirectoryLoader "
+                        "you need to install tqdm, "
+                        "`pip install tqdm`"
+                    )

         if self.use_multithreading:
             with concurrent.futures.ThreadPoolExecutor(
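One note on the hunk above: because the new ImportError is raised inside an `except ImportError as e:` handler, Python chains it implicitly to the original failure (the traceback reads "During handling of the above exception, another exception occurred"), so dropping the bare `raise e` does not lose the root cause. A small self-contained illustration of that behavior, separate from the loader itself:

def need_tqdm():
    try:
        import tqdm  # noqa: F401
    except ImportError:
        # Implicitly chained: the original ImportError is kept as __context__.
        raise ImportError("tqdm not found, please install it with `pip install tqdm`")

# With tqdm missing, calling need_tqdm() prints both tracebacks.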
@@ -74,12 +74,11 @@ class EverNoteLoader(BaseLoader):

             return html2text.html2text(content).strip()
         except ImportError as e:
-            logger.error(
+            raise ImportError(
                 "Could not import `html2text`. Although it is not a required package "
                 "to use Langchain, using the EverNote loader requires `html2text`. "
                 "Please install `html2text` via `pip install html2text` and try again."
-            )
-            raise e
+            ) from e

     @staticmethod
     def _parse_resource(resource: list) -> dict:
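The `) from e` above is explicit exception chaining: the new ImportError records the original import failure as its `__cause__`, so the full traceback survives even though the handler no longer re-raises `e`. A minimal sketch of the same shape, outside the loader:

try:
    import html2text  # optional dependency of the EverNote loader
except ImportError as e:
    raise ImportError(
        "Please install `html2text` via `pip install html2text` and try again."
    ) from e  # keep the root cause visible in the traceback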
@@ -20,7 +20,7 @@ class GeoDataFrameLoader(BaseLoader):
         try:
             import geopandas as gpd
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "geopandas package not found, please install it with "
                 "`pip install geopandas`"
             )
@@ -21,14 +21,14 @@ class OpenAIWhisperParser(BaseBlobParser):
         try:
             import openai
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "openai package not found, please install it with "
                 "`pip install openai`"
             )
         try:
             from pydub import AudioSegment
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "pydub package not found, please install it with " "`pip install pydub`"
             )
@@ -1,3 +1,4 @@
+import logging
 from typing import Dict, Iterator, List, Union

 import requests
@@ -6,6 +7,8 @@ from langchain.docstore.document import Document
 from langchain.document_loaders.base import BaseBlobParser
 from langchain.document_loaders.blob_loaders import Blob

+logger = logging.getLogger(__name__)
+

 class ServerUnavailableException(Exception):
     """Exception raised when the GROBID server is unavailable."""
@@ -26,7 +29,7 @@ class GrobidParser(BaseBlobParser):
         try:
             requests.get(grobid_server)
         except requests.exceptions.RequestException:
-            print(
+            logger.error(
                 "GROBID server does not appear up and running, \
                 please ensure Grobid is installed and the server is running"
             )
@@ -136,6 +139,7 @@ class GrobidParser(BaseBlobParser):
             )
             xml_data = r.text
         except requests.exceptions.ReadTimeout:
+            logger.error("GROBID server timed out. Return None.")
             xml_data = None

         if xml_data is None:
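The three GROBID hunks above route diagnostics through a module-level logger instead of print and add a log line on read timeouts. Applications then decide how (or whether) these messages are shown; a minimal sketch of surfacing them on stderr, assuming the parser's `logging.getLogger(__name__)` logger sits under the `langchain` namespace:

import logging

# Show error-level messages from langchain modules (including the GROBID
# parser's module logger) via the default stderr handler.
logging.basicConfig(level=logging.ERROR)
logging.getLogger("langchain").setLevel(logging.ERROR)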
@@ -24,7 +24,7 @@ class BS4HTMLParser(BaseBlobParser):
         try:
             import bs4  # noqa:F401
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "beautifulsoup4 package not found, please install it with "
                 "`pip install beautifulsoup4`"
             )
@@ -13,8 +13,8 @@ class CodeSegmenter(ABC):

     @abstractmethod
     def simplify_code(self) -> str:
-        raise NotImplementedError  # pragma: no cover
+        raise NotImplementedError()  # pragma: no cover

     @abstractmethod
     def extract_functions_classes(self) -> List[str]:
-        raise NotImplementedError  # pragma: no cover
+        raise NotImplementedError()  # pragma: no cover
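`raise NotImplementedError` and `raise NotImplementedError()` behave identically at runtime: raising an exception class instantiates it with no arguments, so this hunk (and the matching ones in the document compressors and DocArrayIndex below) is purely a consistency cleanup. A stripped-down sketch of the abstract-method stub shape used here:

from abc import ABC, abstractmethod
from typing import List

class Segmenter(ABC):  # hypothetical stand-in for CodeSegmenter
    @abstractmethod
    def extract_functions_classes(self) -> List[str]:
        raise NotImplementedError()  # same effect as `raise NotImplementedError`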
@@ -87,7 +87,7 @@ class PyPDFium2Parser(BaseBlobParser):
         try:
             import pypdfium2  # noqa:F401
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "pypdfium2 package not found, please install it with"
                 " `pip install pypdfium2`"
             )
@@ -14,7 +14,7 @@ def _dependable_praw_import() -> praw:
     try:
         import praw
     except ImportError:
-        raise ValueError(
+        raise ImportError(
             "praw package not found, please install it with `pip install praw`"
         )
     return praw
@@ -27,7 +27,7 @@ class TencentCOSDirectoryLoader(BaseLoader):
         try:
             from qcloud_cos import CosS3Client
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "Could not import cos-python-sdk-v5 python package. "
                 "Please install it with `pip install cos-python-sdk-v5`."
             )
@@ -29,7 +29,7 @@ class TencentCOSFileLoader(BaseLoader):
         try:
             from qcloud_cos import CosS3Client
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "Could not import cos-python-sdk-v5 python package. "
                 "Please install it with `pip install cos-python-sdk-v5`."
             )
@@ -49,7 +49,7 @@ class UnstructuredURLLoader(BaseLoader):

             self.__version = __unstructured_version__
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "unstructured package not found, please install it with "
                 "`pip install unstructured`"
             )
@@ -38,7 +38,7 @@ class PlaywrightURLLoader(BaseLoader):
         try:
             import unstructured  # noqa:F401
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "unstructured package not found, please install it with "
                 "`pip install unstructured`"
             )
@@ -76,7 +76,7 @@ class WebBaseLoader(BaseLoader):
         try:
             import bs4  # noqa:F401
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "bs4 package not found, please install it with " "`pip install bs4`"
             )
@@ -83,7 +83,7 @@ def _filter_cluster_embeddings(
     try:
         from sklearn.cluster import KMeans
     except ImportError:
-        raise ValueError(
+        raise ImportError(
             "sklearn package not found, please install it with "
             "`pip install scikit-learn`"
         )
@@ -20,7 +20,7 @@ class Html2TextTransformer(BaseDocumentTransformer):
         try:
             import html2text
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 """html2text package not found, please
                 install it with `pip install html2text`"""
             )
@@ -41,7 +41,7 @@ class CassandraChatMessageHistory(BaseChatMessageHistory):
         try:
             from cassio.history import StoredBlobHistory
         except (ImportError, ModuleNotFoundError):
-            raise ValueError(
+            raise ImportError(
                 "Could not import cassio python package. "
                 "Please install it with `pip install cassio`."
             )
@@ -61,7 +61,7 @@ class ZepChatMessageHistory(BaseChatMessageHistory):
         try:
             from zep_python import ZepClient
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "Could not import zep-python package. "
                 "Please install it with `pip install zep-python`."
             )
@@ -59,7 +59,7 @@ class LLMChainFilter(BaseDocumentCompressor):
         callbacks: Optional[Callbacks] = None,
     ) -> Sequence[Document]:
         """Filter down documents."""
-        raise NotImplementedError
+        raise NotImplementedError()

     @classmethod
     def from_llm(
@@ -90,4 +90,4 @@ class CohereRerank(BaseDocumentCompressor):
         query: str,
         callbacks: Optional[Callbacks] = None,
     ) -> Sequence[Document]:
-        raise NotImplementedError
+        raise NotImplementedError()
@@ -76,4 +76,4 @@ class EmbeddingsFilter(BaseDocumentCompressor):
         callbacks: Optional[Callbacks] = None,
     ) -> Sequence[Document]:
         """Filter down documents."""
-        raise NotImplementedError
+        raise NotImplementedError()
@@ -42,7 +42,7 @@ def import_installed_app_flow() -> InstalledAppFlow:
     try:
         from google_auth_oauthlib.flow import InstalledAppFlow
     except ImportError:
-        raise ValueError(
+        raise ImportError(
             "You need to install google-auth-oauthlib to use this toolkit. "
             "Try running pip install --upgrade google-auth-oauthlib"
         )
@@ -58,7 +58,7 @@ def import_googleapiclient_resource_builder() -> build_resource:
     try:
         from googleapiclient.discovery import build
     except ImportError:
-        raise ValueError(
+        raise ImportError(
             "You need to install googleapiclient to use this toolkit. "
             "Try running pip install --upgrade google-api-python-client"
         )
@@ -30,7 +30,7 @@ def lazy_import_playwright_browsers() -> Tuple[Type[AsyncBrowser], Type[SyncBrow
         from playwright.async_api import Browser as AsyncBrowser  # noqa: F401
         from playwright.sync_api import Browser as SyncBrowser  # noqa: F401
     except ImportError:
-        raise ValueError(
+        raise ImportError(
             "The 'playwright' package is required to use the playwright tools."
             " Please install it with 'pip install playwright'."
         )
@@ -38,7 +38,7 @@ class ExtractHyperlinksTool(BaseBrowserTool):
         try:
             from bs4 import BeautifulSoup  # noqa: F401
         except ImportError:
-            raise ValueError(
+            raise ImportError(
                 "The 'beautifulsoup4' package is required to use this tool."
                 " Please install it with 'pip install beautifulsoup4'."
             )
@@ -14,7 +14,7 @@ def make_image_public(client: Steamship, block: Block) -> str:
         from steamship.data.workspace import SignedUrl
         from steamship.utils.signed_urls import upload_to_signed_url
     except ImportError:
-        raise ValueError(
+        raise ImportError(
             "The make_image_public function requires the steamship"
             " package to be installed. Please install steamship"
             " with `pip install --upgrade steamship`"
@@ -20,7 +20,7 @@ def _check_docarray_import() -> None:

         da_version = docarray.__version__.split(".")
         if int(da_version[0]) == 0 and int(da_version[1]) <= 31:
-            raise ValueError(
+            raise ImportError(
                 f"To use the DocArrayHnswSearch VectorStore the docarray "
                 f"version >=0.32.0 is expected, received: {docarray.__version__}."
                 f"To upgrade, please run: `pip install -U docarray`."
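This guard differs from the plain import checks: `docarray` imports fine, but a version below 0.32.0 is treated as unusable and is now reported as an ImportError. A hedged alternative sketch of the same check using `packaging` for the comparison (the real code splits `__version__` by hand, as shown above; assumes docarray and packaging are installed):

from packaging import version

import docarray

if version.parse(docarray.__version__) < version.parse("0.32.0"):
    raise ImportError(
        f"docarray>=0.32.0 is required, received {docarray.__version__}. "
        "To upgrade, run `pip install -U docarray`."
    )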
@@ -135,7 +135,7 @@ class DocArrayIndex(VectorStore, ABC):

             0 is dissimilar, 1 is most similar.
         """
-        raise NotImplementedError
+        raise NotImplementedError()

     def similarity_search_by_vector(
         self, embedding: List[float], k: int = 4, **kwargs: Any