langchain[minor]: add warnings when importing integrations (#15505)

These integrations should be imported from langchain-community directly.
Authored by Bagatur on 2024-01-04 17:41:45 -05:00; committed by GitHub
parent 14966581df
commit f5e4f0b30b
40 changed files with 556 additions and 2564 deletions
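In practice: the old `from langchain ...` imports keep working, but outside interactive sessions they now emit a `LangChainDeprecationWarning` that points at the matching `langchain_community` import. A minimal sketch of the intended behavior (illustrative; it assumes langchain and langchain-community versions that include this change — only the warning category and suggested import path come from the diff below):

```python
import warnings

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    # Resolved lazily via langchain.llms.__getattr__, which re-exports the
    # class from langchain_community and warns about the old import path.
    from langchain.llms import Cohere  # noqa: F401

for w in caught:
    print(w.category.__name__)
    print(w.message)
# Expected: a LangChainDeprecationWarning suggesting
# `from langchain_community.llms import Cohere` instead.
```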

View File

@@ -14,16 +14,11 @@ except metadata.PackageNotFoundError:
del metadata  # optional, avoids polluting the results of dir(__package__)
-def _is_interactive_env() -> bool:
-"""Determine if running within IPython or Jupyter."""
-import sys
-return hasattr(sys, "ps2")
def _warn_on_import(name: str, replacement: Optional[str] = None) -> None:
"""Warn on import of deprecated module."""
-if _is_interactive_env():
+from langchain.utils.interactive_env import is_interactive_env
+if is_interactive_env():
# No warnings for interactive environments.
# This is done to avoid polluting the output of interactive environments
# where users rely on auto-complete and may trigger this warning
@@ -131,104 +126,107 @@ def __getattr__(name: str) -> Any:
elif name == "Anthropic":
from langchain_community.llms import Anthropic
-_warn_on_import(name, replacement="langchain.llms.Anthropic")
+_warn_on_import(name, replacement="langchain_community.llms.Anthropic")
return Anthropic
elif name == "Banana":
from langchain_community.llms import Banana
-_warn_on_import(name, replacement="langchain.llms.Banana")
+_warn_on_import(name, replacement="langchain_community.llms.Banana")
return Banana
elif name == "CerebriumAI":
from langchain_community.llms import CerebriumAI
-_warn_on_import(name, replacement="langchain.llms.CerebriumAI")
+_warn_on_import(name, replacement="langchain_community.llms.CerebriumAI")
return CerebriumAI
elif name == "Cohere":
from langchain_community.llms import Cohere
-_warn_on_import(name, replacement="langchain.llms.Cohere")
+_warn_on_import(name, replacement="langchain_community.llms.Cohere")
return Cohere
elif name == "ForefrontAI":
from langchain_community.llms import ForefrontAI
-_warn_on_import(name, replacement="langchain.llms.ForefrontAI")
+_warn_on_import(name, replacement="langchain_community.llms.ForefrontAI")
return ForefrontAI
elif name == "GooseAI":
from langchain_community.llms import GooseAI
-_warn_on_import(name, replacement="langchain.llms.GooseAI")
+_warn_on_import(name, replacement="langchain_community.llms.GooseAI")
return GooseAI
elif name == "HuggingFaceHub":
from langchain_community.llms import HuggingFaceHub
-_warn_on_import(name, replacement="langchain.llms.HuggingFaceHub")
+_warn_on_import(name, replacement="langchain_community.llms.HuggingFaceHub")
return HuggingFaceHub
elif name == "HuggingFaceTextGenInference":
from langchain_community.llms import HuggingFaceTextGenInference
-_warn_on_import(name, replacement="langchain.llms.HuggingFaceTextGenInference")
+_warn_on_import(
+name, replacement="langchain_community.llms.HuggingFaceTextGenInference"
+)
return HuggingFaceTextGenInference
elif name == "LlamaCpp":
from langchain_community.llms import LlamaCpp
-_warn_on_import(name, replacement="langchain.llms.LlamaCpp")
+_warn_on_import(name, replacement="langchain_community.llms.LlamaCpp")
return LlamaCpp
elif name == "Modal":
from langchain_community.llms import Modal
-_warn_on_import(name, replacement="langchain.llms.Modal")
+_warn_on_import(name, replacement="langchain_community.llms.Modal")
return Modal
elif name == "OpenAI":
from langchain_community.llms import OpenAI
-_warn_on_import(name, replacement="langchain.llms.OpenAI")
+_warn_on_import(name, replacement="langchain_community.llms.OpenAI")
return OpenAI
elif name == "Petals":
from langchain_community.llms import Petals
-_warn_on_import(name, replacement="langchain.llms.Petals")
+_warn_on_import(name, replacement="langchain_community.llms.Petals")
return Petals
elif name == "PipelineAI":
from langchain_community.llms import PipelineAI
-_warn_on_import(name, replacement="langchain.llms.PipelineAI")
+_warn_on_import(name, replacement="langchain_community.llms.PipelineAI")
return PipelineAI
elif name == "SagemakerEndpoint":
from langchain_community.llms import SagemakerEndpoint
-_warn_on_import(name, replacement="langchain.llms.SagemakerEndpoint")
+_warn_on_import(name, replacement="langchain_community.llms.SagemakerEndpoint")
return SagemakerEndpoint
elif name == "StochasticAI":
from langchain_community.llms import StochasticAI
-_warn_on_import(name, replacement="langchain.llms.StochasticAI")
+_warn_on_import(name, replacement="langchain_community.llms.StochasticAI")
return StochasticAI
elif name == "Writer":
from langchain_community.llms import Writer
-_warn_on_import(name, replacement="langchain.llms.Writer")
+_warn_on_import(name, replacement="langchain_community.llms.Writer")
return Writer
elif name == "HuggingFacePipeline":
from langchain_community.llms.huggingface_pipeline import HuggingFacePipeline
_warn_on_import(
-name, replacement="langchain.llms.huggingface_pipeline.HuggingFacePipeline"
+name,
+replacement="langchain_community.llms.huggingface_pipeline.HuggingFacePipeline",
)
return HuggingFacePipeline
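Note: the inline `_is_interactive_env` helper removed above is replaced by a shared `langchain.utils.interactive_env.is_interactive_env`, which this commit imports throughout but does not show. Presumably it keeps the same check as the removed inline version, roughly:

```python
import sys


def is_interactive_env() -> bool:
    """Determine if running within IPython or Jupyter.

    Sketch based on the removed inline helper above; the shared module
    itself is not part of this diff.
    """
    # IPython/Jupyter define sys.ps2 (the secondary interactive prompt).
    return hasattr(sys, "ps2")
```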

View File

@@ -13,40 +13,11 @@ whether permissions of the given toolkit are appropriate for the application.
See [Security](https://python.langchain.com/docs/security) for more information.
"""
+import warnings
from pathlib import Path
from typing import Any
+from langchain_core._api import LangChainDeprecationWarning
-from langchain_community.agent_toolkits.ainetwork.toolkit import AINetworkToolkit
from langchain_community.agent_toolkits.amadeus.toolkit import AmadeusToolkit
from langchain_community.agent_toolkits.azure_cognitive_services import (
AzureCognitiveServicesToolkit,
)
from langchain_community.agent_toolkits.file_management.toolkit import (
FileManagementToolkit,
)
from langchain_community.agent_toolkits.gmail.toolkit import GmailToolkit
from langchain_community.agent_toolkits.jira.toolkit import JiraToolkit
from langchain_community.agent_toolkits.json.base import create_json_agent
from langchain_community.agent_toolkits.json.toolkit import JsonToolkit
from langchain_community.agent_toolkits.multion.toolkit import MultionToolkit
from langchain_community.agent_toolkits.nasa.toolkit import NasaToolkit
from langchain_community.agent_toolkits.nla.toolkit import NLAToolkit
from langchain_community.agent_toolkits.office365.toolkit import O365Toolkit
from langchain_community.agent_toolkits.openapi.base import create_openapi_agent
from langchain_community.agent_toolkits.openapi.toolkit import OpenAPIToolkit
from langchain_community.agent_toolkits.playwright.toolkit import (
PlayWrightBrowserToolkit,
)
from langchain_community.agent_toolkits.powerbi.base import create_pbi_agent
from langchain_community.agent_toolkits.powerbi.chat_base import create_pbi_chat_agent
from langchain_community.agent_toolkits.powerbi.toolkit import PowerBIToolkit
from langchain_community.agent_toolkits.slack.toolkit import SlackToolkit
from langchain_community.agent_toolkits.spark_sql.base import create_spark_sql_agent
from langchain_community.agent_toolkits.spark_sql.toolkit import SparkSQLToolkit
from langchain_community.agent_toolkits.sql.base import create_sql_agent
from langchain_community.agent_toolkits.sql.toolkit import SQLDatabaseToolkit
from langchain_community.agent_toolkits.steam.toolkit import SteamToolkit
from langchain_community.agent_toolkits.zapier.toolkit import ZapierToolkit
from langchain_core._api.path import as_import_path
from langchain.agents.agent_toolkits.conversational_retrieval.openai_functions import (
@@ -62,6 +33,7 @@ from langchain.agents.agent_toolkits.vectorstore.toolkit import (
VectorStoreToolkit,
)
from langchain.tools.retriever import create_retriever_tool
+from langchain.utils.interactive_env import is_interactive_env
DEPRECATED_AGENTS = [
"create_csv_agent",
@@ -84,7 +56,21 @@ def __getattr__(name: str) -> Any:
"for more information.\n"
f"Please update your import statement from: `{old_path}` to `{new_path}`."
)
-raise AttributeError(f"{name} does not exist")
from langchain_community import agent_toolkits
# If not in interactive env, raise warning.
if not is_interactive_env():
warnings.warn(
"Importing this agent toolkit from langchain is deprecated. Importing it "
"from langchain will no longer be supported as of langchain==0.2.0. "
"Please import from langchain-community instead:\n\n"
f"`from langchain_community.agent_toolkits import {name}`.\n\n"
"To install langchain-community run `pip install -U langchain-community`.",
category=LangChainDeprecationWarning,
)
return getattr(agent_toolkits, name)
__all__ = [
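The same shape repeats in every file below and leans on module-level `__getattr__` (PEP 562): it only runs for names the module does not define statically, so symbols still imported explicitly (for example the core handlers re-exported from `langchain_core` in the callbacks module below) resolve with no warning, while integration names fall through to `langchain_community`. A rough illustration, assuming both packages are installed:

```python
import langchain.callbacks as cb

# Defined statically in langchain.callbacks (re-exported from langchain_core):
# no __getattr__ call, no deprecation warning.
stdout_handler = cb.StdOutCallbackHandler()

# Not defined statically: resolved through __getattr__, which re-exports the
# class from langchain_community.callbacks and emits a
# LangChainDeprecationWarning (unless running under IPython/Jupyter).
wandb_handler_cls = cb.WandbCallbackHandler
```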

View File

@@ -6,38 +6,10 @@
BaseCallbackHandler --> <name>CallbackHandler  # Example: AimCallbackHandler
"""
+import warnings
+from typing import Any
+from langchain_core._api import LangChainDeprecationWarning
-from langchain_community.callbacks.aim_callback import AimCallbackHandler
from langchain_community.callbacks.argilla_callback import ArgillaCallbackHandler
from langchain_community.callbacks.arize_callback import ArizeCallbackHandler
from langchain_community.callbacks.arthur_callback import ArthurCallbackHandler
from langchain_community.callbacks.clearml_callback import ClearMLCallbackHandler
from langchain_community.callbacks.comet_ml_callback import CometCallbackHandler
from langchain_community.callbacks.context_callback import ContextCallbackHandler
from langchain_community.callbacks.flyte_callback import FlyteCallbackHandler
from langchain_community.callbacks.human import HumanApprovalCallbackHandler
from langchain_community.callbacks.infino_callback import InfinoCallbackHandler
from langchain_community.callbacks.labelstudio_callback import (
LabelStudioCallbackHandler,
)
from langchain_community.callbacks.llmonitor_callback import LLMonitorCallbackHandler
from langchain_community.callbacks.manager import (
get_openai_callback,
wandb_tracing_enabled,
)
from langchain_community.callbacks.mlflow_callback import MlflowCallbackHandler
from langchain_community.callbacks.openai_info import OpenAICallbackHandler
from langchain_community.callbacks.promptlayer_callback import (
PromptLayerCallbackHandler,
)
from langchain_community.callbacks.sagemaker_callback import SageMakerCallbackHandler
from langchain_community.callbacks.streamlit import (
LLMThoughtLabeler,
StreamlitCallbackHandler,
)
from langchain_community.callbacks.trubrics_callback import TrubricsCallbackHandler
from langchain_community.callbacks.wandb_callback import WandbCallbackHandler
from langchain_community.callbacks.whylabs_callback import WhyLabsCallbackHandler
from langchain_core.callbacks import (
StdOutCallbackHandler,
StreamingStdOutCallbackHandler,
@@ -54,6 +26,25 @@ from langchain.callbacks.streaming_aiter import AsyncIteratorCallbackHandler
from langchain.callbacks.streaming_stdout_final_only import (
FinalStreamingStdOutCallbackHandler,
)
from langchain.utils.interactive_env import is_interactive_env
def __getattr__(name: str) -> Any:
from langchain_community import callbacks
# If not in interactive env, raise warning.
if not is_interactive_env():
warnings.warn(
"Importing this callback from langchain is deprecated. Importing it from "
"langchain will no longer be supported as of langchain==0.2.0. "
"Please import from langchain-community instead:\n\n"
f"`from langchain_community.callbacks import {name}`.\n\n"
"To install langchain-community run `pip install -U langchain-community`.",
category=LangChainDeprecationWarning,
)
return getattr(callbacks, name)
__all__ = [
"AimCallbackHandler",

View File

@@ -16,37 +16,29 @@ an interface where "chat messages" are the inputs and outputs.
AIMessage, BaseMessage, HumanMessage
"""  # noqa: E501
import warnings
from langchain_core._api import LangChainDeprecationWarning
from langchain.utils.interactive_env import is_interactive_env
def __getattr__(name: str) -> None:
from langchain_community import chat_models
# If not in interactive env, raise warning.
if not is_interactive_env():
warnings.warn(
"Importing chat models from langchain is deprecated. Importing from "
"langchain will no longer be supported as of langchain==0.2.0. "
"Please import from langchain-community instead:\n\n"
f"`from langchain_community.chat_models import {name}`.\n\n"
"To install langchain-community run `pip install -U langchain-community`.",
category=LangChainDeprecationWarning,
)
return getattr(chat_models, name)
from langchain_community.chat_models.anthropic import ChatAnthropic
from langchain_community.chat_models.anyscale import ChatAnyscale
from langchain_community.chat_models.azure_openai import AzureChatOpenAI
from langchain_community.chat_models.baichuan import ChatBaichuan
from langchain_community.chat_models.baidu_qianfan_endpoint import QianfanChatEndpoint
from langchain_community.chat_models.bedrock import BedrockChat
from langchain_community.chat_models.cohere import ChatCohere
from langchain_community.chat_models.databricks import ChatDatabricks
from langchain_community.chat_models.ernie import ErnieBotChat
from langchain_community.chat_models.everlyai import ChatEverlyAI
from langchain_community.chat_models.fake import FakeListChatModel
from langchain_community.chat_models.fireworks import ChatFireworks
from langchain_community.chat_models.gigachat import GigaChat
from langchain_community.chat_models.google_palm import ChatGooglePalm
from langchain_community.chat_models.human import HumanInputChatModel
from langchain_community.chat_models.hunyuan import ChatHunyuan
from langchain_community.chat_models.javelin_ai_gateway import ChatJavelinAIGateway
from langchain_community.chat_models.jinachat import JinaChat
from langchain_community.chat_models.konko import ChatKonko
from langchain_community.chat_models.litellm import ChatLiteLLM
from langchain_community.chat_models.minimax import MiniMaxChat
from langchain_community.chat_models.mlflow import ChatMlflow
from langchain_community.chat_models.mlflow_ai_gateway import ChatMLflowAIGateway
from langchain_community.chat_models.ollama import ChatOllama
from langchain_community.chat_models.openai import ChatOpenAI
from langchain_community.chat_models.pai_eas_endpoint import PaiEasChatEndpoint
from langchain_community.chat_models.promptlayer_openai import PromptLayerChatOpenAI
from langchain_community.chat_models.vertexai import ChatVertexAI
from langchain_community.chat_models.volcengine_maas import VolcEngineMaasChat
from langchain_community.chat_models.yandex import ChatYandexGPT
__all__ = [
"ChatOpenAI",

View File

@@ -14,8 +14,29 @@ The **Docstore** is a simplified version of the Document Loader.
Document, AddableMixin
"""
+import warnings
+from typing import Any
-from langchain.docstore.arbitrary_fn import DocstoreFn
-from langchain.docstore.in_memory import InMemoryDocstore
-from langchain.docstore.wikipedia import Wikipedia
from langchain_core._api import LangChainDeprecationWarning
from langchain.utils.interactive_env import is_interactive_env
def __getattr__(name: str) -> Any:
from langchain_community import docstore
# If not in interactive env, raise warning.
if not is_interactive_env():
warnings.warn(
"Importing docstores from langchain is deprecated. Importing from "
"langchain will no longer be supported as of langchain==0.2.0. "
"Please import from langchain-community instead:\n\n"
f"`from langchain_community.docstore import {name}`.\n\n"
"To install langchain-community run `pip install -U langchain-community`.",
category=LangChainDeprecationWarning,
)
return getattr(docstore, name)
__all__ = ["DocstoreFn", "InMemoryDocstore", "Wikipedia"] __all__ = ["DocstoreFn", "InMemoryDocstore", "Wikipedia"]

View File

@@ -14,215 +14,42 @@
Document, <name>TextSplitter
"""
+import warnings
+from typing import Any
+from langchain_core._api import LangChainDeprecationWarning
-from langchain_community.document_loaders.acreom import AcreomLoader
from langchain_community.document_loaders.airbyte import (
AirbyteCDKLoader,
AirbyteGongLoader,
AirbyteHubspotLoader,
AirbyteSalesforceLoader,
AirbyteShopifyLoader,
AirbyteStripeLoader,
AirbyteTypeformLoader,
AirbyteZendeskSupportLoader,
)
from langchain_community.document_loaders.airbyte_json import AirbyteJSONLoader
from langchain_community.document_loaders.airtable import AirtableLoader
from langchain_community.document_loaders.apify_dataset import ApifyDatasetLoader
from langchain_community.document_loaders.arcgis_loader import ArcGISLoader
from langchain_community.document_loaders.arxiv import ArxivLoader
from langchain_community.document_loaders.assemblyai import (
AssemblyAIAudioTranscriptLoader,
)
from langchain_community.document_loaders.async_html import AsyncHtmlLoader
from langchain_community.document_loaders.azlyrics import AZLyricsLoader
from langchain_community.document_loaders.azure_ai_data import (
AzureAIDataLoader,
)
from langchain_community.document_loaders.azure_blob_storage_container import (
AzureBlobStorageContainerLoader,
)
from langchain_community.document_loaders.azure_blob_storage_file import (
AzureBlobStorageFileLoader,
)
from langchain_community.document_loaders.bibtex import BibtexLoader
from langchain_community.document_loaders.bigquery import BigQueryLoader
from langchain_community.document_loaders.bilibili import BiliBiliLoader
from langchain_community.document_loaders.blackboard import BlackboardLoader
from langchain_community.document_loaders.blob_loaders import (
Blob,
BlobLoader,
FileSystemBlobLoader,
YoutubeAudioLoader,
)
from langchain_community.document_loaders.blockchain import BlockchainDocumentLoader
from langchain_community.document_loaders.brave_search import BraveSearchLoader
from langchain_community.document_loaders.browserless import BrowserlessLoader
from langchain_community.document_loaders.chatgpt import ChatGPTLoader
from langchain_community.document_loaders.chromium import AsyncChromiumLoader
from langchain_community.document_loaders.college_confidential import (
CollegeConfidentialLoader,
)
from langchain_community.document_loaders.concurrent import ConcurrentLoader
from langchain_community.document_loaders.confluence import ConfluenceLoader
from langchain_community.document_loaders.conllu import CoNLLULoader
from langchain_community.document_loaders.couchbase import CouchbaseLoader
from langchain_community.document_loaders.csv_loader import (
CSVLoader,
UnstructuredCSVLoader,
)
from langchain_community.document_loaders.cube_semantic import CubeSemanticLoader
from langchain_community.document_loaders.datadog_logs import DatadogLogsLoader
from langchain_community.document_loaders.dataframe import DataFrameLoader
from langchain_community.document_loaders.diffbot import DiffbotLoader
from langchain_community.document_loaders.directory import DirectoryLoader
from langchain_community.document_loaders.discord import DiscordChatLoader
from langchain_community.document_loaders.docugami import DocugamiLoader
from langchain_community.document_loaders.docusaurus import DocusaurusLoader
from langchain_community.document_loaders.dropbox import DropboxLoader
from langchain_community.document_loaders.duckdb_loader import DuckDBLoader
from langchain_community.document_loaders.email import (
OutlookMessageLoader,
UnstructuredEmailLoader,
)
from langchain_community.document_loaders.epub import UnstructuredEPubLoader
from langchain_community.document_loaders.etherscan import EtherscanLoader
from langchain_community.document_loaders.evernote import EverNoteLoader
from langchain_community.document_loaders.excel import UnstructuredExcelLoader
from langchain_community.document_loaders.facebook_chat import FacebookChatLoader
from langchain_community.document_loaders.fauna import FaunaLoader
from langchain_community.document_loaders.figma import FigmaFileLoader
from langchain_community.document_loaders.gcs_directory import GCSDirectoryLoader
from langchain_community.document_loaders.gcs_file import GCSFileLoader
from langchain_community.document_loaders.geodataframe import GeoDataFrameLoader
from langchain_community.document_loaders.git import GitLoader
from langchain_community.document_loaders.gitbook import GitbookLoader
from langchain_community.document_loaders.github import GitHubIssuesLoader
from langchain_community.document_loaders.google_speech_to_text import (
GoogleSpeechToTextLoader,
)
from langchain_community.document_loaders.googledrive import GoogleDriveLoader
from langchain_community.document_loaders.gutenberg import GutenbergLoader
from langchain_community.document_loaders.hn import HNLoader
from langchain_community.document_loaders.html import UnstructuredHTMLLoader
from langchain_community.document_loaders.html_bs import BSHTMLLoader
from langchain_community.document_loaders.hugging_face_dataset import (
HuggingFaceDatasetLoader,
)
from langchain_community.document_loaders.ifixit import IFixitLoader
from langchain_community.document_loaders.image import UnstructuredImageLoader
from langchain_community.document_loaders.image_captions import ImageCaptionLoader
from langchain_community.document_loaders.imsdb import IMSDbLoader
from langchain_community.document_loaders.iugu import IuguLoader
from langchain_community.document_loaders.joplin import JoplinLoader
from langchain_community.document_loaders.json_loader import JSONLoader
from langchain_community.document_loaders.lakefs import LakeFSLoader
from langchain_community.document_loaders.larksuite import LarkSuiteDocLoader
from langchain_community.document_loaders.markdown import UnstructuredMarkdownLoader
from langchain_community.document_loaders.mastodon import MastodonTootsLoader
from langchain_community.document_loaders.max_compute import MaxComputeLoader
from langchain_community.document_loaders.mediawikidump import MWDumpLoader
from langchain_community.document_loaders.merge import MergedDataLoader
from langchain_community.document_loaders.mhtml import MHTMLLoader
from langchain_community.document_loaders.modern_treasury import ModernTreasuryLoader
from langchain_community.document_loaders.mongodb import MongodbLoader
from langchain_community.document_loaders.news import NewsURLLoader
from langchain_community.document_loaders.notebook import NotebookLoader
from langchain_community.document_loaders.notion import NotionDirectoryLoader
from langchain_community.document_loaders.notiondb import NotionDBLoader
from langchain_community.document_loaders.obs_directory import OBSDirectoryLoader
from langchain_community.document_loaders.obs_file import OBSFileLoader
from langchain_community.document_loaders.obsidian import ObsidianLoader
from langchain_community.document_loaders.odt import UnstructuredODTLoader
from langchain_community.document_loaders.onedrive import OneDriveLoader
from langchain_community.document_loaders.onedrive_file import OneDriveFileLoader
from langchain_community.document_loaders.open_city_data import OpenCityDataLoader
from langchain_community.document_loaders.org_mode import UnstructuredOrgModeLoader
from langchain_community.document_loaders.pdf import (
AmazonTextractPDFLoader,
MathpixPDFLoader,
OnlinePDFLoader,
PDFMinerLoader,
PDFMinerPDFasHTMLLoader,
PDFPlumberLoader,
PyMuPDFLoader,
PyPDFDirectoryLoader,
PyPDFium2Loader,
PyPDFLoader,
UnstructuredPDFLoader,
)
from langchain_community.document_loaders.polars_dataframe import PolarsDataFrameLoader
from langchain_community.document_loaders.powerpoint import UnstructuredPowerPointLoader
from langchain_community.document_loaders.psychic import PsychicLoader
from langchain_community.document_loaders.pubmed import PubMedLoader
from langchain_community.document_loaders.pyspark_dataframe import (
PySparkDataFrameLoader,
)
from langchain_community.document_loaders.python import PythonLoader
from langchain_community.document_loaders.readthedocs import ReadTheDocsLoader
from langchain_community.document_loaders.recursive_url_loader import RecursiveUrlLoader
from langchain_community.document_loaders.reddit import RedditPostsLoader
from langchain_community.document_loaders.roam import RoamLoader
from langchain_community.document_loaders.rocksetdb import RocksetLoader
from langchain_community.document_loaders.rss import RSSFeedLoader
from langchain_community.document_loaders.rst import UnstructuredRSTLoader
from langchain_community.document_loaders.rtf import UnstructuredRTFLoader
from langchain_community.document_loaders.s3_directory import S3DirectoryLoader
from langchain_community.document_loaders.s3_file import S3FileLoader
from langchain_community.document_loaders.sharepoint import SharePointLoader
from langchain_community.document_loaders.sitemap import SitemapLoader
from langchain_community.document_loaders.slack_directory import SlackDirectoryLoader
from langchain_community.document_loaders.snowflake_loader import SnowflakeLoader
from langchain_community.document_loaders.spreedly import SpreedlyLoader
from langchain_community.document_loaders.srt import SRTLoader
from langchain_community.document_loaders.stripe import StripeLoader
from langchain_community.document_loaders.telegram import (
TelegramChatApiLoader,
TelegramChatFileLoader,
)
from langchain_community.document_loaders.tencent_cos_directory import (
TencentCOSDirectoryLoader,
)
from langchain_community.document_loaders.tencent_cos_file import TencentCOSFileLoader
from langchain_community.document_loaders.tensorflow_datasets import (
TensorflowDatasetLoader,
)
from langchain_community.document_loaders.text import TextLoader
from langchain_community.document_loaders.tomarkdown import ToMarkdownLoader
from langchain_community.document_loaders.toml import TomlLoader
from langchain_community.document_loaders.trello import TrelloLoader
from langchain_community.document_loaders.tsv import UnstructuredTSVLoader
from langchain_community.document_loaders.twitter import TwitterTweetLoader
from langchain_community.document_loaders.unstructured import (
UnstructuredAPIFileIOLoader,
UnstructuredAPIFileLoader,
UnstructuredFileIOLoader,
UnstructuredFileLoader,
)
from langchain_community.document_loaders.url import UnstructuredURLLoader
from langchain_community.document_loaders.url_playwright import PlaywrightURLLoader
from langchain_community.document_loaders.url_selenium import SeleniumURLLoader
from langchain_community.document_loaders.weather import WeatherDataLoader
from langchain_community.document_loaders.web_base import WebBaseLoader
from langchain_community.document_loaders.whatsapp_chat import WhatsAppChatLoader
from langchain_community.document_loaders.wikipedia import WikipediaLoader
from langchain_community.document_loaders.word_document import (
Docx2txtLoader,
UnstructuredWordDocumentLoader,
)
from langchain_community.document_loaders.xml import UnstructuredXMLLoader
from langchain_community.document_loaders.xorbits import XorbitsLoader
from langchain_community.document_loaders.youtube import (
GoogleApiClient,
GoogleApiYoutubeLoader,
YoutubeLoader,
)
+from langchain.utils.interactive_env import is_interactive_env
-# Legacy: only for backwards compatibility. Use PyPDFLoader instead
-PagedPDFSplitter = PyPDFLoader
# For backwards compatibility
-TelegramChatLoader = TelegramChatFileLoader
+_old_to_new_name = {
"PagedPDFSplitter": "PyPDFLoader",
"TelegramChatLoader": "TelegramChatFileLoader",
}
def __getattr__(name: str) -> Any:
from langchain_community import document_loaders
# If not in interactive env, raise warning.
if not is_interactive_env():
warnings.warn(
"Importing document loaders from langchain is deprecated. Importing from "
"langchain will no longer be supported as of langchain==0.2.0. "
"Please import from langchain-community instead:\n\n"
f"`from langchain_community.document_loaders import {name}`.\n\n"
"To install langchain-community run `pip install -U langchain-community`.",
category=LangChainDeprecationWarning,
)
if name in _old_to_new_name:
warnings.warn(
f"Using legacy class name {name}, use {_old_to_new_name[name]} instead."
)
name = _old_to_new_name[name]
return getattr(document_loaders, name)
__all__ = [
"AcreomLoader",

View File

@@ -14,37 +14,30 @@
Document
"""  # noqa: E501
import warnings
from typing import Any
from langchain_core._api import LangChainDeprecationWarning
from langchain.utils.interactive_env import is_interactive_env
def __getattr__(name: str) -> Any:
from langchain_community import document_transformers
# If not in interactive env, raise warning.
if not is_interactive_env():
warnings.warn(
"Importing document transformers from langchain is deprecated. Importing "
"from langchain will no longer be supported as of langchain==0.2.0. "
"Please import from langchain-community instead:\n\n"
f"`from langchain_community.document_transformers import {name}`.\n\n"
"To install langchain-community run `pip install -U langchain-community`.",
category=LangChainDeprecationWarning,
)
return getattr(document_transformers, name)
from langchain_community.document_transformers.beautiful_soup_transformer import (
BeautifulSoupTransformer,
)
from langchain_community.document_transformers.doctran_text_extract import (
DoctranPropertyExtractor,
)
from langchain_community.document_transformers.doctran_text_qa import (
DoctranQATransformer,
)
from langchain_community.document_transformers.doctran_text_translate import (
DoctranTextTranslator,
)
from langchain_community.document_transformers.embeddings_redundant_filter import (
EmbeddingsClusteringFilter,
EmbeddingsRedundantFilter,
get_stateful_documents,
)
from langchain_community.document_transformers.google_translate import (
GoogleTranslateTransformer,
)
from langchain_community.document_transformers.html2text import Html2TextTransformer
from langchain_community.document_transformers.long_context_reorder import (
LongContextReorder,
)
from langchain_community.document_transformers.nuclia_text_transform import (
NucliaTextTransformer,
)
from langchain_community.document_transformers.openai_functions import (
OpenAIMetadataTagger,
)
__all__ = [
"BeautifulSoupTransformer",

View File

@@ -12,76 +12,31 @@ from different APIs and services.
import logging
+import warnings
from typing import Any
+from langchain_core._api import LangChainDeprecationWarning
-from langchain_community.embeddings.aleph_alpha import (
-AlephAlphaAsymmetricSemanticEmbedding,
-AlephAlphaSymmetricSemanticEmbedding,
-)
from langchain_community.embeddings.awa import AwaEmbeddings
from langchain_community.embeddings.azure_openai import AzureOpenAIEmbeddings
from langchain_community.embeddings.baidu_qianfan_endpoint import (
QianfanEmbeddingsEndpoint,
)
from langchain_community.embeddings.bedrock import BedrockEmbeddings
from langchain_community.embeddings.bookend import BookendEmbeddings
from langchain_community.embeddings.clarifai import ClarifaiEmbeddings
from langchain_community.embeddings.cohere import CohereEmbeddings
from langchain_community.embeddings.dashscope import DashScopeEmbeddings
from langchain_community.embeddings.databricks import DatabricksEmbeddings
from langchain_community.embeddings.deepinfra import DeepInfraEmbeddings
from langchain_community.embeddings.edenai import EdenAiEmbeddings
from langchain_community.embeddings.elasticsearch import ElasticsearchEmbeddings
from langchain_community.embeddings.embaas import EmbaasEmbeddings
from langchain_community.embeddings.ernie import ErnieEmbeddings
from langchain_community.embeddings.fake import (
DeterministicFakeEmbedding,
FakeEmbeddings,
)
from langchain_community.embeddings.fastembed import FastEmbedEmbeddings
from langchain_community.embeddings.google_palm import GooglePalmEmbeddings
from langchain_community.embeddings.gpt4all import GPT4AllEmbeddings
from langchain_community.embeddings.gradient_ai import GradientEmbeddings
from langchain_community.embeddings.huggingface import (
HuggingFaceBgeEmbeddings,
HuggingFaceEmbeddings,
HuggingFaceInferenceAPIEmbeddings,
HuggingFaceInstructEmbeddings,
)
from langchain_community.embeddings.huggingface_hub import HuggingFaceHubEmbeddings
from langchain_community.embeddings.infinity import InfinityEmbeddings
from langchain_community.embeddings.javelin_ai_gateway import JavelinAIGatewayEmbeddings
from langchain_community.embeddings.jina import JinaEmbeddings
from langchain_community.embeddings.johnsnowlabs import JohnSnowLabsEmbeddings
from langchain_community.embeddings.llamacpp import LlamaCppEmbeddings
from langchain_community.embeddings.localai import LocalAIEmbeddings
from langchain_community.embeddings.minimax import MiniMaxEmbeddings
from langchain_community.embeddings.mlflow import MlflowEmbeddings
from langchain_community.embeddings.mlflow_gateway import MlflowAIGatewayEmbeddings
from langchain_community.embeddings.modelscope_hub import ModelScopeEmbeddings
from langchain_community.embeddings.mosaicml import MosaicMLInstructorEmbeddings
from langchain_community.embeddings.nlpcloud import NLPCloudEmbeddings
from langchain_community.embeddings.octoai_embeddings import OctoAIEmbeddings
from langchain_community.embeddings.ollama import OllamaEmbeddings
from langchain_community.embeddings.openai import OpenAIEmbeddings
from langchain_community.embeddings.sagemaker_endpoint import (
SagemakerEndpointEmbeddings,
)
from langchain_community.embeddings.self_hosted import SelfHostedEmbeddings
from langchain_community.embeddings.self_hosted_hugging_face import (
SelfHostedHuggingFaceEmbeddings,
SelfHostedHuggingFaceInstructEmbeddings,
)
from langchain_community.embeddings.sentence_transformer import (
SentenceTransformerEmbeddings,
)
from langchain_community.embeddings.spacy_embeddings import SpacyEmbeddings
from langchain_community.embeddings.tensorflow_hub import TensorflowHubEmbeddings
from langchain_community.embeddings.vertexai import VertexAIEmbeddings
from langchain_community.embeddings.voyageai import VoyageEmbeddings
from langchain_community.embeddings.xinference import XinferenceEmbeddings
from langchain.embeddings.cache import CacheBackedEmbeddings
from langchain.utils.interactive_env import is_interactive_env
def __getattr__(name: str) -> Any:
from langchain_community import embeddings
# If not in interactive env, raise warning.
if not is_interactive_env():
warnings.warn(
"Importing embeddings from langchain is deprecated. Importing from "
"langchain will no longer be supported as of langchain==0.2.0. "
"Please import from langchain-community instead:\n\n"
f"`from langchain_community.embeddings import {name}`.\n\n"
"To install langchain-community run `pip install -U langchain-community`.",
category=LangChainDeprecationWarning,
)
return getattr(embeddings, name)
logger = logging.getLogger(__name__)

View File

@@ -1,15 +1,28 @@
"""**Graphs** provide a natural language interface to graph databases."""
import warnings
from typing import Any
from langchain_core._api import LangChainDeprecationWarning
from langchain.utils.interactive_env import is_interactive_env
def __getattr__(name: str) -> Any:
from langchain_community import graphs
# If not in interactive env, raise warning.
if not is_interactive_env():
warnings.warn(
"Importing graphs from langchain is deprecated. Importing from "
"langchain will no longer be supported as of langchain==0.2.0. "
"Please import from langchain-community instead:\n\n"
f"`from langchain_community.graphs import {name}`.\n\n"
"To install langchain-community run `pip install -U langchain-community`.",
category=LangChainDeprecationWarning,
)
return getattr(graphs, name)
from langchain_community.graphs.arangodb_graph import ArangoGraph
from langchain_community.graphs.falkordb_graph import FalkorDBGraph
from langchain_community.graphs.hugegraph import HugeGraph
from langchain_community.graphs.kuzu_graph import KuzuGraph
from langchain_community.graphs.memgraph_graph import MemgraphGraph
from langchain_community.graphs.nebula_graph import NebulaGraph
from langchain_community.graphs.neo4j_graph import Neo4jGraph
from langchain_community.graphs.neptune_graph import NeptuneGraph
from langchain_community.graphs.networkx_graph import NetworkxEntityGraph
from langchain_community.graphs.rdf_graph import RdfGraph
__all__ = [
"MemgraphGraph",

View File

@@ -17,10 +17,14 @@ access to the large language model (**LLM**) APIs and services.
CallbackManager, AsyncCallbackManager,
AIMessage, BaseMessage
"""  # noqa: E501
+import warnings
from typing import Any, Callable, Dict, Type
+from langchain_core._api import LangChainDeprecationWarning
from langchain_core.language_models.llms import BaseLLM
+from langchain.utils.interactive_env import is_interactive_env
def _import_ai21() -> Any:
from langchain_community.llms.ai21 import AI21
@@ -537,180 +541,27 @@ def _import_volcengine_maas() -> Any:
def __getattr__(name: str) -> Any:
+from langchain_community import llms
+# If not in interactive env, raise warning.
+if not is_interactive_env():
+warnings.warn(
+"Importing LLMs from langchain is deprecated. Importing from "
+"langchain will no longer be supported as of langchain==0.2.0. "
+"Please import from langchain-community instead:\n\n"
+f"`from langchain_community.llms import {name}`.\n\n"
+"To install langchain-community run `pip install -U langchain-community`.",
+category=LangChainDeprecationWarning,
+)
+if name == "type_to_cls_dict":
-if name == "AI21":
-return _import_ai21()
-elif name == "AlephAlpha":
-return _import_aleph_alpha()
-elif name == "AmazonAPIGateway":
-return _import_amazon_api_gateway()
-elif name == "Anthropic":
-return _import_anthropic()
-elif name == "Anyscale":
-return _import_anyscale()
-elif name == "Arcee":
-return _import_arcee()
-elif name == "Aviary":
-return _import_aviary()
elif name == "AzureMLOnlineEndpoint":
return _import_azureml_endpoint()
elif name == "QianfanLLMEndpoint":
return _import_baidu_qianfan_endpoint()
elif name == "Banana":
return _import_bananadev()
elif name == "Baseten":
return _import_baseten()
elif name == "Beam":
return _import_beam()
elif name == "Bedrock":
return _import_bedrock()
elif name == "NIBittensorLLM":
return _import_bittensor()
elif name == "CerebriumAI":
return _import_cerebriumai()
elif name == "ChatGLM":
return _import_chatglm()
elif name == "Clarifai":
return _import_clarifai()
elif name == "Cohere":
return _import_cohere()
elif name == "CTransformers":
return _import_ctransformers()
elif name == "CTranslate2":
return _import_ctranslate2()
elif name == "Databricks":
return _import_databricks()
elif name == "DeepInfra":
return _import_deepinfra()
elif name == "DeepSparse":
return _import_deepsparse()
elif name == "EdenAI":
return _import_edenai()
elif name == "FakeListLLM":
return _import_fake()
elif name == "Fireworks":
return _import_fireworks()
elif name == "ForefrontAI":
return _import_forefrontai()
elif name == "GigaChat":
return _import_gigachat()
elif name == "GooglePalm":
return _import_google_palm()
elif name == "GooseAI":
return _import_gooseai()
elif name == "GPT4All":
return _import_gpt4all()
elif name == "GradientLLM":
return _import_gradient_ai()
elif name == "HuggingFaceEndpoint":
return _import_huggingface_endpoint()
elif name == "HuggingFaceHub":
return _import_huggingface_hub()
elif name == "HuggingFacePipeline":
return _import_huggingface_pipeline()
elif name == "HuggingFaceTextGenInference":
return _import_huggingface_text_gen_inference()
elif name == "HumanInputLLM":
return _import_human()
elif name == "JavelinAIGateway":
return _import_javelin_ai_gateway()
elif name == "KoboldApiLLM":
return _import_koboldai()
elif name == "LlamaCpp":
return _import_llamacpp()
elif name == "ManifestWrapper":
return _import_manifest()
elif name == "Minimax":
return _import_minimax()
elif name == "Mlflow":
return _import_mlflow()
elif name == "MlflowAIGateway":
return _import_mlflow_ai_gateway()
elif name == "Modal":
return _import_modal()
elif name == "MosaicML":
return _import_mosaicml()
elif name == "NLPCloud":
return _import_nlpcloud()
elif name == "OctoAIEndpoint":
return _import_octoai_endpoint()
elif name == "Ollama":
return _import_ollama()
elif name == "OpaquePrompts":
return _import_opaqueprompts()
elif name == "AzureOpenAI":
return _import_azure_openai()
elif name == "OpenAI":
return _import_openai()
elif name == "OpenAIChat":
return _import_openai_chat()
elif name == "OpenLLM":
return _import_openllm()
elif name == "OpenLM":
return _import_openlm()
elif name == "PaiEasEndpoint":
return _import_pai_eas_endpoint()
elif name == "Petals":
return _import_petals()
elif name == "PipelineAI":
return _import_pipelineai()
elif name == "Predibase":
return _import_predibase()
elif name == "PredictionGuard":
return _import_predictionguard()
elif name == "PromptLayerOpenAI":
return _import_promptlayer()
elif name == "PromptLayerOpenAIChat":
return _import_promptlayer_chat()
elif name == "Replicate":
return _import_replicate()
elif name == "RWKV":
return _import_rwkv()
elif name == "SagemakerEndpoint":
return _import_sagemaker_endpoint()
elif name == "SelfHostedPipeline":
return _import_self_hosted()
elif name == "SelfHostedHuggingFaceLLM":
return _import_self_hosted_hugging_face()
elif name == "StochasticAI":
return _import_stochasticai()
elif name == "Nebula":
return _import_symblai_nebula()
elif name == "TextGen":
return _import_textgen()
elif name == "TitanTakeoff":
return _import_titan_takeoff()
elif name == "TitanTakeoffPro":
return _import_titan_takeoff_pro()
elif name == "Together":
return _import_together()
elif name == "Tongyi":
return _import_tongyi()
elif name == "VertexAI":
return _import_vertex()
elif name == "VertexAIModelGarden":
return _import_vertex_model_garden()
elif name == "VLLM":
return _import_vllm()
elif name == "VLLMOpenAI":
return _import_vllm_openai()
elif name == "WatsonxLLM":
return _import_watsonxllm()
elif name == "Writer":
return _import_writer()
elif name == "Xinference":
return _import_xinference()
elif name == "YandexGPT":
return _import_yandex_gpt()
elif name == "VolcEngineMaasLLM":
return _import_volcengine_maas()
elif name == "type_to_cls_dict":
# for backwards compatibility # for backwards compatibility
type_to_cls_dict: Dict[str, Type[BaseLLM]] = { type_to_cls_dict: Dict[str, Type[BaseLLM]] = {
k: v() for k, v in get_type_to_cls_dict().items() k: v() for k, v in get_type_to_cls_dict().items()
} }
return type_to_cls_dict return type_to_cls_dict
else: else:
raise AttributeError(f"Could not find: {name}") return getattr(llms, name)
__all__ = [ __all__ = [
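The `type_to_cls_dict` backwards-compatibility branch survives the rewrite: accessing it still builds the mapping eagerly from `get_type_to_cls_dict()`, only now each value is the class re-exported from `langchain_community`. A hedged check (the "openai" key is assumed from the existing registry; building the dict imports every provider module, so results depend on the installed langchain-community version):

```python
from langchain.llms import type_to_cls_dict  # resolved via the new __getattr__

# Each value is a class imported from langchain_community.llms.*
print(type_to_cls_dict["openai"])
```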

View File

@@ -1,45 +1,27 @@
-from langchain_community.chat_message_histories.astradb import (
-AstraDBChatMessageHistory,
-)
-from langchain_community.chat_message_histories.cassandra import (
-CassandraChatMessageHistory,
-)
-from langchain_community.chat_message_histories.cosmos_db import (
-CosmosDBChatMessageHistory,
-)
-from langchain_community.chat_message_histories.dynamodb import (
-DynamoDBChatMessageHistory,
-)
-from langchain_community.chat_message_histories.elasticsearch import (
-ElasticsearchChatMessageHistory,
-)
-from langchain_community.chat_message_histories.file import FileChatMessageHistory
-from langchain_community.chat_message_histories.firestore import (
-FirestoreChatMessageHistory,
-)
-from langchain_community.chat_message_histories.in_memory import ChatMessageHistory
-from langchain_community.chat_message_histories.momento import MomentoChatMessageHistory
-from langchain_community.chat_message_histories.mongodb import MongoDBChatMessageHistory
-from langchain_community.chat_message_histories.neo4j import Neo4jChatMessageHistory
-from langchain_community.chat_message_histories.postgres import (
-PostgresChatMessageHistory,
-)
-from langchain_community.chat_message_histories.redis import RedisChatMessageHistory
-from langchain_community.chat_message_histories.rocksetdb import (
-RocksetChatMessageHistory,
-)
-from langchain_community.chat_message_histories.singlestoredb import (
-SingleStoreDBChatMessageHistory,
-)
-from langchain_community.chat_message_histories.sql import SQLChatMessageHistory
-from langchain_community.chat_message_histories.streamlit import (
-StreamlitChatMessageHistory,
-)
-from langchain_community.chat_message_histories.upstash_redis import (
-UpstashRedisChatMessageHistory,
-)
-from langchain_community.chat_message_histories.xata import XataChatMessageHistory
-from langchain_community.chat_message_histories.zep import ZepChatMessageHistory
+import warnings
+from typing import Any
+from langchain_core._api import LangChainDeprecationWarning
+from langchain.utils.interactive_env import is_interactive_env
+def __getattr__(name: str) -> Any:
+from langchain_community import chat_message_histories
+# If not in interactive env, raise warning.
+if not is_interactive_env():
+warnings.warn(
+"Importing chat message histories from langchain is deprecated. Importing "
+"from langchain will no longer be supported as of langchain==0.2.0. "
+"Please import from langchain-community instead:\n\n"
+f"`from langchain_community.chat_message_histories import {name}`.\n\n"
+"To install langchain-community run `pip install -U langchain-community`.",
+category=LangChainDeprecationWarning,
+)
+return getattr(chat_message_histories, name)
__all__ = [
"AstraDBChatMessageHistory",

View File

@@ -17,59 +17,43 @@ the backbone of a retriever, but there are other types of retrievers as well.
Document, Serializable, Callbacks,
CallbackManagerForRetrieverRun, AsyncCallbackManagerForRetrieverRun
"""
+import warnings
+from typing import Any
+from langchain_core._api import LangChainDeprecationWarning
from langchain.retrievers.arcee import ArceeRetriever
from langchain.retrievers.arxiv import ArxivRetriever
from langchain.retrievers.azure_cognitive_search import AzureCognitiveSearchRetriever
from langchain.retrievers.bedrock import AmazonKnowledgeBasesRetriever
from langchain.retrievers.bm25 import BM25Retriever
from langchain.retrievers.chaindesk import ChaindeskRetriever
from langchain.retrievers.chatgpt_plugin_retriever import ChatGPTPluginRetriever
from langchain.retrievers.cohere_rag_retriever import CohereRagRetriever
from langchain.retrievers.contextual_compression import ContextualCompressionRetriever
from langchain.retrievers.docarray import DocArrayRetriever
from langchain.retrievers.elastic_search_bm25 import ElasticSearchBM25Retriever
from langchain.retrievers.embedchain import EmbedchainRetriever
from langchain.retrievers.ensemble import EnsembleRetriever
from langchain.retrievers.google_cloud_documentai_warehouse import (
GoogleDocumentAIWarehouseRetriever,
)
from langchain.retrievers.google_vertex_ai_search import (
GoogleCloudEnterpriseSearchRetriever,
GoogleVertexAIMultiTurnSearchRetriever,
GoogleVertexAISearchRetriever,
)
from langchain.retrievers.kay import KayAiRetriever
from langchain.retrievers.kendra import AmazonKendraRetriever
from langchain.retrievers.knn import KNNRetriever
from langchain.retrievers.llama_index import (
LlamaIndexGraphRetriever,
LlamaIndexRetriever,
)
from langchain.retrievers.merger_retriever import MergerRetriever
from langchain.retrievers.metal import MetalRetriever
from langchain.retrievers.milvus import MilvusRetriever
from langchain.retrievers.multi_query import MultiQueryRetriever
from langchain.retrievers.multi_vector import MultiVectorRetriever
from langchain.retrievers.outline import OutlineRetriever
from langchain.retrievers.parent_document_retriever import ParentDocumentRetriever
from langchain.retrievers.pinecone_hybrid_search import PineconeHybridSearchRetriever
from langchain.retrievers.pubmed import PubMedRetriever
from langchain.retrievers.re_phraser import RePhraseQueryRetriever
from langchain.retrievers.remote_retriever import RemoteLangChainRetriever
from langchain.retrievers.self_query.base import SelfQueryRetriever
from langchain.retrievers.svm import SVMRetriever
from langchain.retrievers.tavily_search_api import TavilySearchAPIRetriever
from langchain.retrievers.tfidf import TFIDFRetriever
from langchain.retrievers.time_weighted_retriever import (
TimeWeightedVectorStoreRetriever,
)
from langchain.retrievers.vespa_retriever import VespaRetriever
from langchain.retrievers.weaviate_hybrid_search import WeaviateHybridSearchRetriever
from langchain.retrievers.web_research import WebResearchRetriever
+from langchain.utils.interactive_env import is_interactive_env
-from langchain.retrievers.wikipedia import WikipediaRetriever
from langchain.retrievers.zep import ZepRetriever
from langchain.retrievers.zilliz import ZillizRetriever
def __getattr__(name: str) -> Any:
from langchain_community import retrievers
# If not in interactive env, raise warning.
if not is_interactive_env():
warnings.warn(
"Importing this retriever from langchain is deprecated. Importing it from "
"langchain will no longer be supported as of langchain==0.2.0. "
"Please import from langchain-community instead:\n\n"
f"`from langchain_community.retrievers import {name}`.\n\n"
"To install langchain-community run `pip install -U langchain-community`.",
category=LangChainDeprecationWarning,
)
return getattr(retrievers, name)
__all__ = [
"AmazonKendraRetriever",

View File

@@ -5,13 +5,34 @@ to a simple key-value interface.
The primary goal of these storages is to support implementation of caching.
"""
import warnings
from typing import Any
from langchain_core._api import LangChainDeprecationWarning
from langchain.storage._lc_store import create_kv_docstore, create_lc_store
from langchain.storage.encoder_backed import EncoderBackedStore
from langchain.storage.file_system import LocalFileStore
from langchain.storage.in_memory import InMemoryByteStore, InMemoryStore
+from langchain.utils.interactive_env import is_interactive_env
-from langchain.storage.redis import RedisStore
-from langchain.storage.upstash_redis import UpstashRedisByteStore, UpstashRedisStore
def __getattr__(name: str) -> Any:
from langchain_community import storage
# If not in interactive env, raise warning.
if not is_interactive_env():
warnings.warn(
"Importing stores from langchain is deprecated. Importing from "
"langchain will no longer be supported as of langchain==0.2.0. "
"Please import from langchain-community instead:\n\n"
f"`from langchain_community.storage import {name}`.\n\n"
"To install langchain-community run `pip install -U langchain-community`.",
category=LangChainDeprecationWarning,
)
return getattr(storage, name)
__all__ = [
"EncoderBackedStore",

View File

@@ -16,480 +16,18 @@ tool for the job.
CallbackManagerForToolRun, AsyncCallbackManagerForToolRun
"""
+import warnings
from typing import Any
+from langchain_core._api import LangChainDeprecationWarning
from langchain_core.tools import BaseTool, StructuredTool, Tool, tool
+from langchain.utils.interactive_env import is_interactive_env
# Used for internal purposes
_DEPRECATED_TOOLS = {"PythonAstREPLTool", "PythonREPLTool"}
def _import_ainetwork_app() -> Any:
from langchain_community.tools.ainetwork.app import AINAppOps
return AINAppOps
def _import_ainetwork_owner() -> Any:
from langchain_community.tools.ainetwork.owner import AINOwnerOps
return AINOwnerOps
def _import_ainetwork_rule() -> Any:
from langchain_community.tools.ainetwork.rule import AINRuleOps
return AINRuleOps
def _import_ainetwork_transfer() -> Any:
from langchain_community.tools.ainetwork.transfer import AINTransfer
return AINTransfer
def _import_ainetwork_value() -> Any:
from langchain_community.tools.ainetwork.value import AINValueOps
return AINValueOps
def _import_arxiv_tool() -> Any:
from langchain_community.tools.arxiv.tool import ArxivQueryRun
return ArxivQueryRun
def _import_azure_cognitive_services_AzureCogsFormRecognizerTool() -> Any:
from langchain_community.tools.azure_cognitive_services import (
AzureCogsFormRecognizerTool,
)
return AzureCogsFormRecognizerTool
def _import_azure_cognitive_services_AzureCogsImageAnalysisTool() -> Any:
from langchain_community.tools.azure_cognitive_services import (
AzureCogsImageAnalysisTool,
)
return AzureCogsImageAnalysisTool
def _import_azure_cognitive_services_AzureCogsSpeech2TextTool() -> Any:
from langchain_community.tools.azure_cognitive_services import (
AzureCogsSpeech2TextTool,
)
return AzureCogsSpeech2TextTool
def _import_azure_cognitive_services_AzureCogsText2SpeechTool() -> Any:
from langchain_community.tools.azure_cognitive_services import (
AzureCogsText2SpeechTool,
)
return AzureCogsText2SpeechTool
def _import_azure_cognitive_services_AzureCogsTextAnalyticsHealthTool() -> Any:
from langchain_community.tools.azure_cognitive_services import (
AzureCogsTextAnalyticsHealthTool,
)
return AzureCogsTextAnalyticsHealthTool
def _import_bing_search_tool_BingSearchResults() -> Any:
from langchain_community.tools.bing_search.tool import BingSearchResults
return BingSearchResults
def _import_bing_search_tool_BingSearchRun() -> Any:
from langchain_community.tools.bing_search.tool import BingSearchRun
return BingSearchRun
def _import_brave_search_tool() -> Any:
from langchain_community.tools.brave_search.tool import BraveSearch
return BraveSearch
def _import_ddg_search_tool_DuckDuckGoSearchResults() -> Any:
from langchain_community.tools.ddg_search.tool import DuckDuckGoSearchResults
return DuckDuckGoSearchResults
def _import_ddg_search_tool_DuckDuckGoSearchRun() -> Any:
from langchain_community.tools.ddg_search.tool import DuckDuckGoSearchRun
return DuckDuckGoSearchRun
def _import_edenai_EdenAiExplicitImageTool() -> Any:
from langchain_community.tools.edenai import EdenAiExplicitImageTool
return EdenAiExplicitImageTool
def _import_edenai_EdenAiObjectDetectionTool() -> Any:
from langchain_community.tools.edenai import EdenAiObjectDetectionTool
return EdenAiObjectDetectionTool
def _import_edenai_EdenAiParsingIDTool() -> Any:
from langchain_community.tools.edenai import EdenAiParsingIDTool
return EdenAiParsingIDTool
def _import_edenai_EdenAiParsingInvoiceTool() -> Any:
from langchain_community.tools.edenai import EdenAiParsingInvoiceTool
return EdenAiParsingInvoiceTool
def _import_edenai_EdenAiSpeechToTextTool() -> Any:
from langchain_community.tools.edenai import EdenAiSpeechToTextTool
return EdenAiSpeechToTextTool
def _import_edenai_EdenAiTextModerationTool() -> Any:
from langchain_community.tools.edenai import EdenAiTextModerationTool
return EdenAiTextModerationTool
def _import_edenai_EdenAiTextToSpeechTool() -> Any:
from langchain_community.tools.edenai import EdenAiTextToSpeechTool
return EdenAiTextToSpeechTool
def _import_edenai_EdenaiTool() -> Any:
from langchain_community.tools.edenai import EdenaiTool
return EdenaiTool
def _import_eleven_labs_text2speech() -> Any:
from langchain_community.tools.eleven_labs.text2speech import (
ElevenLabsText2SpeechTool,
)
return ElevenLabsText2SpeechTool
def _import_file_management_CopyFileTool() -> Any:
from langchain_community.tools.file_management import CopyFileTool
return CopyFileTool
def _import_file_management_DeleteFileTool() -> Any:
from langchain_community.tools.file_management import DeleteFileTool
return DeleteFileTool
def _import_file_management_FileSearchTool() -> Any:
from langchain_community.tools.file_management import FileSearchTool
return FileSearchTool
def _import_file_management_ListDirectoryTool() -> Any:
from langchain_community.tools.file_management import ListDirectoryTool
return ListDirectoryTool
def _import_file_management_MoveFileTool() -> Any:
from langchain_community.tools.file_management import MoveFileTool
return MoveFileTool
def _import_file_management_ReadFileTool() -> Any:
from langchain_community.tools.file_management import ReadFileTool
return ReadFileTool
def _import_file_management_WriteFileTool() -> Any:
from langchain_community.tools.file_management import WriteFileTool
return WriteFileTool
def _import_gmail_GmailCreateDraft() -> Any:
from langchain_community.tools.gmail import GmailCreateDraft
return GmailCreateDraft
def _import_gmail_GmailGetMessage() -> Any:
from langchain_community.tools.gmail import GmailGetMessage
return GmailGetMessage
def _import_gmail_GmailGetThread() -> Any:
from langchain_community.tools.gmail import GmailGetThread
return GmailGetThread
def _import_gmail_GmailSearch() -> Any:
from langchain_community.tools.gmail import GmailSearch
return GmailSearch
def _import_gmail_GmailSendMessage() -> Any:
from langchain_community.tools.gmail import GmailSendMessage
return GmailSendMessage
def _import_google_cloud_texttospeech() -> Any:
from langchain_community.tools.google_cloud.texttospeech import (
GoogleCloudTextToSpeechTool,
)
return GoogleCloudTextToSpeechTool
def _import_google_places_tool() -> Any:
from langchain_community.tools.google_places.tool import GooglePlacesTool
return GooglePlacesTool
def _import_google_search_tool_GoogleSearchResults() -> Any:
from langchain_community.tools.google_search.tool import GoogleSearchResults
return GoogleSearchResults
def _import_google_search_tool_GoogleSearchRun() -> Any:
from langchain_community.tools.google_search.tool import GoogleSearchRun
return GoogleSearchRun
def _import_google_serper_tool_GoogleSerperResults() -> Any:
from langchain_community.tools.google_serper.tool import GoogleSerperResults
return GoogleSerperResults
def _import_google_serper_tool_GoogleSerperRun() -> Any:
from langchain_community.tools.google_serper.tool import GoogleSerperRun
return GoogleSerperRun
def _import_searchapi_tool_SearchAPIResults() -> Any:
from langchain_community.tools.searchapi.tool import SearchAPIResults
return SearchAPIResults
def _import_searchapi_tool_SearchAPIRun() -> Any:
from langchain_community.tools.searchapi.tool import SearchAPIRun
return SearchAPIRun
def _import_graphql_tool() -> Any:
from langchain_community.tools.graphql.tool import BaseGraphQLTool
return BaseGraphQLTool
def _import_human_tool() -> Any:
from langchain_community.tools.human.tool import HumanInputRun
return HumanInputRun
def _import_ifttt() -> Any:
from langchain_community.tools.ifttt import IFTTTWebhook
return IFTTTWebhook
def _import_interaction_tool() -> Any:
from langchain_community.tools.interaction.tool import StdInInquireTool
return StdInInquireTool
def _import_jira_tool() -> Any:
from langchain_community.tools.jira.tool import JiraAction
return JiraAction
def _import_json_tool_JsonGetValueTool() -> Any:
from langchain_community.tools.json.tool import JsonGetValueTool
return JsonGetValueTool
def _import_json_tool_JsonListKeysTool() -> Any:
from langchain_community.tools.json.tool import JsonListKeysTool
return JsonListKeysTool
def _import_merriam_webster_tool() -> Any:
from langchain_community.tools.merriam_webster.tool import MerriamWebsterQueryRun
return MerriamWebsterQueryRun
def _import_metaphor_search() -> Any:
from langchain_community.tools.metaphor_search import MetaphorSearchResults
return MetaphorSearchResults
def _import_nasa_tool() -> Any:
from langchain_community.tools.nasa.tool import NasaAction
return NasaAction
def _import_office365_create_draft_message() -> Any:
from langchain_community.tools.office365.create_draft_message import (
O365CreateDraftMessage,
)
return O365CreateDraftMessage
def _import_office365_events_search() -> Any:
from langchain_community.tools.office365.events_search import O365SearchEvents
return O365SearchEvents
def _import_office365_messages_search() -> Any:
from langchain_community.tools.office365.messages_search import O365SearchEmails
return O365SearchEmails
def _import_office365_send_event() -> Any:
from langchain_community.tools.office365.send_event import O365SendEvent
return O365SendEvent
def _import_office365_send_message() -> Any:
from langchain_community.tools.office365.send_message import O365SendMessage
return O365SendMessage
def _import_openapi_utils_api_models() -> Any:
from langchain_community.tools.openapi.utils.api_models import APIOperation
return APIOperation
def _import_openapi_utils_openapi_utils() -> Any:
from langchain_community.tools.openapi.utils.openapi_utils import OpenAPISpec
return OpenAPISpec
def _import_openweathermap_tool() -> Any:
from langchain_community.tools.openweathermap.tool import OpenWeatherMapQueryRun
return OpenWeatherMapQueryRun
def _import_playwright_ClickTool() -> Any:
from langchain_community.tools.playwright import ClickTool
return ClickTool
def _import_playwright_CurrentWebPageTool() -> Any:
from langchain_community.tools.playwright import CurrentWebPageTool
return CurrentWebPageTool
def _import_playwright_ExtractHyperlinksTool() -> Any:
from langchain_community.tools.playwright import ExtractHyperlinksTool
return ExtractHyperlinksTool
def _import_playwright_ExtractTextTool() -> Any:
from langchain_community.tools.playwright import ExtractTextTool
return ExtractTextTool
def _import_playwright_GetElementsTool() -> Any:
from langchain_community.tools.playwright import GetElementsTool
return GetElementsTool
def _import_playwright_NavigateBackTool() -> Any:
from langchain_community.tools.playwright import NavigateBackTool
return NavigateBackTool
def _import_playwright_NavigateTool() -> Any:
from langchain_community.tools.playwright import NavigateTool
return NavigateTool
def _import_plugin() -> Any:
from langchain_community.tools.plugin import AIPluginTool
return AIPluginTool
def _import_powerbi_tool_InfoPowerBITool() -> Any:
from langchain_community.tools.powerbi.tool import InfoPowerBITool
return InfoPowerBITool
def _import_powerbi_tool_ListPowerBITool() -> Any:
from langchain_community.tools.powerbi.tool import ListPowerBITool
return ListPowerBITool
def _import_powerbi_tool_QueryPowerBITool() -> Any:
from langchain_community.tools.powerbi.tool import QueryPowerBITool
return QueryPowerBITool
def _import_pubmed_tool() -> Any:
from langchain_community.tools.pubmed.tool import PubmedQueryRun
return PubmedQueryRun
def _import_python_tool_PythonAstREPLTool() -> Any: def _import_python_tool_PythonAstREPLTool() -> Any:
raise ImportError( raise ImportError(
"This tool has been moved to langchain experiment. " "This tool has been moved to langchain experiment. "
@ -512,487 +50,27 @@ def _import_python_tool_PythonREPLTool() -> Any:
) )
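The Python REPL tools are the one exception to the community fallback: their import helpers now raise instead of forwarding. The full error text is elided in this hunk; based on it, the replacement is assumed to be the separately installed langchain-experimental package, so a hedged migration sketch looks like:

# Assumed migration target (not shown in this hunk): the REPL tools ship in the
# separate langchain-experimental package.
#   pip install -U langchain-experimental
from langchain_experimental.tools import PythonAstREPLTool, PythonREPLTool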
def _import_reddit_search_RedditSearchRun() -> Any:
from langchain_community.tools.reddit_search.tool import RedditSearchRun
return RedditSearchRun
def _import_render() -> Any:
from langchain_community.tools.convert_to_openai import (
format_tool_to_openai_function,
)
return format_tool_to_openai_function
def _import_requests_tool_BaseRequestsTool() -> Any:
from langchain_community.tools.requests.tool import BaseRequestsTool
return BaseRequestsTool
def _import_requests_tool_RequestsDeleteTool() -> Any:
from langchain_community.tools.requests.tool import RequestsDeleteTool
return RequestsDeleteTool
def _import_requests_tool_RequestsGetTool() -> Any:
from langchain_community.tools.requests.tool import RequestsGetTool
return RequestsGetTool
def _import_requests_tool_RequestsPatchTool() -> Any:
from langchain_community.tools.requests.tool import RequestsPatchTool
return RequestsPatchTool
def _import_requests_tool_RequestsPostTool() -> Any:
from langchain_community.tools.requests.tool import RequestsPostTool
return RequestsPostTool
def _import_requests_tool_RequestsPutTool() -> Any:
from langchain_community.tools.requests.tool import RequestsPutTool
return RequestsPutTool
def _import_steam_webapi_tool() -> Any:
from langchain_community.tools.steam.tool import SteamWebAPIQueryRun
return SteamWebAPIQueryRun
def _import_scenexplain_tool() -> Any:
from langchain_community.tools.scenexplain.tool import SceneXplainTool
return SceneXplainTool
def _import_searx_search_tool_SearxSearchResults() -> Any:
from langchain_community.tools.searx_search.tool import SearxSearchResults
return SearxSearchResults
def _import_searx_search_tool_SearxSearchRun() -> Any:
from langchain_community.tools.searx_search.tool import SearxSearchRun
return SearxSearchRun
def _import_shell_tool() -> Any:
from langchain_community.tools.shell.tool import ShellTool
return ShellTool
def _import_slack_get_channel() -> Any:
from langchain_community.tools.slack.get_channel import SlackGetChannel
return SlackGetChannel
def _import_slack_get_message() -> Any:
from langchain_community.tools.slack.get_message import SlackGetMessage
return SlackGetMessage
def _import_slack_schedule_message() -> Any:
from langchain_community.tools.slack.schedule_message import SlackScheduleMessage
return SlackScheduleMessage
def _import_slack_send_message() -> Any:
from langchain_community.tools.slack.send_message import SlackSendMessage
return SlackSendMessage
def _import_sleep_tool() -> Any:
from langchain_community.tools.sleep.tool import SleepTool
return SleepTool
def _import_spark_sql_tool_BaseSparkSQLTool() -> Any:
from langchain_community.tools.spark_sql.tool import BaseSparkSQLTool
return BaseSparkSQLTool
def _import_spark_sql_tool_InfoSparkSQLTool() -> Any:
from langchain_community.tools.spark_sql.tool import InfoSparkSQLTool
return InfoSparkSQLTool
def _import_spark_sql_tool_ListSparkSQLTool() -> Any:
from langchain_community.tools.spark_sql.tool import ListSparkSQLTool
return ListSparkSQLTool
def _import_spark_sql_tool_QueryCheckerTool() -> Any:
from langchain_community.tools.spark_sql.tool import QueryCheckerTool
return QueryCheckerTool
def _import_spark_sql_tool_QuerySparkSQLTool() -> Any:
from langchain_community.tools.spark_sql.tool import QuerySparkSQLTool
return QuerySparkSQLTool
def _import_sql_database_tool_BaseSQLDatabaseTool() -> Any:
from langchain_community.tools.sql_database.tool import BaseSQLDatabaseTool
return BaseSQLDatabaseTool
def _import_sql_database_tool_InfoSQLDatabaseTool() -> Any:
from langchain_community.tools.sql_database.tool import InfoSQLDatabaseTool
return InfoSQLDatabaseTool
def _import_sql_database_tool_ListSQLDatabaseTool() -> Any:
from langchain_community.tools.sql_database.tool import ListSQLDatabaseTool
return ListSQLDatabaseTool
def _import_sql_database_tool_QuerySQLCheckerTool() -> Any:
from langchain_community.tools.sql_database.tool import QuerySQLCheckerTool
return QuerySQLCheckerTool
def _import_sql_database_tool_QuerySQLDataBaseTool() -> Any:
from langchain_community.tools.sql_database.tool import QuerySQLDataBaseTool
return QuerySQLDataBaseTool
def _import_stackexchange_tool() -> Any:
from langchain_community.tools.stackexchange.tool import StackExchangeTool
return StackExchangeTool
def _import_steamship_image_generation() -> Any:
from langchain_community.tools.steamship_image_generation import (
SteamshipImageGenerationTool,
)
return SteamshipImageGenerationTool
def _import_vectorstore_tool_VectorStoreQATool() -> Any:
from langchain_community.tools.vectorstore.tool import VectorStoreQATool
return VectorStoreQATool
def _import_vectorstore_tool_VectorStoreQAWithSourcesTool() -> Any:
from langchain_community.tools.vectorstore.tool import VectorStoreQAWithSourcesTool
return VectorStoreQAWithSourcesTool
def _import_wikipedia_tool() -> Any:
from langchain_community.tools.wikipedia.tool import WikipediaQueryRun
return WikipediaQueryRun
def _import_wolfram_alpha_tool() -> Any:
from langchain_community.tools.wolfram_alpha.tool import WolframAlphaQueryRun
return WolframAlphaQueryRun
def _import_yahoo_finance_news() -> Any:
from langchain_community.tools.yahoo_finance_news import YahooFinanceNewsTool
return YahooFinanceNewsTool
def _import_youtube_search() -> Any:
from langchain_community.tools.youtube.search import YouTubeSearchTool
return YouTubeSearchTool
def _import_zapier_tool_ZapierNLAListActions() -> Any:
from langchain_community.tools.zapier.tool import ZapierNLAListActions
return ZapierNLAListActions
def _import_zapier_tool_ZapierNLARunAction() -> Any:
from langchain_community.tools.zapier.tool import ZapierNLARunAction
return ZapierNLARunAction
def _import_bearly_tool() -> Any:
from langchain_community.tools.bearly.tool import BearlyInterpreterTool
return BearlyInterpreterTool
def _import_e2b_data_analysis() -> Any:
from langchain_community.tools.e2b_data_analysis.tool import E2BDataAnalysisTool
return E2BDataAnalysisTool
def __getattr__(name: str) -> Any:
if name == "PythonAstREPLTool":
if name == "AINAppOps":
return _import_ainetwork_app()
elif name == "AINOwnerOps":
return _import_ainetwork_owner()
elif name == "AINRuleOps":
return _import_ainetwork_rule()
elif name == "AINTransfer":
return _import_ainetwork_transfer()
elif name == "AINValueOps":
return _import_ainetwork_value()
elif name == "ArxivQueryRun":
return _import_arxiv_tool()
elif name == "AzureCogsFormRecognizerTool":
return _import_azure_cognitive_services_AzureCogsFormRecognizerTool()
elif name == "AzureCogsImageAnalysisTool":
return _import_azure_cognitive_services_AzureCogsImageAnalysisTool()
elif name == "AzureCogsSpeech2TextTool":
return _import_azure_cognitive_services_AzureCogsSpeech2TextTool()
elif name == "AzureCogsText2SpeechTool":
return _import_azure_cognitive_services_AzureCogsText2SpeechTool()
elif name == "AzureCogsTextAnalyticsHealthTool":
return _import_azure_cognitive_services_AzureCogsTextAnalyticsHealthTool()
elif name == "BingSearchResults":
return _import_bing_search_tool_BingSearchResults()
elif name == "BingSearchRun":
return _import_bing_search_tool_BingSearchRun()
elif name == "BraveSearch":
return _import_brave_search_tool()
elif name == "DuckDuckGoSearchResults":
return _import_ddg_search_tool_DuckDuckGoSearchResults()
elif name == "DuckDuckGoSearchRun":
return _import_ddg_search_tool_DuckDuckGoSearchRun()
elif name == "EdenAiExplicitImageTool":
return _import_edenai_EdenAiExplicitImageTool()
elif name == "EdenAiObjectDetectionTool":
return _import_edenai_EdenAiObjectDetectionTool()
elif name == "EdenAiParsingIDTool":
return _import_edenai_EdenAiParsingIDTool()
elif name == "EdenAiParsingInvoiceTool":
return _import_edenai_EdenAiParsingInvoiceTool()
elif name == "EdenAiSpeechToTextTool":
return _import_edenai_EdenAiSpeechToTextTool()
elif name == "EdenAiTextModerationTool":
return _import_edenai_EdenAiTextModerationTool()
elif name == "EdenAiTextToSpeechTool":
return _import_edenai_EdenAiTextToSpeechTool()
elif name == "EdenaiTool":
return _import_edenai_EdenaiTool()
elif name == "ElevenLabsText2SpeechTool":
return _import_eleven_labs_text2speech()
elif name == "CopyFileTool":
return _import_file_management_CopyFileTool()
elif name == "DeleteFileTool":
return _import_file_management_DeleteFileTool()
elif name == "FileSearchTool":
return _import_file_management_FileSearchTool()
elif name == "ListDirectoryTool":
return _import_file_management_ListDirectoryTool()
elif name == "MoveFileTool":
return _import_file_management_MoveFileTool()
elif name == "ReadFileTool":
return _import_file_management_ReadFileTool()
elif name == "WriteFileTool":
return _import_file_management_WriteFileTool()
elif name == "GmailCreateDraft":
return _import_gmail_GmailCreateDraft()
elif name == "GmailGetMessage":
return _import_gmail_GmailGetMessage()
elif name == "GmailGetThread":
return _import_gmail_GmailGetThread()
elif name == "GmailSearch":
return _import_gmail_GmailSearch()
elif name == "GmailSendMessage":
return _import_gmail_GmailSendMessage()
elif name == "GoogleCloudTextToSpeechTool":
return _import_google_cloud_texttospeech()
elif name == "GooglePlacesTool":
return _import_google_places_tool()
elif name == "GoogleSearchResults":
return _import_google_search_tool_GoogleSearchResults()
elif name == "GoogleSearchRun":
return _import_google_search_tool_GoogleSearchRun()
elif name == "GoogleSerperResults":
return _import_google_serper_tool_GoogleSerperResults()
elif name == "GoogleSerperRun":
return _import_google_serper_tool_GoogleSerperRun()
elif name == "SearchAPIResults":
return _import_searchapi_tool_SearchAPIResults()
elif name == "SearchAPIRun":
return _import_searchapi_tool_SearchAPIRun()
elif name == "BaseGraphQLTool":
return _import_graphql_tool()
elif name == "HumanInputRun":
return _import_human_tool()
elif name == "IFTTTWebhook":
return _import_ifttt()
elif name == "StdInInquireTool":
return _import_interaction_tool()
elif name == "JiraAction":
return _import_jira_tool()
elif name == "JsonGetValueTool":
return _import_json_tool_JsonGetValueTool()
elif name == "JsonListKeysTool":
return _import_json_tool_JsonListKeysTool()
elif name == "MerriamWebsterQueryRun":
return _import_merriam_webster_tool()
elif name == "MetaphorSearchResults":
return _import_metaphor_search()
elif name == "NasaAction":
return _import_nasa_tool()
elif name == "O365CreateDraftMessage":
return _import_office365_create_draft_message()
elif name == "O365SearchEvents":
return _import_office365_events_search()
elif name == "O365SearchEmails":
return _import_office365_messages_search()
elif name == "O365SendEvent":
return _import_office365_send_event()
elif name == "O365SendMessage":
return _import_office365_send_message()
elif name == "APIOperation":
return _import_openapi_utils_api_models()
elif name == "OpenAPISpec":
return _import_openapi_utils_openapi_utils()
elif name == "OpenWeatherMapQueryRun":
return _import_openweathermap_tool()
elif name == "ClickTool":
return _import_playwright_ClickTool()
elif name == "CurrentWebPageTool":
return _import_playwright_CurrentWebPageTool()
elif name == "ExtractHyperlinksTool":
return _import_playwright_ExtractHyperlinksTool()
elif name == "ExtractTextTool":
return _import_playwright_ExtractTextTool()
elif name == "GetElementsTool":
return _import_playwright_GetElementsTool()
elif name == "NavigateBackTool":
return _import_playwright_NavigateBackTool()
elif name == "NavigateTool":
return _import_playwright_NavigateTool()
elif name == "AIPluginTool":
return _import_plugin()
elif name == "InfoPowerBITool":
return _import_powerbi_tool_InfoPowerBITool()
elif name == "ListPowerBITool":
return _import_powerbi_tool_ListPowerBITool()
elif name == "QueryPowerBITool":
return _import_powerbi_tool_QueryPowerBITool()
elif name == "PubmedQueryRun":
return _import_pubmed_tool()
elif name == "PythonAstREPLTool":
return _import_python_tool_PythonAstREPLTool() return _import_python_tool_PythonAstREPLTool()
elif name == "PythonREPLTool": elif name == "PythonREPLTool":
return _import_python_tool_PythonREPLTool() return _import_python_tool_PythonREPLTool()
elif name == "RedditSearchRun":
return _import_reddit_search_RedditSearchRun()
elif name == "format_tool_to_openai_function":
return _import_render()
elif name == "BaseRequestsTool":
return _import_requests_tool_BaseRequestsTool()
elif name == "RequestsDeleteTool":
return _import_requests_tool_RequestsDeleteTool()
elif name == "RequestsGetTool":
return _import_requests_tool_RequestsGetTool()
elif name == "RequestsPatchTool":
return _import_requests_tool_RequestsPatchTool()
elif name == "RequestsPostTool":
return _import_requests_tool_RequestsPostTool()
elif name == "RequestsPutTool":
return _import_requests_tool_RequestsPutTool()
elif name == "SteamWebAPIQueryRun":
return _import_steam_webapi_tool()
elif name == "SceneXplainTool":
return _import_scenexplain_tool()
elif name == "SearxSearchResults":
return _import_searx_search_tool_SearxSearchResults()
elif name == "SearxSearchRun":
return _import_searx_search_tool_SearxSearchRun()
elif name == "ShellTool":
return _import_shell_tool()
elif name == "SlackGetChannel":
return _import_slack_get_channel
elif name == "SlackGetMessage":
return _import_slack_get_message
elif name == "SlackScheduleMessage":
return _import_slack_schedule_message
elif name == "SlackSendMessage":
return _import_slack_send_message
elif name == "SleepTool":
return _import_sleep_tool()
elif name == "BaseSparkSQLTool":
return _import_spark_sql_tool_BaseSparkSQLTool()
elif name == "InfoSparkSQLTool":
return _import_spark_sql_tool_InfoSparkSQLTool()
elif name == "ListSparkSQLTool":
return _import_spark_sql_tool_ListSparkSQLTool()
elif name == "QueryCheckerTool":
return _import_spark_sql_tool_QueryCheckerTool()
elif name == "QuerySparkSQLTool":
return _import_spark_sql_tool_QuerySparkSQLTool()
elif name == "BaseSQLDatabaseTool":
return _import_sql_database_tool_BaseSQLDatabaseTool()
elif name == "InfoSQLDatabaseTool":
return _import_sql_database_tool_InfoSQLDatabaseTool()
elif name == "ListSQLDatabaseTool":
return _import_sql_database_tool_ListSQLDatabaseTool()
elif name == "QuerySQLCheckerTool":
return _import_sql_database_tool_QuerySQLCheckerTool()
elif name == "QuerySQLDataBaseTool":
return _import_sql_database_tool_QuerySQLDataBaseTool()
elif name == "StackExchangeTool":
return _import_stackexchange_tool()
elif name == "SteamshipImageGenerationTool":
return _import_steamship_image_generation()
elif name == "VectorStoreQATool":
return _import_vectorstore_tool_VectorStoreQATool()
elif name == "VectorStoreQAWithSourcesTool":
return _import_vectorstore_tool_VectorStoreQAWithSourcesTool()
elif name == "WikipediaQueryRun":
return _import_wikipedia_tool()
elif name == "WolframAlphaQueryRun":
return _import_wolfram_alpha_tool()
elif name == "YahooFinanceNewsTool":
return _import_yahoo_finance_news()
elif name == "YouTubeSearchTool":
return _import_youtube_search()
elif name == "ZapierNLAListActions":
return _import_zapier_tool_ZapierNLAListActions()
elif name == "ZapierNLARunAction":
return _import_zapier_tool_ZapierNLARunAction()
elif name == "BearlyInterpreterTool":
return _import_bearly_tool()
elif name == "E2BDataAnalysisTool":
return _import_e2b_data_analysis()
else:
raise AttributeError(f"Could not find: {name}")
from langchain_community import tools
# If not in interactive env, raise warning.
if not is_interactive_env():
warnings.warn(
"Importing tools from langchain is deprecated. Importing from "
"langchain will no longer be supported as of langchain==0.2.0. "
"Please import from langchain-community instead:\n\n"
f"`from langchain_community.tools import {name}`.\n\n"
"To install langchain-community run "
"`pip install -U langchain-community`.",
category=LangChainDeprecationWarning,
)
return getattr(tools, name)
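The replacement body relies on module-level __getattr__ (PEP 562): any name not defined eagerly in the module is resolved against langchain_community at attribute-access time, with the warning emitted outside interactive sessions. A self-contained sketch of the same pattern with placeholder package names (nothing below is taken from this commit):

# shim_pkg/__init__.py -- illustrative lazy re-export shim; "real_pkg" is hypothetical.
import importlib
import warnings
from typing import Any

_TARGET = "real_pkg"


def __getattr__(name: str) -> Any:
    # PEP 562: only called when `name` is not found as a normal module attribute.
    target = importlib.import_module(_TARGET)
    warnings.warn(
        f"Importing {name} from shim_pkg is deprecated; "
        f"import it from {_TARGET} instead.",
        DeprecationWarning,
        stacklevel=2,
    )
    return getattr(target, name)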
__all__ = [ __all__ = [

View File

@ -3,6 +3,7 @@
Other LangChain classes use **Utilities** to interact with third-part systems Other LangChain classes use **Utilities** to interact with third-part systems
and packages. and packages.
""" """
import warnings
from typing import Any from typing import Any
from langchain_community.utilities.requests import ( from langchain_community.utilities.requests import (
@ -10,357 +11,26 @@ from langchain_community.utilities.requests import (
RequestsWrapper, RequestsWrapper,
TextRequestsWrapper, TextRequestsWrapper,
) )
from langchain_core._api import LangChainDeprecationWarning
from langchain.utils.interactive_env import is_interactive_env
def _import_alpha_vantage() -> Any:
from langchain_community.utilities.alpha_vantage import AlphaVantageAPIWrapper
return AlphaVantageAPIWrapper
def _import_apify() -> Any:
from langchain_community.utilities.apify import ApifyWrapper
return ApifyWrapper
def _import_arcee() -> Any:
from langchain_community.utilities.arcee import ArceeWrapper
return ArceeWrapper
def _import_arxiv() -> Any:
from langchain_community.utilities.arxiv import ArxivAPIWrapper
return ArxivAPIWrapper
def _import_awslambda() -> Any:
from langchain_community.utilities.awslambda import LambdaWrapper
return LambdaWrapper
def _import_bibtex() -> Any:
from langchain_community.utilities.bibtex import BibtexparserWrapper
return BibtexparserWrapper
def _import_bing_search() -> Any:
from langchain_community.utilities.bing_search import BingSearchAPIWrapper
return BingSearchAPIWrapper
def _import_brave_search() -> Any:
from langchain_community.utilities.brave_search import BraveSearchWrapper
return BraveSearchWrapper
def _import_duckduckgo_search() -> Any:
from langchain_community.utilities.duckduckgo_search import (
DuckDuckGoSearchAPIWrapper,
)
return DuckDuckGoSearchAPIWrapper
def _import_golden_query() -> Any:
from langchain_community.utilities.golden_query import GoldenQueryAPIWrapper
return GoldenQueryAPIWrapper
def _import_google_lens() -> Any:
from langchain_community.utilities.google_lens import GoogleLensAPIWrapper
return GoogleLensAPIWrapper
def _import_google_places_api() -> Any:
from langchain_community.utilities.google_places_api import GooglePlacesAPIWrapper
return GooglePlacesAPIWrapper
def _import_google_jobs() -> Any:
from langchain_community.utilities.google_jobs import GoogleJobsAPIWrapper
return GoogleJobsAPIWrapper
def _import_google_scholar() -> Any:
from langchain_community.utilities.google_scholar import GoogleScholarAPIWrapper
return GoogleScholarAPIWrapper
def _import_google_trends() -> Any:
from langchain_community.utilities.google_trends import GoogleTrendsAPIWrapper
return GoogleTrendsAPIWrapper
def _import_google_finance() -> Any:
from langchain_community.utilities.google_finance import GoogleFinanceAPIWrapper
return GoogleFinanceAPIWrapper
def _import_google_search() -> Any:
from langchain_community.utilities.google_search import GoogleSearchAPIWrapper
return GoogleSearchAPIWrapper
def _import_google_serper() -> Any:
from langchain_community.utilities.google_serper import GoogleSerperAPIWrapper
return GoogleSerperAPIWrapper
def _import_graphql() -> Any:
from langchain_community.utilities.graphql import GraphQLAPIWrapper
return GraphQLAPIWrapper
def _import_jira() -> Any:
from langchain_community.utilities.jira import JiraAPIWrapper
return JiraAPIWrapper
def _import_max_compute() -> Any:
from langchain_community.utilities.max_compute import MaxComputeAPIWrapper
return MaxComputeAPIWrapper
def _import_merriam_webster() -> Any:
from langchain_community.utilities.merriam_webster import MerriamWebsterAPIWrapper
return MerriamWebsterAPIWrapper
def _import_metaphor_search() -> Any:
from langchain_community.utilities.metaphor_search import MetaphorSearchAPIWrapper
return MetaphorSearchAPIWrapper
def _import_openweathermap() -> Any:
from langchain_community.utilities.openweathermap import OpenWeatherMapAPIWrapper
return OpenWeatherMapAPIWrapper
def _import_outline() -> Any:
from langchain_community.utilities.outline import OutlineAPIWrapper
return OutlineAPIWrapper
def _import_portkey() -> Any:
from langchain_community.utilities.portkey import Portkey
return Portkey
def _import_powerbi() -> Any:
from langchain_community.utilities.powerbi import PowerBIDataset
return PowerBIDataset
def _import_pubmed() -> Any:
from langchain_community.utilities.pubmed import PubMedAPIWrapper
return PubMedAPIWrapper
def _import_python() -> Any:
from langchain_community.utilities.python import PythonREPL
return PythonREPL
def _import_scenexplain() -> Any:
from langchain_community.utilities.scenexplain import SceneXplainAPIWrapper
return SceneXplainAPIWrapper
def _import_searchapi() -> Any:
from langchain_community.utilities.searchapi import SearchApiAPIWrapper
return SearchApiAPIWrapper
def _import_searx_search() -> Any:
from langchain_community.utilities.searx_search import SearxSearchWrapper
return SearxSearchWrapper
def _import_serpapi() -> Any:
from langchain_community.utilities.serpapi import SerpAPIWrapper
return SerpAPIWrapper
def _import_spark_sql() -> Any:
from langchain_community.utilities.spark_sql import SparkSQL
return SparkSQL
def _import_sql_database() -> Any:
from langchain_community.utilities.sql_database import SQLDatabase
return SQLDatabase
def _import_steam_webapi() -> Any:
from langchain_community.utilities.steam import SteamWebAPIWrapper
return SteamWebAPIWrapper
def _import_stackexchange() -> Any:
from langchain_community.utilities.stackexchange import StackExchangeAPIWrapper
return StackExchangeAPIWrapper
def _import_tensorflow_datasets() -> Any:
from langchain_community.utilities.tensorflow_datasets import TensorflowDatasets
return TensorflowDatasets
def _import_twilio() -> Any:
from langchain_community.utilities.twilio import TwilioAPIWrapper
return TwilioAPIWrapper
def _import_wikipedia() -> Any:
from langchain_community.utilities.wikipedia import WikipediaAPIWrapper
return WikipediaAPIWrapper
def _import_wolfram_alpha() -> Any:
from langchain_community.utilities.wolfram_alpha import WolframAlphaAPIWrapper
return WolframAlphaAPIWrapper
def _import_zapier() -> Any:
from langchain_community.utilities.zapier import ZapierNLAWrapper
return ZapierNLAWrapper
def _import_nasa() -> Any:
from langchain_community.utilities.nasa import NasaAPIWrapper
return NasaAPIWrapper
def __getattr__(name: str) -> Any:
from langchain_community import utilities
# If not in interactive env, raise warning.
if not is_interactive_env():
warnings.warn(
"Importing this utility from langchain is deprecated. Importing it from "
"langchain will no longer be supported as of langchain==0.2.0. "
"Please import from langchain-community instead:\n\n"
f"`from langchain_community.utilities import {name}`.\n\n"
"To install langchain-community run `pip install -U langchain-community`.",
category=LangChainDeprecationWarning,
)
return getattr(utilities, name)
if name == "AlphaVantageAPIWrapper":
return _import_alpha_vantage()
elif name == "ApifyWrapper":
return _import_apify()
elif name == "ArceeWrapper":
return _import_arcee()
elif name == "ArxivAPIWrapper":
return _import_arxiv()
elif name == "LambdaWrapper":
return _import_awslambda()
elif name == "BibtexparserWrapper":
return _import_bibtex()
elif name == "BingSearchAPIWrapper":
return _import_bing_search()
elif name == "BraveSearchWrapper":
return _import_brave_search()
elif name == "DuckDuckGoSearchAPIWrapper":
return _import_duckduckgo_search()
elif name == "GoogleLensAPIWrapper":
return _import_google_lens()
elif name == "GoldenQueryAPIWrapper":
return _import_golden_query()
elif name == "GoogleJobsAPIWrapper":
return _import_google_jobs()
elif name == "GoogleScholarAPIWrapper":
return _import_google_scholar()
elif name == "GoogleFinanceAPIWrapper":
return _import_google_finance()
elif name == "GoogleTrendsAPIWrapper":
return _import_google_trends()
elif name == "GooglePlacesAPIWrapper":
return _import_google_places_api()
elif name == "GoogleSearchAPIWrapper":
return _import_google_search()
elif name == "GoogleSerperAPIWrapper":
return _import_google_serper()
elif name == "GraphQLAPIWrapper":
return _import_graphql()
elif name == "JiraAPIWrapper":
return _import_jira()
elif name == "MaxComputeAPIWrapper":
return _import_max_compute()
elif name == "MerriamWebsterAPIWrapper":
return _import_merriam_webster()
elif name == "MetaphorSearchAPIWrapper":
return _import_metaphor_search()
elif name == "NasaAPIWrapper":
return _import_nasa()
elif name == "OpenWeatherMapAPIWrapper":
return _import_openweathermap()
elif name == "OutlineAPIWrapper":
return _import_outline()
elif name == "Portkey":
return _import_portkey()
elif name == "PowerBIDataset":
return _import_powerbi()
elif name == "PubMedAPIWrapper":
return _import_pubmed()
elif name == "PythonREPL":
return _import_python()
elif name == "SceneXplainAPIWrapper":
return _import_scenexplain()
elif name == "SearchApiAPIWrapper":
return _import_searchapi()
elif name == "SearxSearchWrapper":
return _import_searx_search()
elif name == "SerpAPIWrapper":
return _import_serpapi()
elif name == "SparkSQL":
return _import_spark_sql()
elif name == "StackExchangeAPIWrapper":
return _import_stackexchange()
elif name == "SQLDatabase":
return _import_sql_database()
elif name == "SteamWebAPIWrapper":
return _import_steam_webapi()
elif name == "TensorflowDatasets":
return _import_tensorflow_datasets()
elif name == "TwilioAPIWrapper":
return _import_twilio()
elif name == "WikipediaAPIWrapper":
return _import_wikipedia()
elif name == "WolframAlphaAPIWrapper":
return _import_wolfram_alpha()
elif name == "ZapierNLAWrapper":
return _import_zapier()
else:
raise AttributeError(f"Could not find: {name}")
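Because the notice is an ordinary warning category (LangChainDeprecationWarning from langchain_core._api), code that cannot migrate yet can filter it with the standard warnings machinery. An optional, illustrative snippet, not part of this commit:

import warnings

from langchain_core._api import LangChainDeprecationWarning

# Silence the deprecation notice until the imports have been moved to langchain_community.
warnings.filterwarnings("ignore", category=LangChainDeprecationWarning)

from langchain.utilities import SerpAPIWrapper  # noqa: E402  resolved via the fallback, no warning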
__all__ = [ __all__ = [

View File

@ -0,0 +1,5 @@
def is_interactive_env() -> bool:
"""Determine if running within IPython or Jupyter."""
import sys
return hasattr(sys, "ps2")
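sys.ps1 and sys.ps2 hold the interpreter's prompt strings and are only defined when Python is running interactively; IPython and Jupyter set them as well, which is what the docstring alludes to. For example:

import sys

# The continuation prompt ("... ") only exists in interactive sessions.
print(hasattr(sys, "ps2"))  # False in a script or pytest run, True at a >>> prompt or in a notebook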

View File

@ -18,571 +18,30 @@ and retrieve the data that are 'most similar' to the embedded query.
Embeddings, Document Embeddings, Document
""" # noqa: E501 """ # noqa: E501
import warnings
from typing import Any from typing import Any
from langchain_core._api import LangChainDeprecationWarning
from langchain_core.vectorstores import VectorStore from langchain_core.vectorstores import VectorStore
from langchain.utils.interactive_env import is_interactive_env
def _import_alibaba_cloud_open_search() -> Any:
from langchain_community.vectorstores.alibabacloud_opensearch import (
AlibabaCloudOpenSearch,
)
return AlibabaCloudOpenSearch
def _import_alibaba_cloud_open_search_settings() -> Any:
from langchain_community.vectorstores.alibabacloud_opensearch import (
AlibabaCloudOpenSearchSettings,
)
return AlibabaCloudOpenSearchSettings
def _import_azure_cosmos_db() -> Any:
from langchain_community.vectorstores.azure_cosmos_db import (
AzureCosmosDBVectorSearch,
)
return AzureCosmosDBVectorSearch
def _import_elastic_knn_search() -> Any:
from langchain_community.vectorstores.elastic_vector_search import ElasticKnnSearch
return ElasticKnnSearch
def _import_elastic_vector_search() -> Any:
from langchain_community.vectorstores.elastic_vector_search import (
ElasticVectorSearch,
)
return ElasticVectorSearch
def _import_analyticdb() -> Any:
from langchain_community.vectorstores.analyticdb import AnalyticDB
return AnalyticDB
def _import_annoy() -> Any:
from langchain_community.vectorstores.annoy import Annoy
return Annoy
def _import_atlas() -> Any:
from langchain_community.vectorstores.atlas import AtlasDB
return AtlasDB
def _import_awadb() -> Any:
from langchain_community.vectorstores.awadb import AwaDB
return AwaDB
def _import_azuresearch() -> Any:
from langchain_community.vectorstores.azuresearch import AzureSearch
return AzureSearch
def _import_bageldb() -> Any:
from langchain_community.vectorstores.bageldb import Bagel
return Bagel
def _import_baiducloud_vector_search() -> Any:
from langchain_community.vectorstores.baiducloud_vector_search import BESVectorStore
return BESVectorStore
def _import_cassandra() -> Any:
from langchain_community.vectorstores.cassandra import Cassandra
return Cassandra
def _import_astradb() -> Any:
from langchain_community.vectorstores.astradb import AstraDB
return AstraDB
def _import_chroma() -> Any:
from langchain_community.vectorstores.chroma import Chroma
return Chroma
def _import_clarifai() -> Any:
from langchain_community.vectorstores.clarifai import Clarifai
return Clarifai
def _import_clickhouse() -> Any:
from langchain_community.vectorstores.clickhouse import Clickhouse
return Clickhouse
def _import_clickhouse_settings() -> Any:
from langchain_community.vectorstores.clickhouse import ClickhouseSettings
return ClickhouseSettings
def _import_dashvector() -> Any:
from langchain_community.vectorstores.dashvector import DashVector
return DashVector
def _import_databricks_vector_search() -> Any:
from langchain_community.vectorstores.databricks_vector_search import (
DatabricksVectorSearch,
)
return DatabricksVectorSearch
def _import_deeplake() -> Any:
from langchain_community.vectorstores.deeplake import DeepLake
return DeepLake
def _import_dingo() -> Any:
from langchain_community.vectorstores.dingo import Dingo
return Dingo
def _import_docarray_hnsw() -> Any:
from langchain_community.vectorstores.docarray import DocArrayHnswSearch
return DocArrayHnswSearch
def _import_docarray_inmemory() -> Any:
from langchain_community.vectorstores.docarray import DocArrayInMemorySearch
return DocArrayInMemorySearch
def _import_elasticsearch() -> Any:
from langchain_community.vectorstores.elasticsearch import ElasticsearchStore
return ElasticsearchStore
def _import_epsilla() -> Any:
from langchain_community.vectorstores.epsilla import Epsilla
return Epsilla
def _import_faiss() -> Any:
from langchain_community.vectorstores.faiss import FAISS
return FAISS
def _import_hologres() -> Any:
from langchain_community.vectorstores.hologres import Hologres
return Hologres
def _import_lancedb() -> Any:
from langchain_community.vectorstores.lancedb import LanceDB
return LanceDB
def _import_llm_rails() -> Any:
from langchain_community.vectorstores.llm_rails import LLMRails
return LLMRails
def _import_marqo() -> Any:
from langchain_community.vectorstores.marqo import Marqo
return Marqo
def _import_matching_engine() -> Any:
from langchain_community.vectorstores.matching_engine import MatchingEngine
return MatchingEngine
def _import_meilisearch() -> Any:
from langchain_community.vectorstores.meilisearch import Meilisearch
return Meilisearch
def _import_milvus() -> Any:
from langchain_community.vectorstores.milvus import Milvus
return Milvus
def _import_momento_vector_index() -> Any:
from langchain_community.vectorstores.momento_vector_index import MomentoVectorIndex
return MomentoVectorIndex
def _import_mongodb_atlas() -> Any:
from langchain_community.vectorstores.mongodb_atlas import MongoDBAtlasVectorSearch
return MongoDBAtlasVectorSearch
def _import_myscale() -> Any:
from langchain_community.vectorstores.myscale import MyScale
return MyScale
def _import_myscale_settings() -> Any:
from langchain_community.vectorstores.myscale import MyScaleSettings
return MyScaleSettings
def _import_neo4j_vector() -> Any:
from langchain_community.vectorstores.neo4j_vector import Neo4jVector
return Neo4jVector
def _import_opensearch_vector_search() -> Any:
from langchain_community.vectorstores.opensearch_vector_search import (
OpenSearchVectorSearch,
)
return OpenSearchVectorSearch
def _import_pgembedding() -> Any:
from langchain_community.vectorstores.pgembedding import PGEmbedding
return PGEmbedding
def _import_pgvector() -> Any:
from langchain_community.vectorstores.pgvector import PGVector
return PGVector
def _import_pinecone() -> Any:
from langchain_community.vectorstores.pinecone import Pinecone
return Pinecone
def _import_qdrant() -> Any:
from langchain_community.vectorstores.qdrant import Qdrant
return Qdrant
def _import_redis() -> Any:
from langchain_community.vectorstores.redis import Redis
return Redis
def _import_rocksetdb() -> Any:
from langchain_community.vectorstores.rocksetdb import Rockset
return Rockset
def _import_vespa() -> Any:
from langchain_community.vectorstores.vespa import VespaStore
return VespaStore
def _import_scann() -> Any:
from langchain_community.vectorstores.scann import ScaNN
return ScaNN
def _import_semadb() -> Any:
from langchain_community.vectorstores.semadb import SemaDB
return SemaDB
def _import_singlestoredb() -> Any:
from langchain_community.vectorstores.singlestoredb import SingleStoreDB
return SingleStoreDB
def _import_sklearn() -> Any:
from langchain_community.vectorstores.sklearn import SKLearnVectorStore
return SKLearnVectorStore
def _import_sqlitevss() -> Any:
from langchain_community.vectorstores.sqlitevss import SQLiteVSS
return SQLiteVSS
def _import_starrocks() -> Any:
from langchain_community.vectorstores.starrocks import StarRocks
return StarRocks
def _import_supabase() -> Any:
from langchain_community.vectorstores.supabase import SupabaseVectorStore
return SupabaseVectorStore
def _import_tair() -> Any:
from langchain_community.vectorstores.tair import Tair
return Tair
def _import_tencentvectordb() -> Any:
from langchain_community.vectorstores.tencentvectordb import TencentVectorDB
return TencentVectorDB
def _import_tiledb() -> Any:
from langchain_community.vectorstores.tiledb import TileDB
return TileDB
def _import_tigris() -> Any:
from langchain_community.vectorstores.tigris import Tigris
return Tigris
def _import_timescalevector() -> Any:
from langchain_community.vectorstores.timescalevector import TimescaleVector
return TimescaleVector
def _import_typesense() -> Any:
from langchain_community.vectorstores.typesense import Typesense
return Typesense
def _import_usearch() -> Any:
from langchain_community.vectorstores.usearch import USearch
return USearch
def _import_vald() -> Any:
from langchain_community.vectorstores.vald import Vald
return Vald
def _import_vearch() -> Any:
from langchain_community.vectorstores.vearch import Vearch
return Vearch
def _import_vectara() -> Any:
from langchain_community.vectorstores.vectara import Vectara
return Vectara
def _import_weaviate() -> Any:
from langchain_community.vectorstores.weaviate import Weaviate
return Weaviate
def _import_yellowbrick() -> Any:
from langchain_community.vectorstores.yellowbrick import Yellowbrick
return Yellowbrick
def _import_zep() -> Any:
from langchain_community.vectorstores.zep import ZepVectorStore
return ZepVectorStore
def _import_zilliz() -> Any:
from langchain_community.vectorstores.zilliz import Zilliz
return Zilliz
def __getattr__(name: str) -> Any:
from langchain_community import vectorstores
# If not in interactive env, raise warning.
if not is_interactive_env():
warnings.warn(
"Importing vector stores from langchain is deprecated. Importing from "
"langchain will no longer be supported as of langchain==0.2.0. "
"Please import from langchain-community instead:\n\n"
f"`from langchain_community.vectorstores import {name}`.\n\n"
"To install langchain-community run `pip install -U langchain-community`.",
category=LangChainDeprecationWarning,
)
return getattr(vectorstores, name)
if name == "AnalyticDB":
return _import_analyticdb()
elif name == "AlibabaCloudOpenSearch":
return _import_alibaba_cloud_open_search()
elif name == "AlibabaCloudOpenSearchSettings":
return _import_alibaba_cloud_open_search_settings()
elif name == "AzureCosmosDBVectorSearch":
return _import_azure_cosmos_db()
elif name == "ElasticKnnSearch":
return _import_elastic_knn_search()
elif name == "ElasticVectorSearch":
return _import_elastic_vector_search()
elif name == "Annoy":
return _import_annoy()
elif name == "AtlasDB":
return _import_atlas()
elif name == "AwaDB":
return _import_awadb()
elif name == "AzureSearch":
return _import_azuresearch()
elif name == "Bagel":
return _import_bageldb()
elif name == "BESVectorStore":
return _import_baiducloud_vector_search()
elif name == "Cassandra":
return _import_cassandra()
elif name == "AstraDB":
return _import_astradb()
elif name == "Chroma":
return _import_chroma()
elif name == "Clarifai":
return _import_clarifai()
elif name == "ClickhouseSettings":
return _import_clickhouse_settings()
elif name == "Clickhouse":
return _import_clickhouse()
elif name == "DashVector":
return _import_dashvector()
elif name == "DatabricksVectorSearch":
return _import_databricks_vector_search()
elif name == "DeepLake":
return _import_deeplake()
elif name == "Dingo":
return _import_dingo()
elif name == "DocArrayInMemorySearch":
return _import_docarray_inmemory()
elif name == "DocArrayHnswSearch":
return _import_docarray_hnsw()
elif name == "ElasticsearchStore":
return _import_elasticsearch()
elif name == "Epsilla":
return _import_epsilla()
elif name == "FAISS":
return _import_faiss()
elif name == "Hologres":
return _import_hologres()
elif name == "LanceDB":
return _import_lancedb()
elif name == "LLMRails":
return _import_llm_rails()
elif name == "Marqo":
return _import_marqo()
elif name == "MatchingEngine":
return _import_matching_engine()
elif name == "Meilisearch":
return _import_meilisearch()
elif name == "Milvus":
return _import_milvus()
elif name == "MomentoVectorIndex":
return _import_momento_vector_index()
elif name == "MongoDBAtlasVectorSearch":
return _import_mongodb_atlas()
elif name == "MyScaleSettings":
return _import_myscale_settings()
elif name == "MyScale":
return _import_myscale()
elif name == "Neo4jVector":
return _import_neo4j_vector()
elif name == "OpenSearchVectorSearch":
return _import_opensearch_vector_search()
elif name == "PGEmbedding":
return _import_pgembedding()
elif name == "PGVector":
return _import_pgvector()
elif name == "Pinecone":
return _import_pinecone()
elif name == "Qdrant":
return _import_qdrant()
elif name == "Redis":
return _import_redis()
elif name == "Rockset":
return _import_rocksetdb()
elif name == "ScaNN":
return _import_scann()
elif name == "SemaDB":
return _import_semadb()
elif name == "SingleStoreDB":
return _import_singlestoredb()
elif name == "SKLearnVectorStore":
return _import_sklearn()
elif name == "SQLiteVSS":
return _import_sqlitevss()
elif name == "StarRocks":
return _import_starrocks()
elif name == "SupabaseVectorStore":
return _import_supabase()
elif name == "Tair":
return _import_tair()
elif name == "TencentVectorDB":
return _import_tencentvectordb()
elif name == "TileDB":
return _import_tiledb()
elif name == "Tigris":
return _import_tigris()
elif name == "TimescaleVector":
return _import_timescalevector()
elif name == "Typesense":
return _import_typesense()
elif name == "USearch":
return _import_usearch()
elif name == "Vald":
return _import_vald()
elif name == "Vearch":
return _import_vearch()
elif name == "Vectara":
return _import_vectara()
elif name == "Weaviate":
return _import_weaviate()
elif name == "Yellowbrick":
return _import_yellowbrick()
elif name == "ZepVectorStore":
return _import_zep()
elif name == "Zilliz":
return _import_zilliz()
elif name == "VespaStore":
return _import_vespa()
else:
raise AttributeError(f"Could not find: {name}")
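A hypothetical test, not part of this commit, that exercises the new contract end to end: the old import path still resolves the class through langchain_community and emits the warning outside interactive environments:

import pytest
from langchain_core._api import LangChainDeprecationWarning


def test_vectorstore_import_warns() -> None:
    with pytest.warns(LangChainDeprecationWarning):
        from langchain.vectorstores import FAISS  # resolved via langchain_community

    assert FAISS is not None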
__all__ = [ __all__ = [

View File

@ -1 +1,7 @@
"""All unit tests (lightweight tests).""" """All unit tests (lightweight tests)."""
from typing import Any
def assert_all_importable(module: Any) -> None:
for attr in module.__all__:
getattr(module, attr)

View File

@ -0,0 +1,42 @@
from langchain.agents import agent_toolkits
from tests.unit_tests import assert_all_importable
EXPECTED_ALL = [
"AINetworkToolkit",
"AmadeusToolkit",
"AzureCognitiveServicesToolkit",
"FileManagementToolkit",
"GmailToolkit",
"JiraToolkit",
"JsonToolkit",
"MultionToolkit",
"NasaToolkit",
"NLAToolkit",
"O365Toolkit",
"OpenAPIToolkit",
"PlayWrightBrowserToolkit",
"PowerBIToolkit",
"SlackToolkit",
"SteamToolkit",
"SQLDatabaseToolkit",
"SparkSQLToolkit",
"VectorStoreInfo",
"VectorStoreRouterToolkit",
"VectorStoreToolkit",
"ZapierToolkit",
"create_json_agent",
"create_openapi_agent",
"create_pbi_agent",
"create_pbi_chat_agent",
"create_spark_sql_agent",
"create_sql_agent",
"create_vectorstore_agent",
"create_vectorstore_router_agent",
"create_conversational_retrieval_agent",
"create_retriever_tool",
]
def test_imports() -> None:
assert sorted(agent_toolkits.__all__) == sorted(EXPECTED_ALL)
assert_all_importable(agent_toolkits)

View File

@ -1,4 +1,5 @@
from langchain.agents import __all__
from langchain import agents
from tests.unit_tests import assert_all_importable
EXPECTED_ALL = [ EXPECTED_ALL = [
"Agent", "Agent",
@ -46,4 +47,5 @@ EXPECTED_ALL = [
def test_all_imports() -> None: def test_all_imports() -> None:
assert set(__all__) == set(EXPECTED_ALL)
assert set(agents.__all__) == set(EXPECTED_ALL)
assert_all_importable(agents)

View File

@ -1,4 +1,5 @@
from langchain.callbacks import __all__
from langchain import callbacks
from tests.unit_tests import assert_all_importable
EXPECTED_ALL = [ EXPECTED_ALL = [
"AimCallbackHandler", "AimCallbackHandler",
@ -37,4 +38,5 @@ EXPECTED_ALL = [
def test_all_imports() -> None: def test_all_imports() -> None:
assert set(__all__) == set(EXPECTED_ALL)
assert set(callbacks.__all__) == set(EXPECTED_ALL)
assert_all_importable(callbacks)

View File

@ -1,4 +1,5 @@
from langchain.chains import __all__
from langchain import chains
from tests.unit_tests import assert_all_importable
EXPECTED_ALL = [ EXPECTED_ALL = [
"APIChain", "APIChain",
@ -62,4 +63,5 @@ EXPECTED_ALL = [
def test_all_imports() -> None: def test_all_imports() -> None:
assert set(__all__) == set(EXPECTED_ALL)
assert set(chains.__all__) == set(EXPECTED_ALL)
assert_all_importable(chains)

View File

@ -1,4 +1,5 @@
from langchain.chat_models import __all__
from langchain import chat_models
from tests.unit_tests import assert_all_importable
EXPECTED_ALL = [ EXPECTED_ALL = [
"ChatOpenAI", "ChatOpenAI",
@ -35,4 +36,5 @@ EXPECTED_ALL = [
def test_all_imports() -> None: def test_all_imports() -> None:
assert set(__all__) == set(EXPECTED_ALL)
assert set(chat_models.__all__) == set(EXPECTED_ALL)
assert_all_importable(chat_models)

View File

@ -1,7 +1,9 @@
from langchain.docstore import __all__
from langchain import docstore
from tests.unit_tests import assert_all_importable
EXPECTED_ALL = ["DocstoreFn", "InMemoryDocstore", "Wikipedia"] EXPECTED_ALL = ["DocstoreFn", "InMemoryDocstore", "Wikipedia"]
def test_all_imports() -> None: def test_all_imports() -> None:
assert set(__all__) == set(EXPECTED_ALL)
assert set(docstore.__all__) == set(EXPECTED_ALL)
assert_all_importable(docstore)

View File

@ -1,4 +1,5 @@
from langchain.document_loaders import __all__
from langchain import document_loaders
from tests.unit_tests import assert_all_importable
EXPECTED_ALL = [ EXPECTED_ALL = [
"AcreomLoader", "AcreomLoader",
@ -173,4 +174,5 @@ EXPECTED_ALL = [
def test_all_imports() -> None: def test_all_imports() -> None:
assert set(__all__) == set(EXPECTED_ALL)
assert set(document_loaders.__all__) == set(EXPECTED_ALL)
assert_all_importable(document_loaders)

View File

@ -1,4 +1,5 @@
from langchain.document_transformers import __all__
from langchain import document_transformers
from tests.unit_tests import assert_all_importable
EXPECTED_ALL = [ EXPECTED_ALL = [
"BeautifulSoupTransformer", "BeautifulSoupTransformer",
@ -17,4 +18,5 @@ EXPECTED_ALL = [
def test_all_imports() -> None: def test_all_imports() -> None:
assert set(__all__) == set(EXPECTED_ALL)
assert set(document_transformers.__all__) == set(EXPECTED_ALL)
assert_all_importable(document_transformers)

View File

@ -1,4 +1,5 @@
from langchain.embeddings import __all__
from langchain import embeddings
from tests.unit_tests import assert_all_importable
EXPECTED_ALL = [ EXPECTED_ALL = [
"OpenAIEmbeddings", "OpenAIEmbeddings",
@ -58,4 +59,5 @@ EXPECTED_ALL = [
def test_all_imports() -> None: def test_all_imports() -> None:
assert set(__all__) == set(EXPECTED_ALL)
assert set(embeddings.__all__) == set(EXPECTED_ALL)
assert_all_importable(embeddings)

View File

@ -1,4 +1,5 @@
from langchain.graphs import __all__
from langchain import graphs
from tests.unit_tests import assert_all_importable
EXPECTED_ALL = [ EXPECTED_ALL = [
"MemgraphGraph", "MemgraphGraph",
@ -15,4 +16,5 @@ EXPECTED_ALL = [
def test_all_imports() -> None: def test_all_imports() -> None:
assert set(__all__) == set(EXPECTED_ALL)
assert set(graphs.__all__) == set(EXPECTED_ALL)
assert_all_importable(graphs)

View File

@ -0,0 +1,30 @@
from langchain.memory import chat_message_histories
from tests.unit_tests import assert_all_importable
EXPECTED_ALL = [
"AstraDBChatMessageHistory",
"ChatMessageHistory",
"CassandraChatMessageHistory",
"CosmosDBChatMessageHistory",
"DynamoDBChatMessageHistory",
"ElasticsearchChatMessageHistory",
"FileChatMessageHistory",
"FirestoreChatMessageHistory",
"MomentoChatMessageHistory",
"MongoDBChatMessageHistory",
"PostgresChatMessageHistory",
"RedisChatMessageHistory",
"RocksetChatMessageHistory",
"SQLChatMessageHistory",
"StreamlitChatMessageHistory",
"SingleStoreDBChatMessageHistory",
"XataChatMessageHistory",
"ZepChatMessageHistory",
"UpstashRedisChatMessageHistory",
"Neo4jChatMessageHistory",
]
def test_imports() -> None:
assert sorted(chat_message_histories.__all__) == sorted(EXPECTED_ALL)
assert_all_importable(chat_message_histories)

View File

@ -1,4 +1,5 @@
from langchain.memory import __all__
from langchain import memory
from tests.unit_tests import assert_all_importable
EXPECTED_ALL = [ EXPECTED_ALL = [
"AstraDBChatMessageHistory", "AstraDBChatMessageHistory",
@ -40,4 +41,5 @@ EXPECTED_ALL = [
def test_all_imports() -> None: def test_all_imports() -> None:
assert set(__all__) == set(EXPECTED_ALL)
assert set(memory.__all__) == set(EXPECTED_ALL)
assert_all_importable(memory)

View File

@ -1,4 +1,5 @@
from langchain.output_parsers import __all__
from langchain import output_parsers
from tests.unit_tests import assert_all_importable
EXPECTED_ALL = [ EXPECTED_ALL = [
"BooleanOutputParser", "BooleanOutputParser",
@ -28,4 +29,5 @@ EXPECTED_ALL = [
def test_all_imports() -> None: def test_all_imports() -> None:
assert set(__all__) == set(EXPECTED_ALL)
assert set(output_parsers.__all__) == set(EXPECTED_ALL)
assert_all_importable(output_parsers)

View File

@ -1,4 +1,5 @@
from langchain.prompts import __all__
from langchain import prompts
from tests.unit_tests import assert_all_importable
EXPECTED_ALL = [ EXPECTED_ALL = [
"AIMessagePromptTemplate", "AIMessagePromptTemplate",
@ -25,4 +26,5 @@ EXPECTED_ALL = [
def test_all_imports() -> None: def test_all_imports() -> None:
assert set(__all__) == set(EXPECTED_ALL)
assert set(prompts.__all__) == set(EXPECTED_ALL)
assert_all_importable(prompts)

View File

@ -0,0 +1,54 @@
from langchain import retrievers
from tests.unit_tests import assert_all_importable
EXPECTED_ALL = [
"AmazonKendraRetriever",
"AmazonKnowledgeBasesRetriever",
"ArceeRetriever",
"ArxivRetriever",
"AzureCognitiveSearchRetriever",
"ChatGPTPluginRetriever",
"ContextualCompressionRetriever",
"ChaindeskRetriever",
"CohereRagRetriever",
"ElasticSearchBM25Retriever",
"EmbedchainRetriever",
"GoogleDocumentAIWarehouseRetriever",
"GoogleCloudEnterpriseSearchRetriever",
"GoogleVertexAIMultiTurnSearchRetriever",
"GoogleVertexAISearchRetriever",
"KayAiRetriever",
"KNNRetriever",
"LlamaIndexGraphRetriever",
"LlamaIndexRetriever",
"MergerRetriever",
"MetalRetriever",
"MilvusRetriever",
"MultiQueryRetriever",
"OutlineRetriever",
"PineconeHybridSearchRetriever",
"PubMedRetriever",
"RemoteLangChainRetriever",
"SVMRetriever",
"SelfQueryRetriever",
"TavilySearchAPIRetriever",
"TFIDFRetriever",
"BM25Retriever",
"TimeWeightedVectorStoreRetriever",
"VespaRetriever",
"WeaviateHybridSearchRetriever",
"WikipediaRetriever",
"ZepRetriever",
"ZillizRetriever",
"DocArrayRetriever",
"RePhraseQueryRetriever",
"WebResearchRetriever",
"EnsembleRetriever",
"ParentDocumentRetriever",
"MultiVectorRetriever",
]
def test_imports() -> None:
assert sorted(retrievers.__all__) == sorted(EXPECTED_ALL)
assert_all_importable(retrievers)

View File

@ -1,4 +1,5 @@
from langchain.smith import __all__
from langchain import smith
from tests.unit_tests import assert_all_importable
EXPECTED_ALL = [ EXPECTED_ALL = [
"arun_on_dataset", "arun_on_dataset",
@ -8,4 +9,5 @@ EXPECTED_ALL = [
def test_all_imports() -> None: def test_all_imports() -> None:
assert set(__all__) == set(EXPECTED_ALL)
assert set(smith.__all__) == set(EXPECTED_ALL)
assert_all_importable(smith)

View File

@ -1,4 +1,5 @@
from langchain.storage import __all__
from langchain import storage
from tests.unit_tests import assert_all_importable
EXPECTED_ALL = [ EXPECTED_ALL = [
"EncoderBackedStore", "EncoderBackedStore",
@ -14,4 +15,5 @@ EXPECTED_ALL = [
def test_all_imports() -> None: def test_all_imports() -> None:
assert set(__all__) == set(EXPECTED_ALL)
assert set(storage.__all__) == set(EXPECTED_ALL)
assert_all_importable(storage)

View File

@ -1,4 +1,5 @@
from langchain.tools import __all__
from langchain import tools
from tests.unit_tests import assert_all_importable
EXPECTED_ALL = [ EXPECTED_ALL = [
"AINAppOps", "AINAppOps",
@ -124,4 +125,5 @@ EXPECTED_ALL = [
def test_all_imports() -> None: def test_all_imports() -> None:
assert set(__all__) == set(EXPECTED_ALL)
assert set(tools.__all__) == set(EXPECTED_ALL)
assert_all_importable(tools)

View File

@ -1,4 +1,5 @@
from langchain_community.utilities import __all__
from langchain import utilities
from tests.unit_tests import assert_all_importable
EXPECTED_ALL = [ EXPECTED_ALL = [
"AlphaVantageAPIWrapper", "AlphaVantageAPIWrapper",
@ -51,4 +52,5 @@ EXPECTED_ALL = [
def test_all_imports() -> None: def test_all_imports() -> None:
assert set(__all__) == set(EXPECTED_ALL)
assert set(utilities.__all__) == set(EXPECTED_ALL)
assert_all_importable(utilities)

View File

@ -1,4 +1,5 @@
from langchain.utils import __all__
from langchain import utils
from tests.unit_tests import assert_all_importable
EXPECTED_ALL = [ EXPECTED_ALL = [
"StrictFormatter", "StrictFormatter",
@ -25,4 +26,5 @@ EXPECTED_ALL = [
def test_all_imports() -> None: def test_all_imports() -> None:
assert set(__all__) == set(EXPECTED_ALL)
assert set(utils.__all__) == set(EXPECTED_ALL)
assert_all_importable(utils)