mirror of https://github.com/hwchase17/langchain.git
synced 2025-06-28 17:38:36 +00:00
parent af50f21765
commit 9c8523b529

@@ -15,48 +15,30 @@
    Document
"""  # noqa: E501

from langchain_community.document_transformers.beautiful_soup_transformer import (
    BeautifulSoupTransformer,
)
from langchain_community.document_transformers.doctran_text_extract import (
    DoctranPropertyExtractor,
)
from langchain_community.document_transformers.doctran_text_qa import (
    DoctranQATransformer,
)
from langchain_community.document_transformers.doctran_text_translate import (
    DoctranTextTranslator,
)
from langchain_community.document_transformers.embeddings_redundant_filter import (
    EmbeddingsClusteringFilter,
    EmbeddingsRedundantFilter,
    get_stateful_documents,
)
from langchain_community.document_transformers.google_translate import (
    GoogleTranslateTransformer,
)
from langchain_community.document_transformers.html2text import Html2TextTransformer
from langchain_community.document_transformers.long_context_reorder import (
    LongContextReorder,
)
from langchain_community.document_transformers.nuclia_text_transform import (
    NucliaTextTransformer,
)
from langchain_community.document_transformers.openai_functions import (
    OpenAIMetadataTagger,
)
import importlib
from typing import Any

__all__ = [
    "BeautifulSoupTransformer",
    "DoctranQATransformer",
    "DoctranTextTranslator",
    "DoctranPropertyExtractor",
    "EmbeddingsClusteringFilter",
    "EmbeddingsRedundantFilter",
    "GoogleTranslateTransformer",
    "get_stateful_documents",
    "LongContextReorder",
    "NucliaTextTransformer",
    "OpenAIMetadataTagger",
    "Html2TextTransformer",
]
_module_lookup = {
    "BeautifulSoupTransformer": "langchain_community.document_transformers.beautiful_soup_transformer",  # noqa: E501
    "DoctranPropertyExtractor": "langchain_community.document_transformers.doctran_text_extract",  # noqa: E501
    "DoctranQATransformer": "langchain_community.document_transformers.doctran_text_qa",
    "DoctranTextTranslator": "langchain_community.document_transformers.doctran_text_translate",  # noqa: E501
    "EmbeddingsClusteringFilter": "langchain_community.document_transformers.embeddings_redundant_filter",  # noqa: E501
    "EmbeddingsRedundantFilter": "langchain_community.document_transformers.embeddings_redundant_filter",  # noqa: E501
    "GoogleTranslateTransformer": "langchain_community.document_transformers.google_translate",  # noqa: E501
    "Html2TextTransformer": "langchain_community.document_transformers.html2text",
    "LongContextReorder": "langchain_community.document_transformers.long_context_reorder",  # noqa: E501
    "NucliaTextTransformer": "langchain_community.document_transformers.nuclia_text_transform",  # noqa: E501
    "OpenAIMetadataTagger": "langchain_community.document_transformers.openai_functions",  # noqa: E501
    "get_stateful_documents": "langchain_community.document_transformers.embeddings_redundant_filter",  # noqa: E501
}


def __getattr__(name: str) -> Any:
    if name in _module_lookup:
        module = importlib.import_module(_module_lookup[name])
        return getattr(module, name)
    raise AttributeError(f"module {__name__} has no attribute {name}")


__all__ = list(_module_lookup.keys())
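The hunk above applies the PEP 562 lazy-import pattern: a module-level __getattr__
defers each provider import until the exported name is first accessed, so importing
the package no longer imports every transformer dependency eagerly. A minimal sketch
of the observable behavior, assuming a fresh interpreter with langchain_community
installed (this demo is not part of the commit):

import sys

from langchain_community import document_transformers

# The provider module is absent until the lazy attribute is first touched.
assert "langchain_community.document_transformers.html2text" not in sys.modules
transformer_cls = document_transformers.Html2TextTransformer  # triggers the import
assert "langchain_community.document_transformers.html2text" in sys.modules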
@@ -11,157 +11,88 @@ from different APIs and services.
"""


import importlib
import logging
from typing import Any

from langchain_community.embeddings.aleph_alpha import (
    AlephAlphaAsymmetricSemanticEmbedding,
    AlephAlphaSymmetricSemanticEmbedding,
)
from langchain_community.embeddings.awa import AwaEmbeddings
from langchain_community.embeddings.azure_openai import AzureOpenAIEmbeddings
from langchain_community.embeddings.baichuan import BaichuanTextEmbeddings
from langchain_community.embeddings.baidu_qianfan_endpoint import (
    QianfanEmbeddingsEndpoint,
)
from langchain_community.embeddings.bedrock import BedrockEmbeddings
from langchain_community.embeddings.bookend import BookendEmbeddings
from langchain_community.embeddings.clarifai import ClarifaiEmbeddings
from langchain_community.embeddings.cohere import CohereEmbeddings
from langchain_community.embeddings.dashscope import DashScopeEmbeddings
from langchain_community.embeddings.databricks import DatabricksEmbeddings
from langchain_community.embeddings.deepinfra import DeepInfraEmbeddings
from langchain_community.embeddings.edenai import EdenAiEmbeddings
from langchain_community.embeddings.elasticsearch import ElasticsearchEmbeddings
from langchain_community.embeddings.embaas import EmbaasEmbeddings
from langchain_community.embeddings.ernie import ErnieEmbeddings
from langchain_community.embeddings.fake import (
    DeterministicFakeEmbedding,
    FakeEmbeddings,
)
from langchain_community.embeddings.fastembed import FastEmbedEmbeddings
from langchain_community.embeddings.google_palm import GooglePalmEmbeddings
from langchain_community.embeddings.gpt4all import GPT4AllEmbeddings
from langchain_community.embeddings.gradient_ai import GradientEmbeddings
from langchain_community.embeddings.huggingface import (
    HuggingFaceBgeEmbeddings,
    HuggingFaceEmbeddings,
    HuggingFaceInferenceAPIEmbeddings,
    HuggingFaceInstructEmbeddings,
)
from langchain_community.embeddings.huggingface_hub import HuggingFaceHubEmbeddings
from langchain_community.embeddings.infinity import InfinityEmbeddings
from langchain_community.embeddings.infinity_local import InfinityEmbeddingsLocal
from langchain_community.embeddings.javelin_ai_gateway import JavelinAIGatewayEmbeddings
from langchain_community.embeddings.jina import JinaEmbeddings
from langchain_community.embeddings.johnsnowlabs import JohnSnowLabsEmbeddings
from langchain_community.embeddings.laser import LaserEmbeddings
from langchain_community.embeddings.llamacpp import LlamaCppEmbeddings
from langchain_community.embeddings.llamafile import LlamafileEmbeddings
from langchain_community.embeddings.llm_rails import LLMRailsEmbeddings
from langchain_community.embeddings.localai import LocalAIEmbeddings
from langchain_community.embeddings.minimax import MiniMaxEmbeddings
from langchain_community.embeddings.mlflow import (
    MlflowCohereEmbeddings,
    MlflowEmbeddings,
)
from langchain_community.embeddings.mlflow_gateway import MlflowAIGatewayEmbeddings
from langchain_community.embeddings.modelscope_hub import ModelScopeEmbeddings
from langchain_community.embeddings.mosaicml import MosaicMLInstructorEmbeddings
from langchain_community.embeddings.nemo import NeMoEmbeddings
from langchain_community.embeddings.nlpcloud import NLPCloudEmbeddings
from langchain_community.embeddings.oci_generative_ai import OCIGenAIEmbeddings
from langchain_community.embeddings.octoai_embeddings import OctoAIEmbeddings
from langchain_community.embeddings.ollama import OllamaEmbeddings
from langchain_community.embeddings.openai import OpenAIEmbeddings
from langchain_community.embeddings.optimum_intel import QuantizedBiEncoderEmbeddings
from langchain_community.embeddings.sagemaker_endpoint import (
    SagemakerEndpointEmbeddings,
)
from langchain_community.embeddings.self_hosted import SelfHostedEmbeddings
from langchain_community.embeddings.self_hosted_hugging_face import (
    SelfHostedHuggingFaceEmbeddings,
    SelfHostedHuggingFaceInstructEmbeddings,
)
from langchain_community.embeddings.sentence_transformer import (
    SentenceTransformerEmbeddings,
)
from langchain_community.embeddings.spacy_embeddings import SpacyEmbeddings
from langchain_community.embeddings.sparkllm import SparkLLMTextEmbeddings
from langchain_community.embeddings.tensorflow_hub import TensorflowHubEmbeddings
from langchain_community.embeddings.vertexai import VertexAIEmbeddings
from langchain_community.embeddings.volcengine import VolcanoEmbeddings
from langchain_community.embeddings.voyageai import VoyageEmbeddings
from langchain_community.embeddings.xinference import XinferenceEmbeddings
_module_lookup = {
    "AlephAlphaAsymmetricSemanticEmbedding": "langchain_community.embeddings.aleph_alpha",  # noqa: E501
    "AlephAlphaSymmetricSemanticEmbedding": "langchain_community.embeddings.aleph_alpha",  # noqa: E501
    "AwaEmbeddings": "langchain_community.embeddings.awa",
    "AzureOpenAIEmbeddings": "langchain_community.embeddings.azure_openai",
    "BaichuanTextEmbeddings": "langchain_community.embeddings.baichuan",
    "BedrockEmbeddings": "langchain_community.embeddings.bedrock",
    "BookendEmbeddings": "langchain_community.embeddings.bookend",
    "ClarifaiEmbeddings": "langchain_community.embeddings.clarifai",
    "CohereEmbeddings": "langchain_community.embeddings.cohere",
    "DashScopeEmbeddings": "langchain_community.embeddings.dashscope",
    "DatabricksEmbeddings": "langchain_community.embeddings.databricks",
    "DeepInfraEmbeddings": "langchain_community.embeddings.deepinfra",
    "DeterministicFakeEmbedding": "langchain_community.embeddings.fake",
    "EdenAiEmbeddings": "langchain_community.embeddings.edenai",
    "ElasticsearchEmbeddings": "langchain_community.embeddings.elasticsearch",
    "EmbaasEmbeddings": "langchain_community.embeddings.embaas",
    "ErnieEmbeddings": "langchain_community.embeddings.ernie",
    "FakeEmbeddings": "langchain_community.embeddings.fake",
    "FastEmbedEmbeddings": "langchain_community.embeddings.fastembed",
    "GPT4AllEmbeddings": "langchain_community.embeddings.gpt4all",
    "GooglePalmEmbeddings": "langchain_community.embeddings.google_palm",
    "GradientEmbeddings": "langchain_community.embeddings.gradient_ai",
    "HuggingFaceBgeEmbeddings": "langchain_community.embeddings.huggingface",
    "HuggingFaceEmbeddings": "langchain_community.embeddings.huggingface",
    "HuggingFaceHubEmbeddings": "langchain_community.embeddings.huggingface_hub",
    "HuggingFaceInferenceAPIEmbeddings": "langchain_community.embeddings.huggingface",
    "HuggingFaceInstructEmbeddings": "langchain_community.embeddings.huggingface",
    "InfinityEmbeddings": "langchain_community.embeddings.infinity",
    "InfinityEmbeddingsLocal": "langchain_community.embeddings.infinity_local",
    "JavelinAIGatewayEmbeddings": "langchain_community.embeddings.javelin_ai_gateway",
    "JinaEmbeddings": "langchain_community.embeddings.jina",
    "JohnSnowLabsEmbeddings": "langchain_community.embeddings.johnsnowlabs",
    "LLMRailsEmbeddings": "langchain_community.embeddings.llm_rails",
    "LaserEmbeddings": "langchain_community.embeddings.laser",
    "LlamaCppEmbeddings": "langchain_community.embeddings.llamacpp",
    "LlamafileEmbeddings": "langchain_community.embeddings.llamafile",
    "LocalAIEmbeddings": "langchain_community.embeddings.localai",
    "MiniMaxEmbeddings": "langchain_community.embeddings.minimax",
    "MlflowAIGatewayEmbeddings": "langchain_community.embeddings.mlflow_gateway",
    "MlflowCohereEmbeddings": "langchain_community.embeddings.mlflow",
    "MlflowEmbeddings": "langchain_community.embeddings.mlflow",
    "ModelScopeEmbeddings": "langchain_community.embeddings.modelscope_hub",
    "MosaicMLInstructorEmbeddings": "langchain_community.embeddings.mosaicml",
    "NLPCloudEmbeddings": "langchain_community.embeddings.nlpcloud",
    "NeMoEmbeddings": "langchain_community.embeddings.nemo",
    "OCIGenAIEmbeddings": "langchain_community.embeddings.oci_generative_ai",
    "OctoAIEmbeddings": "langchain_community.embeddings.octoai_embeddings",
    "OllamaEmbeddings": "langchain_community.embeddings.ollama",
    "OpenAIEmbeddings": "langchain_community.embeddings.openai",
    "QianfanEmbeddingsEndpoint": "langchain_community.embeddings.baidu_qianfan_endpoint",  # noqa: E501
    "QuantizedBiEncoderEmbeddings": "langchain_community.embeddings.optimum_intel",
    "SagemakerEndpointEmbeddings": "langchain_community.embeddings.sagemaker_endpoint",
    "SelfHostedEmbeddings": "langchain_community.embeddings.self_hosted",
    "SelfHostedHuggingFaceEmbeddings": "langchain_community.embeddings.self_hosted_hugging_face",  # noqa: E501
    "SelfHostedHuggingFaceInstructEmbeddings": "langchain_community.embeddings.self_hosted_hugging_face",  # noqa: E501
    "SentenceTransformerEmbeddings": "langchain_community.embeddings.sentence_transformer",  # noqa: E501
    "SpacyEmbeddings": "langchain_community.embeddings.spacy_embeddings",
    "SparkLLMTextEmbeddings": "langchain_community.embeddings.sparkllm",
    "TensorflowHubEmbeddings": "langchain_community.embeddings.tensorflow_hub",
    "VertexAIEmbeddings": "langchain_community.embeddings.vertexai",
    "VolcanoEmbeddings": "langchain_community.embeddings.volcengine",
    "VoyageEmbeddings": "langchain_community.embeddings.voyageai",
    "XinferenceEmbeddings": "langchain_community.embeddings.xinference",
}


def __getattr__(name: str) -> Any:
    if name in _module_lookup:
        module = importlib.import_module(_module_lookup[name])
        return getattr(module, name)
    raise AttributeError(f"module {__name__} has no attribute {name}")


__all__ = list(_module_lookup.keys())

logger = logging.getLogger(__name__)

__all__ = [
    "OpenAIEmbeddings",
    "AzureOpenAIEmbeddings",
    "BaichuanTextEmbeddings",
    "ClarifaiEmbeddings",
    "CohereEmbeddings",
    "DatabricksEmbeddings",
    "ElasticsearchEmbeddings",
    "FastEmbedEmbeddings",
    "HuggingFaceEmbeddings",
    "HuggingFaceInferenceAPIEmbeddings",
    "InfinityEmbeddings",
    "InfinityEmbeddingsLocal",
    "GradientEmbeddings",
    "JinaEmbeddings",
    "LaserEmbeddings",
    "LlamaCppEmbeddings",
    "LlamafileEmbeddings",
    "LLMRailsEmbeddings",
    "HuggingFaceHubEmbeddings",
    "MlflowEmbeddings",
    "MlflowCohereEmbeddings",
    "MlflowAIGatewayEmbeddings",
    "ModelScopeEmbeddings",
    "TensorflowHubEmbeddings",
    "SagemakerEndpointEmbeddings",
    "HuggingFaceInstructEmbeddings",
    "MosaicMLInstructorEmbeddings",
    "SelfHostedEmbeddings",
    "SelfHostedHuggingFaceEmbeddings",
    "SelfHostedHuggingFaceInstructEmbeddings",
    "FakeEmbeddings",
    "DeterministicFakeEmbedding",
    "AlephAlphaAsymmetricSemanticEmbedding",
    "AlephAlphaSymmetricSemanticEmbedding",
    "SentenceTransformerEmbeddings",
    "GooglePalmEmbeddings",
    "MiniMaxEmbeddings",
    "VertexAIEmbeddings",
    "BedrockEmbeddings",
    "DeepInfraEmbeddings",
    "EdenAiEmbeddings",
    "DashScopeEmbeddings",
    "EmbaasEmbeddings",
    "OctoAIEmbeddings",
    "SpacyEmbeddings",
    "NLPCloudEmbeddings",
    "GPT4AllEmbeddings",
    "XinferenceEmbeddings",
    "LocalAIEmbeddings",
    "AwaEmbeddings",
    "HuggingFaceBgeEmbeddings",
    "ErnieEmbeddings",
    "JavelinAIGatewayEmbeddings",
    "OllamaEmbeddings",
    "QianfanEmbeddingsEndpoint",
    "JohnSnowLabsEmbeddings",
    "VoyageEmbeddings",
    "BookendEmbeddings",
    "VolcanoEmbeddings",
    "OCIGenAIEmbeddings",
    "QuantizedBiEncoderEmbeddings",
    "NeMoEmbeddings",
    "SparkLLMTextEmbeddings",
]


# TODO: this is in here to maintain backwards compatibility
class HypotheticalDocumentEmbedder:
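A trade-off of the lazy pattern in this hunk: once the eager imports are gone,
static type checkers and IDEs can no longer see the re-exported names. A common
complement, assumed here rather than taken from this commit, is a TYPE_CHECKING
block that restores the imports for tooling only:

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Visible to static analysis only; at runtime the lazy __getattr__
    # above still performs the import.
    from langchain_community.embeddings.openai import OpenAIEmbeddings  # noqa: F401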
@@ -1,33 +1,31 @@
"""**Graphs** provide a natural language interface to graph databases."""

from langchain_community.graphs.arangodb_graph import ArangoGraph
from langchain_community.graphs.falkordb_graph import FalkorDBGraph
from langchain_community.graphs.gremlin_graph import GremlinGraph
from langchain_community.graphs.hugegraph import HugeGraph
from langchain_community.graphs.kuzu_graph import KuzuGraph
from langchain_community.graphs.memgraph_graph import MemgraphGraph
from langchain_community.graphs.nebula_graph import NebulaGraph
from langchain_community.graphs.neo4j_graph import Neo4jGraph
from langchain_community.graphs.neptune_graph import NeptuneGraph
from langchain_community.graphs.neptune_rdf_graph import NeptuneRdfGraph
from langchain_community.graphs.networkx_graph import NetworkxEntityGraph
from langchain_community.graphs.ontotext_graphdb_graph import OntotextGraphDBGraph
from langchain_community.graphs.rdf_graph import RdfGraph
from langchain_community.graphs.tigergraph_graph import TigerGraph
import importlib
from typing import Any

__all__ = [
    "MemgraphGraph",
    "NetworkxEntityGraph",
    "Neo4jGraph",
    "NebulaGraph",
    "NeptuneGraph",
    "NeptuneRdfGraph",
    "KuzuGraph",
    "HugeGraph",
    "RdfGraph",
    "ArangoGraph",
    "FalkorDBGraph",
    "TigerGraph",
    "OntotextGraphDBGraph",
    "GremlinGraph",
]
_module_lookup = {
    "ArangoGraph": "langchain_community.graphs.arangodb_graph",
    "FalkorDBGraph": "langchain_community.graphs.falkordb_graph",
    "GremlinGraph": "langchain_community.graphs.gremlin_graph",
    "HugeGraph": "langchain_community.graphs.hugegraph",
    "KuzuGraph": "langchain_community.graphs.kuzu_graph",
    "MemgraphGraph": "langchain_community.graphs.memgraph_graph",
    "NebulaGraph": "langchain_community.graphs.nebula_graph",
    "Neo4jGraph": "langchain_community.graphs.neo4j_graph",
    "NeptuneGraph": "langchain_community.graphs.neptune_graph",
    "NeptuneRdfGraph": "langchain_community.graphs.neptune_rdf_graph",
    "NetworkxEntityGraph": "langchain_community.graphs.networkx_graph",
    "OntotextGraphDBGraph": "langchain_community.graphs.ontotext_graphdb_graph",
    "RdfGraph": "langchain_community.graphs.rdf_graph",
    "TigerGraph": "langchain_community.graphs.tigergraph_graph",
}


def __getattr__(name: str) -> Any:
    if name in _module_lookup:
        module = importlib.import_module(_module_lookup[name])
        return getattr(module, name)
    raise AttributeError(f"module {__name__} has no attribute {name}")


__all__ = list(_module_lookup.keys())
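One subtlety of the __getattr__ above: because the resolved object is never stored
back on the module, every access to a lazy name re-enters __getattr__ (cheap after
the first time, since importlib.import_module is satisfied from sys.modules). A
caching variant, sketched here as an alternative rather than what the commit ships:

import importlib
from typing import Any

_module_lookup = {"Neo4jGraph": "langchain_community.graphs.neo4j_graph"}


def __getattr__(name: str) -> Any:
    if name in _module_lookup:
        obj = getattr(importlib.import_module(_module_lookup[name]), name)
        globals()[name] = obj  # cache, so later lookups bypass __getattr__
        return obj
    raise AttributeError(f"module {__name__} has no attribute {name}")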
@@ -18,98 +18,55 @@ the backbone of a retriever, but there are other types of retrievers as well.
    CallbackManagerForRetrieverRun, AsyncCallbackManagerForRetrieverRun
"""

from langchain_community.retrievers.arcee import ArceeRetriever
from langchain_community.retrievers.arxiv import ArxivRetriever
from langchain_community.retrievers.azure_cognitive_search import (
    AzureCognitiveSearchRetriever,
)
from langchain_community.retrievers.bedrock import AmazonKnowledgeBasesRetriever
from langchain_community.retrievers.bm25 import BM25Retriever
from langchain_community.retrievers.breebs import BreebsRetriever
from langchain_community.retrievers.chaindesk import ChaindeskRetriever
from langchain_community.retrievers.chatgpt_plugin_retriever import (
    ChatGPTPluginRetriever,
)
from langchain_community.retrievers.cohere_rag_retriever import CohereRagRetriever
from langchain_community.retrievers.docarray import DocArrayRetriever
from langchain_community.retrievers.elastic_search_bm25 import (
    ElasticSearchBM25Retriever,
)
from langchain_community.retrievers.embedchain import EmbedchainRetriever
from langchain_community.retrievers.google_cloud_documentai_warehouse import (
    GoogleDocumentAIWarehouseRetriever,
)
from langchain_community.retrievers.google_vertex_ai_search import (
    GoogleCloudEnterpriseSearchRetriever,
    GoogleVertexAIMultiTurnSearchRetriever,
    GoogleVertexAISearchRetriever,
)
from langchain_community.retrievers.kay import KayAiRetriever
from langchain_community.retrievers.kendra import AmazonKendraRetriever
from langchain_community.retrievers.knn import KNNRetriever
from langchain_community.retrievers.llama_index import (
    LlamaIndexGraphRetriever,
    LlamaIndexRetriever,
)
from langchain_community.retrievers.metal import MetalRetriever
from langchain_community.retrievers.milvus import MilvusRetriever
from langchain_community.retrievers.outline import OutlineRetriever
from langchain_community.retrievers.pinecone_hybrid_search import (
    PineconeHybridSearchRetriever,
)
from langchain_community.retrievers.pubmed import PubMedRetriever
from langchain_community.retrievers.qdrant_sparse_vector_retriever import (
    QdrantSparseVectorRetriever,
)
from langchain_community.retrievers.remote_retriever import RemoteLangChainRetriever
from langchain_community.retrievers.svm import SVMRetriever
from langchain_community.retrievers.tavily_search_api import TavilySearchAPIRetriever
from langchain_community.retrievers.tfidf import TFIDFRetriever
from langchain_community.retrievers.vespa_retriever import VespaRetriever
from langchain_community.retrievers.weaviate_hybrid_search import (
    WeaviateHybridSearchRetriever,
)
from langchain_community.retrievers.wikipedia import WikipediaRetriever
from langchain_community.retrievers.you import YouRetriever
from langchain_community.retrievers.zep import ZepRetriever
from langchain_community.retrievers.zilliz import ZillizRetriever
import importlib
from typing import Any

__all__ = [
    "AmazonKendraRetriever",
    "AmazonKnowledgeBasesRetriever",
    "ArceeRetriever",
    "ArxivRetriever",
    "AzureCognitiveSearchRetriever",
    "BM25Retriever",
    "BreebsRetriever",
    "ChatGPTPluginRetriever",
    "ChaindeskRetriever",
    "CohereRagRetriever",
    "ElasticSearchBM25Retriever",
    "EmbedchainRetriever",
    "GoogleDocumentAIWarehouseRetriever",
    "GoogleCloudEnterpriseSearchRetriever",
    "GoogleVertexAIMultiTurnSearchRetriever",
    "GoogleVertexAISearchRetriever",
    "KayAiRetriever",
    "KNNRetriever",
    "LlamaIndexGraphRetriever",
    "LlamaIndexRetriever",
    "MetalRetriever",
    "MilvusRetriever",
    "OutlineRetriever",
    "PineconeHybridSearchRetriever",
    "PubMedRetriever",
    "QdrantSparseVectorRetriever",
    "RemoteLangChainRetriever",
    "SVMRetriever",
    "TavilySearchAPIRetriever",
    "TFIDFRetriever",
    "VespaRetriever",
    "WeaviateHybridSearchRetriever",
    "WikipediaRetriever",
    "YouRetriever",
    "ZepRetriever",
    "ZillizRetriever",
    "DocArrayRetriever",
]
_module_lookup = {
    "AmazonKendraRetriever": "langchain_community.retrievers.kendra",
    "AmazonKnowledgeBasesRetriever": "langchain_community.retrievers.bedrock",
    "ArceeRetriever": "langchain_community.retrievers.arcee",
    "ArxivRetriever": "langchain_community.retrievers.arxiv",
    "AzureCognitiveSearchRetriever": "langchain_community.retrievers.azure_cognitive_search",  # noqa: E501
    "BM25Retriever": "langchain_community.retrievers.bm25",
    "BreebsRetriever": "langchain_community.retrievers.breebs",
    "ChaindeskRetriever": "langchain_community.retrievers.chaindesk",
    "ChatGPTPluginRetriever": "langchain_community.retrievers.chatgpt_plugin_retriever",
    "CohereRagRetriever": "langchain_community.retrievers.cohere_rag_retriever",
    "DocArrayRetriever": "langchain_community.retrievers.docarray",
    "ElasticSearchBM25Retriever": "langchain_community.retrievers.elastic_search_bm25",
    "EmbedchainRetriever": "langchain_community.retrievers.embedchain",
    "GoogleCloudEnterpriseSearchRetriever": "langchain_community.retrievers.google_vertex_ai_search",  # noqa: E501
    "GoogleDocumentAIWarehouseRetriever": "langchain_community.retrievers.google_cloud_documentai_warehouse",  # noqa: E501
    "GoogleVertexAIMultiTurnSearchRetriever": "langchain_community.retrievers.google_vertex_ai_search",  # noqa: E501
    "GoogleVertexAISearchRetriever": "langchain_community.retrievers.google_vertex_ai_search",  # noqa: E501
    "KNNRetriever": "langchain_community.retrievers.knn",
    "KayAiRetriever": "langchain_community.retrievers.kay",
    "LlamaIndexGraphRetriever": "langchain_community.retrievers.llama_index",
    "LlamaIndexRetriever": "langchain_community.retrievers.llama_index",
    "MetalRetriever": "langchain_community.retrievers.metal",
    "MilvusRetriever": "langchain_community.retrievers.milvus",
    "OutlineRetriever": "langchain_community.retrievers.outline",
    "PineconeHybridSearchRetriever": "langchain_community.retrievers.pinecone_hybrid_search",  # noqa: E501
    "PubMedRetriever": "langchain_community.retrievers.pubmed",
    "QdrantSparseVectorRetriever": "langchain_community.retrievers.qdrant_sparse_vector_retriever",  # noqa: E501
    "RemoteLangChainRetriever": "langchain_community.retrievers.remote_retriever",
    "SVMRetriever": "langchain_community.retrievers.svm",
    "TFIDFRetriever": "langchain_community.retrievers.tfidf",
    "TavilySearchAPIRetriever": "langchain_community.retrievers.tavily_search_api",
    "VespaRetriever": "langchain_community.retrievers.vespa_retriever",
    "WeaviateHybridSearchRetriever": "langchain_community.retrievers.weaviate_hybrid_search",  # noqa: E501
    "WikipediaRetriever": "langchain_community.retrievers.wikipedia",
    "YouRetriever": "langchain_community.retrievers.you",
    "ZepRetriever": "langchain_community.retrievers.zep",
    "ZillizRetriever": "langchain_community.retrievers.zilliz",
}


def __getattr__(name: str) -> Any:
    if name in _module_lookup:
        module = importlib.import_module(_module_lookup[name])
        return getattr(module, name)
    raise AttributeError(f"module {__name__} has no attribute {name}")


__all__ = list(_module_lookup.keys())
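Star-imports keep working under this pattern: `from ... import *` resolves each
name in __all__ with getattr, which falls through to the module-level __getattr__.
Doing so defeats the laziness, though, since it imports every provider module. A
small sketch of that eager resolution, assuming the optional dependencies of each
retriever module are importable:

import langchain_community.retrievers as retrievers

for name in retrievers.__all__:
    getattr(retrievers, name)  # each call triggers one lazy import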
@@ -14,22 +14,24 @@ The primary goal of these storages is to support caching.
"""  # noqa: E501

from langchain_community.storage.astradb import (
    AstraDBByteStore,
    AstraDBStore,
)
from langchain_community.storage.mongodb import MongoDBStore
from langchain_community.storage.redis import RedisStore
from langchain_community.storage.upstash_redis import (
    UpstashRedisByteStore,
    UpstashRedisStore,
)
import importlib
from typing import Any

__all__ = [
    "AstraDBStore",
    "AstraDBByteStore",
    "MongoDBStore",
    "RedisStore",
    "UpstashRedisByteStore",
    "UpstashRedisStore",
]
_module_lookup = {
    "AstraDBByteStore": "langchain_community.storage.astradb",
    "AstraDBStore": "langchain_community.storage.astradb",
    "MongoDBStore": "langchain_community.storage.mongodb",
    "RedisStore": "langchain_community.storage.redis",
    "UpstashRedisByteStore": "langchain_community.storage.upstash_redis",
    "UpstashRedisStore": "langchain_community.storage.upstash_redis",
}


def __getattr__(name: str) -> Any:
    if name in _module_lookup:
        module = importlib.import_module(_module_lookup[name])
        return getattr(module, name)
    raise AttributeError(f"module {__name__} has no attribute {name}")


__all__ = list(_module_lookup.keys())
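With the lookup table in place, a misspelled or removed name now fails at attribute
access with AttributeError rather than at package import. A small test sketch,
assuming pytest is available (not part of the commit):

import pytest


def test_unknown_store_name() -> None:
    import langchain_community.storage as storage

    with pytest.raises(AttributeError):
        _ = storage.NoSuchStore  # hypothetical name, absent from _module_lookup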
File diff suppressed because it is too large
@@ -3,456 +3,72 @@
Other LangChain classes use **Utilities** to interact with third-party systems
and packages.
"""
import importlib
from typing import Any

from langchain_community.utilities.requests import (
    Requests,
    RequestsWrapper,
    TextRequestsWrapper,
)


def _import_alpha_vantage() -> Any:
    from langchain_community.utilities.alpha_vantage import AlphaVantageAPIWrapper

    return AlphaVantageAPIWrapper


def _import_apify() -> Any:
    from langchain_community.utilities.apify import ApifyWrapper

    return ApifyWrapper


def _import_arcee() -> Any:
    from langchain_community.utilities.arcee import ArceeWrapper

    return ArceeWrapper


def _import_arxiv() -> Any:
    from langchain_community.utilities.arxiv import ArxivAPIWrapper

    return ArxivAPIWrapper


def _import_awslambda() -> Any:
    from langchain_community.utilities.awslambda import LambdaWrapper

    return LambdaWrapper


def _import_bibtex() -> Any:
    from langchain_community.utilities.bibtex import BibtexparserWrapper

    return BibtexparserWrapper


def _import_bing_search() -> Any:
    from langchain_community.utilities.bing_search import BingSearchAPIWrapper

    return BingSearchAPIWrapper


def _import_brave_search() -> Any:
    from langchain_community.utilities.brave_search import BraveSearchWrapper

    return BraveSearchWrapper


def _import_duckduckgo_search() -> Any:
    from langchain_community.utilities.duckduckgo_search import (
        DuckDuckGoSearchAPIWrapper,
    )

    return DuckDuckGoSearchAPIWrapper


def _import_golden_query() -> Any:
    from langchain_community.utilities.golden_query import GoldenQueryAPIWrapper

    return GoldenQueryAPIWrapper


def _import_google_lens() -> Any:
    from langchain_community.utilities.google_lens import GoogleLensAPIWrapper

    return GoogleLensAPIWrapper


def _import_google_places_api() -> Any:
    from langchain_community.utilities.google_places_api import GooglePlacesAPIWrapper

    return GooglePlacesAPIWrapper


def _import_google_jobs() -> Any:
    from langchain_community.utilities.google_jobs import GoogleJobsAPIWrapper

    return GoogleJobsAPIWrapper


def _import_google_scholar() -> Any:
    from langchain_community.utilities.google_scholar import GoogleScholarAPIWrapper

    return GoogleScholarAPIWrapper


def _import_google_trends() -> Any:
    from langchain_community.utilities.google_trends import GoogleTrendsAPIWrapper

    return GoogleTrendsAPIWrapper


def _import_google_finance() -> Any:
    from langchain_community.utilities.google_finance import GoogleFinanceAPIWrapper

    return GoogleFinanceAPIWrapper


def _import_google_search() -> Any:
    from langchain_community.utilities.google_search import GoogleSearchAPIWrapper

    return GoogleSearchAPIWrapper


def _import_google_serper() -> Any:
    from langchain_community.utilities.google_serper import GoogleSerperAPIWrapper

    return GoogleSerperAPIWrapper


def _import_graphql() -> Any:
    from langchain_community.utilities.graphql import GraphQLAPIWrapper

    return GraphQLAPIWrapper


def _import_jira() -> Any:
    from langchain_community.utilities.jira import JiraAPIWrapper

    return JiraAPIWrapper


def _import_max_compute() -> Any:
    from langchain_community.utilities.max_compute import MaxComputeAPIWrapper

    return MaxComputeAPIWrapper


def _import_merriam_webster() -> Any:
    from langchain_community.utilities.merriam_webster import MerriamWebsterAPIWrapper

    return MerriamWebsterAPIWrapper


def _import_metaphor_search() -> Any:
    from langchain_community.utilities.metaphor_search import MetaphorSearchAPIWrapper

    return MetaphorSearchAPIWrapper


def _import_openweathermap() -> Any:
    from langchain_community.utilities.openweathermap import OpenWeatherMapAPIWrapper

    return OpenWeatherMapAPIWrapper


def _import_outline() -> Any:
    from langchain_community.utilities.outline import OutlineAPIWrapper

    return OutlineAPIWrapper


def _import_passio_nutrition_ai() -> Any:
    from langchain_community.utilities.passio_nutrition_ai import NutritionAIAPI

    return NutritionAIAPI


def _import_portkey() -> Any:
    from langchain_community.utilities.portkey import Portkey

    return Portkey


def _import_powerbi() -> Any:
    from langchain_community.utilities.powerbi import PowerBIDataset

    return PowerBIDataset


def _import_pubmed() -> Any:
    from langchain_community.utilities.pubmed import PubMedAPIWrapper

    return PubMedAPIWrapper


def _import_python() -> Any:
    from langchain_community.utilities.python import PythonREPL

    return PythonREPL


def _import_scenexplain() -> Any:
    from langchain_community.utilities.scenexplain import SceneXplainAPIWrapper

    return SceneXplainAPIWrapper


def _import_searchapi() -> Any:
    from langchain_community.utilities.searchapi import SearchApiAPIWrapper

    return SearchApiAPIWrapper


def _import_searx_search() -> Any:
    from langchain_community.utilities.searx_search import SearxSearchWrapper

    return SearxSearchWrapper


def _import_serpapi() -> Any:
    from langchain_community.utilities.serpapi import SerpAPIWrapper

    return SerpAPIWrapper


def _import_spark_sql() -> Any:
    from langchain_community.utilities.spark_sql import SparkSQL

    return SparkSQL


def _import_sql_database() -> Any:
    from langchain_community.utilities.sql_database import SQLDatabase

    return SQLDatabase


def _import_steam_webapi() -> Any:
    from langchain_community.utilities.steam import SteamWebAPIWrapper

    return SteamWebAPIWrapper


def _import_stackexchange() -> Any:
    from langchain_community.utilities.stackexchange import StackExchangeAPIWrapper

    return StackExchangeAPIWrapper


def _import_tensorflow_datasets() -> Any:
    from langchain_community.utilities.tensorflow_datasets import TensorflowDatasets

    return TensorflowDatasets


def _import_twilio() -> Any:
    from langchain_community.utilities.twilio import TwilioAPIWrapper

    return TwilioAPIWrapper


def _import_you() -> Any:
    from langchain_community.utilities.you import YouSearchAPIWrapper

    return YouSearchAPIWrapper


def _import_wikipedia() -> Any:
    from langchain_community.utilities.wikipedia import WikipediaAPIWrapper

    return WikipediaAPIWrapper


def _import_wolfram_alpha() -> Any:
    from langchain_community.utilities.wolfram_alpha import WolframAlphaAPIWrapper

    return WolframAlphaAPIWrapper


def _import_zapier() -> Any:
    from langchain_community.utilities.zapier import ZapierNLAWrapper

    return ZapierNLAWrapper


def _import_nasa() -> Any:
    from langchain_community.utilities.nasa import NasaAPIWrapper

    return NasaAPIWrapper


def _import_nvidia_riva_asr() -> Any:
    from langchain_community.utilities.nvidia_riva import RivaASR

    return RivaASR


def _import_nvidia_riva_tts() -> Any:
    from langchain_community.utilities.nvidia_riva import RivaTTS

    return RivaTTS


def _import_nvidia_riva_stream() -> Any:
    from langchain_community.utilities.nvidia_riva import AudioStream

    return AudioStream
_module_lookup = {
    "AlphaVantageAPIWrapper": "langchain_community.utilities.alpha_vantage",
    "ApifyWrapper": "langchain_community.utilities.apify",
    "ArceeWrapper": "langchain_community.utilities.arcee",
    "ArxivAPIWrapper": "langchain_community.utilities.arxiv",
    "AudioStream": "langchain_community.utilities.nvidia_riva",
    "BibtexparserWrapper": "langchain_community.utilities.bibtex",
    "BingSearchAPIWrapper": "langchain_community.utilities.bing_search",
    "BraveSearchWrapper": "langchain_community.utilities.brave_search",
    "DuckDuckGoSearchAPIWrapper": "langchain_community.utilities.duckduckgo_search",
    "GoldenQueryAPIWrapper": "langchain_community.utilities.golden_query",
    "GoogleFinanceAPIWrapper": "langchain_community.utilities.google_finance",
    "GoogleJobsAPIWrapper": "langchain_community.utilities.google_jobs",
    "GoogleLensAPIWrapper": "langchain_community.utilities.google_lens",
    "GooglePlacesAPIWrapper": "langchain_community.utilities.google_places_api",
    "GoogleScholarAPIWrapper": "langchain_community.utilities.google_scholar",
    "GoogleSearchAPIWrapper": "langchain_community.utilities.google_search",
    "GoogleSerperAPIWrapper": "langchain_community.utilities.google_serper",
    "GoogleTrendsAPIWrapper": "langchain_community.utilities.google_trends",
    "GraphQLAPIWrapper": "langchain_community.utilities.graphql",
    "JiraAPIWrapper": "langchain_community.utilities.jira",
    "LambdaWrapper": "langchain_community.utilities.awslambda",
    "MaxComputeAPIWrapper": "langchain_community.utilities.max_compute",
    "MerriamWebsterAPIWrapper": "langchain_community.utilities.merriam_webster",
    "MetaphorSearchAPIWrapper": "langchain_community.utilities.metaphor_search",
    "NVIDIARivaASR": "langchain_community.utilities.nvidia_riva",
    "NVIDIARivaStream": "langchain_community.utilities.nvidia_riva",
    "NVIDIARivaTTS": "langchain_community.utilities.nvidia_riva",
    "NasaAPIWrapper": "langchain_community.utilities.nasa",
    "NutritionAIAPI": "langchain_community.utilities.passio_nutrition_ai",
    "OpenWeatherMapAPIWrapper": "langchain_community.utilities.openweathermap",
    "OutlineAPIWrapper": "langchain_community.utilities.outline",
    "Portkey": "langchain_community.utilities.portkey",
    "PowerBIDataset": "langchain_community.utilities.powerbi",
    "PubMedAPIWrapper": "langchain_community.utilities.pubmed",
    "PythonREPL": "langchain_community.utilities.python",
    "Requests": "langchain_community.utilities.requests",
    "RequestsWrapper": "langchain_community.utilities.requests",
    "RivaASR": "langchain_community.utilities.nvidia_riva",
    "RivaTTS": "langchain_community.utilities.nvidia_riva",
    "SQLDatabase": "langchain_community.utilities.sql_database",
    "SceneXplainAPIWrapper": "langchain_community.utilities.scenexplain",
    "SearchApiAPIWrapper": "langchain_community.utilities.searchapi",
    "SearxSearchWrapper": "langchain_community.utilities.searx_search",
    "SerpAPIWrapper": "langchain_community.utilities.serpapi",
    "SparkSQL": "langchain_community.utilities.spark_sql",
    "StackExchangeAPIWrapper": "langchain_community.utilities.stackexchange",
    "SteamWebAPIWrapper": "langchain_community.utilities.steam",
    "TensorflowDatasets": "langchain_community.utilities.tensorflow_datasets",
    "TextRequestsWrapper": "langchain_community.utilities.requests",
    "TwilioAPIWrapper": "langchain_community.utilities.twilio",
    "WikipediaAPIWrapper": "langchain_community.utilities.wikipedia",
    "WolframAlphaAPIWrapper": "langchain_community.utilities.wolfram_alpha",
    "YouSearchAPIWrapper": "langchain_community.utilities.you",
    "ZapierNLAWrapper": "langchain_community.utilities.zapier",
}

def __getattr__(name: str) -> Any:
    if name == "AlphaVantageAPIWrapper":
        return _import_alpha_vantage()
    elif name == "ApifyWrapper":
        return _import_apify()
    elif name == "ArceeWrapper":
        return _import_arcee()
    elif name == "ArxivAPIWrapper":
        return _import_arxiv()
    elif name == "LambdaWrapper":
        return _import_awslambda()
    elif name == "BibtexparserWrapper":
        return _import_bibtex()
    elif name == "BingSearchAPIWrapper":
        return _import_bing_search()
    elif name == "BraveSearchWrapper":
        return _import_brave_search()
    elif name == "DuckDuckGoSearchAPIWrapper":
        return _import_duckduckgo_search()
    elif name == "GoogleLensAPIWrapper":
        return _import_google_lens()
    elif name == "GoldenQueryAPIWrapper":
        return _import_golden_query()
    elif name == "GoogleJobsAPIWrapper":
        return _import_google_jobs()
    elif name == "GoogleScholarAPIWrapper":
        return _import_google_scholar()
    elif name == "GoogleFinanceAPIWrapper":
        return _import_google_finance()
    elif name == "GoogleTrendsAPIWrapper":
        return _import_google_trends()
    elif name == "GooglePlacesAPIWrapper":
        return _import_google_places_api()
    elif name == "GoogleSearchAPIWrapper":
        return _import_google_search()
    elif name == "GoogleSerperAPIWrapper":
        return _import_google_serper()
    elif name == "GraphQLAPIWrapper":
        return _import_graphql()
    elif name == "JiraAPIWrapper":
        return _import_jira()
    elif name == "MaxComputeAPIWrapper":
        return _import_max_compute()
    elif name == "MerriamWebsterAPIWrapper":
        return _import_merriam_webster()
    elif name == "MetaphorSearchAPIWrapper":
        return _import_metaphor_search()
    elif name == "NasaAPIWrapper":
        return _import_nasa()
    elif name == "NVIDIARivaASR":
        return _import_nvidia_riva_asr()
    elif name == "NVIDIARivaStream":
        return _import_nvidia_riva_stream()
    elif name == "NVIDIARivaTTS":
        return _import_nvidia_riva_tts()
    elif name == "OpenWeatherMapAPIWrapper":
        return _import_openweathermap()
    elif name == "OutlineAPIWrapper":
        return _import_outline()
    elif name == "NutritionAIAPI":
        return _import_passio_nutrition_ai()
    elif name == "Portkey":
        return _import_portkey()
    elif name == "PowerBIDataset":
        return _import_powerbi()
    elif name == "PubMedAPIWrapper":
        return _import_pubmed()
    elif name == "PythonREPL":
        return _import_python()
    elif name == "SceneXplainAPIWrapper":
        return _import_scenexplain()
    elif name == "SearchApiAPIWrapper":
        return _import_searchapi()
    elif name == "SearxSearchWrapper":
        return _import_searx_search()
    elif name == "SerpAPIWrapper":
        return _import_serpapi()
    elif name == "SparkSQL":
        return _import_spark_sql()
    elif name == "StackExchangeAPIWrapper":
        return _import_stackexchange()
    elif name == "SQLDatabase":
        return _import_sql_database()
    elif name == "SteamWebAPIWrapper":
        return _import_steam_webapi()
    elif name == "TensorflowDatasets":
        return _import_tensorflow_datasets()
    elif name == "TwilioAPIWrapper":
        return _import_twilio()
    elif name == "YouSearchAPIWrapper":
        return _import_you()
    elif name == "WikipediaAPIWrapper":
        return _import_wikipedia()
    elif name == "WolframAlphaAPIWrapper":
        return _import_wolfram_alpha()
    elif name == "ZapierNLAWrapper":
        return _import_zapier()
    else:
        raise AttributeError(f"Could not find: {name}")
    if name in _module_lookup:
        module = importlib.import_module(_module_lookup[name])
        return getattr(module, name)
    raise AttributeError(f"module {__name__} has no attribute {name}")

__all__ = [
    "AlphaVantageAPIWrapper",
    "ApifyWrapper",
    "ArceeWrapper",
    "ArxivAPIWrapper",
    "BibtexparserWrapper",
    "BingSearchAPIWrapper",
    "BraveSearchWrapper",
    "DuckDuckGoSearchAPIWrapper",
    "GoldenQueryAPIWrapper",
    "GoogleFinanceAPIWrapper",
    "GoogleLensAPIWrapper",
    "GoogleJobsAPIWrapper",
    "GooglePlacesAPIWrapper",
    "GoogleScholarAPIWrapper",
    "GoogleTrendsAPIWrapper",
    "GoogleSearchAPIWrapper",
    "GoogleSerperAPIWrapper",
    "GraphQLAPIWrapper",
    "JiraAPIWrapper",
    "LambdaWrapper",
    "MaxComputeAPIWrapper",
    "MerriamWebsterAPIWrapper",
    "MetaphorSearchAPIWrapper",
    "NasaAPIWrapper",
    "NVIDIARivaASR",
    "NVIDIARivaStream",
    "NVIDIARivaTTS",
    "OpenWeatherMapAPIWrapper",
    "OutlineAPIWrapper",
    "NutritionAIAPI",
    "Portkey",
    "PowerBIDataset",
    "PubMedAPIWrapper",
    "PythonREPL",
    "Requests",
    "RequestsWrapper",
    "SteamWebAPIWrapper",
    "SQLDatabase",
    "SceneXplainAPIWrapper",
    "SearchApiAPIWrapper",
    "SearxSearchWrapper",
    "SerpAPIWrapper",
    "SparkSQL",
    "StackExchangeAPIWrapper",
    "TensorflowDatasets",
    "TextRequestsWrapper",
    "TwilioAPIWrapper",
    "YouSearchAPIWrapper",
    "WikipediaAPIWrapper",
    "WolframAlphaAPIWrapper",
    "ZapierNLAWrapper",
]
__all__ = list(_module_lookup.keys())
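The table-driven __getattr__ replaces both the per-name _import_* helpers and the
long if/elif chain above, and the dict becomes the single source of truth for
__all__. A consistency check one could add as a test, sketched under the assumption
that each listed module imports without its optional provider SDK installed (which
may not hold for every entry):

import importlib

import langchain_community.utilities as utilities


def test_lazy_names_resolve() -> None:
    for name in utilities.__all__:
        obj = getattr(utilities, name)  # goes through the lazy __getattr__
        module = importlib.import_module(utilities._module_lookup[name])
        assert getattr(module, name) is obj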
@@ -665,3 +665,9 @@ class RivaTTS(
        await producer
        await consumer


# Backwards compatibility:
NVIDIARivaASR = RivaASR
NVIDIARivaTTS = RivaTTS
NVIDIARivaStream = AudioStream
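The hunk above keeps the old public names as plain aliases, so existing imports
keep working without any warning. If a deprecation period were wanted instead, a
module-level __getattr__ could emit one on the old names; the following is an
alternative sketch with a stand-in class, not what the commit does:

import warnings
from typing import Any


class RivaASR:  # stand-in for the real class
    pass


_deprecated_aliases = {"NVIDIARivaASR": RivaASR}


def __getattr__(name: str) -> Any:
    if name in _deprecated_aliases:
        warnings.warn(
            f"{name} is deprecated; use the new name instead.",
            DeprecationWarning,
            stacklevel=2,
        )
        return _deprecated_aliases[name]
    raise AttributeError(f"module {__name__} has no attribute {name}")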
@@ -19,759 +19,100 @@ and retrieve the data that are 'most similar' to the embedded query.
    Embeddings, Document
"""  # noqa: E501

import importlib
from typing import Any

from langchain_core.vectorstores import VectorStore


def _import_alibaba_cloud_open_search() -> Any:
    from langchain_community.vectorstores.alibabacloud_opensearch import (
        AlibabaCloudOpenSearch,
    )

    return AlibabaCloudOpenSearch


def _import_alibaba_cloud_open_search_settings() -> Any:
    from langchain_community.vectorstores.alibabacloud_opensearch import (
        AlibabaCloudOpenSearchSettings,
    )

    return AlibabaCloudOpenSearchSettings


def _import_azure_cosmos_db() -> Any:
    from langchain_community.vectorstores.azure_cosmos_db import (
        AzureCosmosDBVectorSearch,
    )

    return AzureCosmosDBVectorSearch


def _import_elastic_knn_search() -> Any:
    from langchain_community.vectorstores.elastic_vector_search import ElasticKnnSearch

    return ElasticKnnSearch


def _import_elastic_vector_search() -> Any:
    from langchain_community.vectorstores.elastic_vector_search import (
        ElasticVectorSearch,
    )

    return ElasticVectorSearch


def _import_analyticdb() -> Any:
    from langchain_community.vectorstores.analyticdb import AnalyticDB

    return AnalyticDB


def _import_annoy() -> Any:
    from langchain_community.vectorstores.annoy import Annoy

    return Annoy


def _import_apache_doris() -> Any:
    from langchain_community.vectorstores.apache_doris import ApacheDoris

    return ApacheDoris


def _import_atlas() -> Any:
    from langchain_community.vectorstores.atlas import AtlasDB

    return AtlasDB


def _import_awadb() -> Any:
    from langchain_community.vectorstores.awadb import AwaDB

    return AwaDB


def _import_azuresearch() -> Any:
    from langchain_community.vectorstores.azuresearch import AzureSearch

    return AzureSearch


def _import_bageldb() -> Any:
    from langchain_community.vectorstores.bageldb import Bagel

    return Bagel


def _import_baiducloud_vector_search() -> Any:
    from langchain_community.vectorstores.baiducloud_vector_search import BESVectorStore

    return BESVectorStore


def _import_bigquery() -> Any:
    from langchain_community.vectorstores.bigquery_vector_search import (
        BigQueryVectorSearch,
    )

    return BigQueryVectorSearch


def _import_cassandra() -> Any:
    from langchain_community.vectorstores.cassandra import Cassandra

    return Cassandra


def _import_astradb() -> Any:
    from langchain_community.vectorstores.astradb import AstraDB

    return AstraDB


def _import_chroma() -> Any:
    from langchain_community.vectorstores.chroma import Chroma

    return Chroma


def _import_clarifai() -> Any:
    from langchain_community.vectorstores.clarifai import Clarifai

    return Clarifai


def _import_clickhouse() -> Any:
    from langchain_community.vectorstores.clickhouse import Clickhouse

    return Clickhouse


def _import_clickhouse_settings() -> Any:
    from langchain_community.vectorstores.clickhouse import ClickhouseSettings

    return ClickhouseSettings


def _import_dashvector() -> Any:
    from langchain_community.vectorstores.dashvector import DashVector

    return DashVector


def _import_databricks_vector_search() -> Any:
    from langchain_community.vectorstores.databricks_vector_search import (
        DatabricksVectorSearch,
    )

    return DatabricksVectorSearch


def _import_deeplake() -> Any:
    from langchain_community.vectorstores.deeplake import DeepLake

    return DeepLake


def _import_dingo() -> Any:
    from langchain_community.vectorstores.dingo import Dingo

    return Dingo


def _import_docarray_hnsw() -> Any:
    from langchain_community.vectorstores.docarray import DocArrayHnswSearch

    return DocArrayHnswSearch


def _import_docarray_inmemory() -> Any:
    from langchain_community.vectorstores.docarray import DocArrayInMemorySearch

    return DocArrayInMemorySearch


def _import_documentdb() -> Any:
    from langchain_community.vectorstores.documentdb import (
        DocumentDBVectorSearch,
    )

    return DocumentDBVectorSearch


def _import_elasticsearch() -> Any:
    from langchain_community.vectorstores.elasticsearch import ElasticsearchStore

    return ElasticsearchStore


def _import_epsilla() -> Any:
    from langchain_community.vectorstores.epsilla import Epsilla

    return Epsilla


def _import_faiss() -> Any:
    from langchain_community.vectorstores.faiss import FAISS

    return FAISS


def _import_hanavector() -> Any:
    from langchain_community.vectorstores.hanavector import HanaDB

    return HanaDB


def _import_kinetica() -> Any:
    from langchain_community.vectorstores.kinetica import Kinetica

    return Kinetica


def _import_kinetica_settings() -> Any:
    from langchain_community.vectorstores.kinetica import KineticaSettings

    return KineticaSettings


def _import_distance_strategy() -> Any:
    from langchain_community.vectorstores.kinetica import DistanceStrategy

    return DistanceStrategy


def _import_hologres() -> Any:
    from langchain_community.vectorstores.hologres import Hologres

    return Hologres


def _import_infinispanvs() -> Any:
    from langchain_community.vectorstores.infinispanvs import InfinispanVS

    return InfinispanVS


def _import_kdbai() -> Any:
    from langchain_community.vectorstores.kdbai import KDBAI

    return KDBAI


def _import_lancedb() -> Any:
    from langchain_community.vectorstores.lancedb import LanceDB

    return LanceDB


def _import_llm_rails() -> Any:
    from langchain_community.vectorstores.llm_rails import LLMRails

    return LLMRails


def _import_marqo() -> Any:
    from langchain_community.vectorstores.marqo import Marqo

    return Marqo


def _import_matching_engine() -> Any:
    from langchain_community.vectorstores.matching_engine import MatchingEngine

    return MatchingEngine


def _import_meilisearch() -> Any:
    from langchain_community.vectorstores.meilisearch import Meilisearch

    return Meilisearch


def _import_milvus() -> Any:
    from langchain_community.vectorstores.milvus import Milvus

    return Milvus


def _import_momento_vector_index() -> Any:
    from langchain_community.vectorstores.momento_vector_index import MomentoVectorIndex

    return MomentoVectorIndex


def _import_mongodb_atlas() -> Any:
    from langchain_community.vectorstores.mongodb_atlas import MongoDBAtlasVectorSearch

    return MongoDBAtlasVectorSearch


def _import_myscale() -> Any:
    from langchain_community.vectorstores.myscale import MyScale

    return MyScale


def _import_myscale_settings() -> Any:
    from langchain_community.vectorstores.myscale import MyScaleSettings

    return MyScaleSettings


def _import_neo4j_vector() -> Any:
    from langchain_community.vectorstores.neo4j_vector import Neo4jVector

    return Neo4jVector


def _import_opensearch_vector_search() -> Any:
    from langchain_community.vectorstores.opensearch_vector_search import (
        OpenSearchVectorSearch,
    )

    return OpenSearchVectorSearch


def _import_pgembedding() -> Any:
    from langchain_community.vectorstores.pgembedding import PGEmbedding

    return PGEmbedding


def _import_pgvector() -> Any:
    from langchain_community.vectorstores.pgvector import PGVector

    return PGVector


def _import_pinecone() -> Any:
    from langchain_community.vectorstores.pinecone import Pinecone

    return Pinecone


def _import_qdrant() -> Any:
    from langchain_community.vectorstores.qdrant import Qdrant

    return Qdrant


def _import_redis() -> Any:
    from langchain_community.vectorstores.redis import Redis

    return Redis


def _import_rocksetdb() -> Any:
    from langchain_community.vectorstores.rocksetdb import Rockset

    return Rockset


def _import_vespa() -> Any:
    from langchain_community.vectorstores.vespa import VespaStore

    return VespaStore


def _import_scann() -> Any:
    from langchain_community.vectorstores.scann import ScaNN

    return ScaNN


def _import_semadb() -> Any:
    from langchain_community.vectorstores.semadb import SemaDB

    return SemaDB


def _import_singlestoredb() -> Any:
    from langchain_community.vectorstores.singlestoredb import SingleStoreDB

    return SingleStoreDB


def _import_sklearn() -> Any:
    from langchain_community.vectorstores.sklearn import SKLearnVectorStore

    return SKLearnVectorStore


def _import_sqlitevss() -> Any:
    from langchain_community.vectorstores.sqlitevss import SQLiteVSS

    return SQLiteVSS


def _import_starrocks() -> Any:
    from langchain_community.vectorstores.starrocks import StarRocks

    return StarRocks


def _import_supabase() -> Any:
    from langchain_community.vectorstores.supabase import SupabaseVectorStore

    return SupabaseVectorStore


def _import_surrealdb() -> Any:
    from langchain_community.vectorstores.surrealdb import SurrealDBStore

    return SurrealDBStore


def _import_tair() -> Any:
    from langchain_community.vectorstores.tair import Tair

    return Tair


def _import_tencentvectordb() -> Any:
    from langchain_community.vectorstores.tencentvectordb import TencentVectorDB

    return TencentVectorDB


def _import_tidb_vectorstore() -> Any:
    from langchain_community.vectorstores.tidb_vector import TiDBVectorStore

    return TiDBVectorStore


def _import_tiledb() -> Any:
    from langchain_community.vectorstores.tiledb import TileDB

    return TileDB


def _import_tigris() -> Any:
    from langchain_community.vectorstores.tigris import Tigris

    return Tigris


def _import_timescalevector() -> Any:
    from langchain_community.vectorstores.timescalevector import TimescaleVector

    return TimescaleVector


def _import_typesense() -> Any:
    from langchain_community.vectorstores.typesense import Typesense

    return Typesense


def _import_usearch() -> Any:
    from langchain_community.vectorstores.usearch import USearch

    return USearch


def _import_vald() -> Any:
    from langchain_community.vectorstores.vald import Vald

    return Vald


def _import_vearch() -> Any:
    from langchain_community.vectorstores.vearch import Vearch

    return Vearch


def _import_vectara() -> Any:
    from langchain_community.vectorstores.vectara import Vectara

    return Vectara


def _import_weaviate() -> Any:
    from langchain_community.vectorstores.weaviate import Weaviate

    return Weaviate


def _import_yellowbrick() -> Any:
    from langchain_community.vectorstores.yellowbrick import Yellowbrick

    return Yellowbrick


def _import_zep() -> Any:
    from langchain_community.vectorstores.zep import ZepVectorStore

    return ZepVectorStore


def _import_zilliz() -> Any:
    from langchain_community.vectorstores.zilliz import Zilliz

    return Zilliz


def _import_neuraldb() -> Any:
    from langchain_community.vectorstores.thirdai_neuraldb import NeuralDBVectorStore
|
||||
|
||||
return NeuralDBVectorStore
|
||||
|
||||
|
||||
def _import_lantern() -> Any:
|
||||
from langchain_community.vectorstores.lantern import Lantern
|
||||
|
||||
return Lantern
|
||||
_module_lookup = {
    "AlibabaCloudOpenSearch": "langchain_community.vectorstores.alibabacloud_opensearch", # noqa: E501
    "AlibabaCloudOpenSearchSettings": "langchain_community.vectorstores.alibabacloud_opensearch", # noqa: E501
    "AnalyticDB": "langchain_community.vectorstores.analyticdb",
    "Annoy": "langchain_community.vectorstores.annoy",
    "ApacheDoris": "langchain_community.vectorstores.apache_doris",
    "AstraDB": "langchain_community.vectorstores.astradb",
    "AtlasDB": "langchain_community.vectorstores.atlas",
    "AwaDB": "langchain_community.vectorstores.awadb",
    "AzureCosmosDBVectorSearch": "langchain_community.vectorstores.azure_cosmos_db",
    "AzureSearch": "langchain_community.vectorstores.azuresearch",
    "BESVectorStore": "langchain_community.vectorstores.baiducloud_vector_search",
    "Bagel": "langchain_community.vectorstores.bageldb",
    "BigQueryVectorSearch": "langchain_community.vectorstores.bigquery_vector_search",
    "Cassandra": "langchain_community.vectorstores.cassandra",
    "Chroma": "langchain_community.vectorstores.chroma",
    "Clarifai": "langchain_community.vectorstores.clarifai",
    "Clickhouse": "langchain_community.vectorstores.clickhouse",
    "ClickhouseSettings": "langchain_community.vectorstores.clickhouse",
    "DashVector": "langchain_community.vectorstores.dashvector",
    "DatabricksVectorSearch": "langchain_community.vectorstores.databricks_vector_search", # noqa: E501
    "DeepLake": "langchain_community.vectorstores.deeplake",
    "Dingo": "langchain_community.vectorstores.dingo",
    "DistanceStrategy": "langchain_community.vectorstores.kinetica",
    "DocArrayHnswSearch": "langchain_community.vectorstores.docarray",
    "DocArrayInMemorySearch": "langchain_community.vectorstores.docarray",
    "DocumentDBVectorSearch": "langchain_community.vectorstores.documentdb",
    "ElasticKnnSearch": "langchain_community.vectorstores.elastic_vector_search",
    "ElasticVectorSearch": "langchain_community.vectorstores.elastic_vector_search",
    "ElasticsearchStore": "langchain_community.vectorstores.elasticsearch",
    "Epsilla": "langchain_community.vectorstores.epsilla",
    "FAISS": "langchain_community.vectorstores.faiss",
    "HanaDB": "langchain_community.vectorstores.hanavector",
    "Hologres": "langchain_community.vectorstores.hologres",
    "InfinispanVS": "langchain_community.vectorstores.infinispanvs",
    "KDBAI": "langchain_community.vectorstores.kdbai",
    "Kinetica": "langchain_community.vectorstores.kinetica",
    "KineticaSettings": "langchain_community.vectorstores.kinetica",
    "LLMRails": "langchain_community.vectorstores.llm_rails",
    "LanceDB": "langchain_community.vectorstores.lancedb",
    "Lantern": "langchain_community.vectorstores.lantern",
    "Marqo": "langchain_community.vectorstores.marqo",
    "MatchingEngine": "langchain_community.vectorstores.matching_engine",
    "Meilisearch": "langchain_community.vectorstores.meilisearch",
    "Milvus": "langchain_community.vectorstores.milvus",
    "MomentoVectorIndex": "langchain_community.vectorstores.momento_vector_index",
    "MongoDBAtlasVectorSearch": "langchain_community.vectorstores.mongodb_atlas",
    "MyScale": "langchain_community.vectorstores.myscale",
    "MyScaleSettings": "langchain_community.vectorstores.myscale",
    "Neo4jVector": "langchain_community.vectorstores.neo4j_vector",
    "NeuralDBVectorStore": "langchain_community.vectorstores.thirdai_neuraldb",
    "OpenSearchVectorSearch": "langchain_community.vectorstores.opensearch_vector_search", # noqa: E501
    "PGEmbedding": "langchain_community.vectorstores.pgembedding",
    "PGVector": "langchain_community.vectorstores.pgvector",
    "Pinecone": "langchain_community.vectorstores.pinecone",
    "Qdrant": "langchain_community.vectorstores.qdrant",
    "Redis": "langchain_community.vectorstores.redis",
    "Rockset": "langchain_community.vectorstores.rocksetdb",
    "SKLearnVectorStore": "langchain_community.vectorstores.sklearn",
    "SQLiteVSS": "langchain_community.vectorstores.sqlitevss",
    "ScaNN": "langchain_community.vectorstores.scann",
    "SemaDB": "langchain_community.vectorstores.semadb",
    "SingleStoreDB": "langchain_community.vectorstores.singlestoredb",
    "StarRocks": "langchain_community.vectorstores.starrocks",
    "SupabaseVectorStore": "langchain_community.vectorstores.supabase",
    "SurrealDBStore": "langchain_community.vectorstores.surrealdb",
    "Tair": "langchain_community.vectorstores.tair",
    "TencentVectorDB": "langchain_community.vectorstores.tencentvectordb",
    "TiDBVectorStore": "langchain_community.vectorstores.tidb_vector",
    "Tigris": "langchain_community.vectorstores.tigris",
    "TileDB": "langchain_community.vectorstores.tiledb",
    "TimescaleVector": "langchain_community.vectorstores.timescalevector",
    "Typesense": "langchain_community.vectorstores.typesense",
    "USearch": "langchain_community.vectorstores.usearch",
    "Vald": "langchain_community.vectorstores.vald",
    "Vearch": "langchain_community.vectorstores.vearch",
    "Vectara": "langchain_community.vectorstores.vectara",
    "VectorStore": "langchain_core.vectorstores",
    "VespaStore": "langchain_community.vectorstores.vespa",
    "Weaviate": "langchain_community.vectorstores.weaviate",
    "Yellowbrick": "langchain_community.vectorstores.yellowbrick",
    "ZepVectorStore": "langchain_community.vectorstores.zep",
    "Zilliz": "langchain_community.vectorstores.zilliz",
}
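For orientation, a minimal sketch of how one entry in this table resolves at runtime; "FAISS" is just one example key from the mapping above, and the snippet assumes langchain_community is installed:

    import importlib

    # The key is the public class name; the value is the module that defines it.
    module = importlib.import_module("langchain_community.vectorstores.faiss")
    faiss_cls = getattr(module, "FAISS")  # what __getattr__ below hands back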


def __getattr__(name: str) -> Any:
    if name == "AnalyticDB":
        return _import_analyticdb()
    elif name == "AlibabaCloudOpenSearch":
        return _import_alibaba_cloud_open_search()
    elif name == "AlibabaCloudOpenSearchSettings":
        return _import_alibaba_cloud_open_search_settings()
    elif name == "AzureCosmosDBVectorSearch":
        return _import_azure_cosmos_db()
    elif name == "ElasticKnnSearch":
        return _import_elastic_knn_search()
    elif name == "ElasticVectorSearch":
        return _import_elastic_vector_search()
    elif name == "Annoy":
        return _import_annoy()
    elif name == "ApacheDoris":
        return _import_apache_doris()
    elif name == "AtlasDB":
        return _import_atlas()
    elif name == "AwaDB":
        return _import_awadb()
    elif name == "AzureSearch":
        return _import_azuresearch()
    elif name == "Bagel":
        return _import_bageldb()
    elif name == "BigQueryVectorSearch":
        return _import_bigquery()
    elif name == "BESVectorStore":
        return _import_baiducloud_vector_search()
    elif name == "Cassandra":
        return _import_cassandra()
    elif name == "AstraDB":
        return _import_astradb()
    elif name == "Chroma":
        return _import_chroma()
    elif name == "Clarifai":
        return _import_clarifai()
    elif name == "ClickhouseSettings":
        return _import_clickhouse_settings()
    elif name == "Clickhouse":
        return _import_clickhouse()
    elif name == "DashVector":
        return _import_dashvector()
    elif name == "DatabricksVectorSearch":
        return _import_databricks_vector_search()
    elif name == "DeepLake":
        return _import_deeplake()
    elif name == "Dingo":
        return _import_dingo()
    elif name == "DocArrayInMemorySearch":
        return _import_docarray_inmemory()
    elif name == "DocumentDBVectorSearch":
        return _import_documentdb()
    elif name == "DocArrayHnswSearch":
        return _import_docarray_hnsw()
    elif name == "ElasticsearchStore":
        return _import_elasticsearch()
    elif name == "Epsilla":
        return _import_epsilla()
    elif name == "FAISS":
        return _import_faiss()
    elif name == "HanaDB":
        return _import_hanavector()
    elif name == "Hologres":
        return _import_hologres()
    elif name == "InfinispanVS":
        return _import_infinispanvs()
    elif name == "KDBAI":
        return _import_kdbai()
    elif name == "DistanceStrategy":
        return _import_distance_strategy()
    elif name == "KineticaSettings":
        return _import_kinetica_settings()
    elif name == "Kinetica":
        return _import_kinetica()
    elif name == "LanceDB":
        return _import_lancedb()
    elif name == "LLMRails":
        return _import_llm_rails()
    elif name == "Marqo":
        return _import_marqo()
    elif name == "MatchingEngine":
        return _import_matching_engine()
    elif name == "Meilisearch":
        return _import_meilisearch()
    elif name == "Milvus":
        return _import_milvus()
    elif name == "MomentoVectorIndex":
        return _import_momento_vector_index()
    elif name == "MongoDBAtlasVectorSearch":
        return _import_mongodb_atlas()
    elif name == "MyScaleSettings":
        return _import_myscale_settings()
    elif name == "MyScale":
        return _import_myscale()
    elif name == "Neo4jVector":
        return _import_neo4j_vector()
    elif name == "OpenSearchVectorSearch":
        return _import_opensearch_vector_search()
    elif name == "PGEmbedding":
        return _import_pgembedding()
    elif name == "PGVector":
        return _import_pgvector()
    elif name == "Pinecone":
        return _import_pinecone()
    elif name == "Qdrant":
        return _import_qdrant()
    elif name == "Redis":
        return _import_redis()
    elif name == "Rockset":
        return _import_rocksetdb()
    elif name == "ScaNN":
        return _import_scann()
    elif name == "SemaDB":
        return _import_semadb()
    elif name == "SingleStoreDB":
        return _import_singlestoredb()
    elif name == "SKLearnVectorStore":
        return _import_sklearn()
    elif name == "SQLiteVSS":
        return _import_sqlitevss()
    elif name == "StarRocks":
        return _import_starrocks()
    elif name == "SupabaseVectorStore":
        return _import_supabase()
    elif name == "SurrealDBStore":
        return _import_surrealdb()
    elif name == "Tair":
        return _import_tair()
    elif name == "TencentVectorDB":
        return _import_tencentvectordb()
    elif name == "TiDBVectorStore":
        return _import_tidb_vectorstore()
    elif name == "TileDB":
        return _import_tiledb()
    elif name == "Tigris":
        return _import_tigris()
    elif name == "TimescaleVector":
        return _import_timescalevector()
    elif name == "Typesense":
        return _import_typesense()
    elif name == "USearch":
        return _import_usearch()
    elif name == "Vald":
        return _import_vald()
    elif name == "Vearch":
        return _import_vearch()
    elif name == "Vectara":
        return _import_vectara()
    elif name == "Weaviate":
        return _import_weaviate()
    elif name == "Yellowbrick":
        return _import_yellowbrick()
    elif name == "ZepVectorStore":
        return _import_zep()
    elif name == "Zilliz":
        return _import_zilliz()
    elif name == "VespaStore":
        return _import_vespa()
    elif name == "NeuralDBVectorStore":
        return _import_neuraldb()
    elif name == "Lantern":
        return _import_lantern()
    else:
        raise AttributeError(f"Could not find: {name}")
    if name in _module_lookup:
        module = importlib.import_module(_module_lookup[name])
        return getattr(module, name)
    raise AttributeError(f"module {__name__} has no attribute {name}")
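This is the PEP 562 module-level __getattr__ hook. A common companion to it (an assumption here, not part of this change) is a module-level __dir__ so the lazy names still show up in dir() and tab completion:

    from typing import List

    def __dir__() -> List[str]:
        # Advertise the lazily importable names alongside the real module
        # globals; this helper is a sketch, not code from the diff above.
        return sorted(set(globals()) | set(_module_lookup))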


__all__ = [
    "AlibabaCloudOpenSearch",
    "AlibabaCloudOpenSearchSettings",
    "AnalyticDB",
    "Annoy",
    "ApacheDoris",
    "AtlasDB",
    "AwaDB",
    "AzureSearch",
    "Bagel",
    "Cassandra",
    "AstraDB",
    "Chroma",
    "Clarifai",
    "Clickhouse",
    "ClickhouseSettings",
    "DashVector",
    "DatabricksVectorSearch",
    "DeepLake",
    "Dingo",
    "DocArrayHnswSearch",
    "DocArrayInMemorySearch",
    "ElasticKnnSearch",
    "ElasticVectorSearch",
    "ElasticsearchStore",
    "Epsilla",
    "FAISS",
    "HanaDB",
    "Hologres",
    "InfinispanVS",
    "KDBAI",
    "DistanceStrategy",
    "Kinetica",
    "KineticaSettings",
    "LanceDB",
    "LLMRails",
    "Marqo",
    "MatchingEngine",
    "Meilisearch",
    "Milvus",
    "MomentoVectorIndex",
    "MongoDBAtlasVectorSearch",
    "MyScale",
    "MyScaleSettings",
    "Neo4jVector",
    "OpenSearchVectorSearch",
    "PGEmbedding",
    "PGVector",
    "Pinecone",
    "Qdrant",
    "Redis",
    "Rockset",
    "SKLearnVectorStore",
    "ScaNN",
    "SemaDB",
    "SingleStoreDB",
    "SQLiteVSS",
    "StarRocks",
    "SupabaseVectorStore",
    "SurrealDBStore",
    "Tair",
    "TiDBVectorStore",
    "TileDB",
    "Tigris",
    "TimescaleVector",
    "Typesense",
    "USearch",
    "Vald",
    "Vearch",
    "Vectara",
    "VespaStore",
    "Weaviate",
    "Yellowbrick",
    "ZepVectorStore",
    "Zilliz",
    "TencentVectorDB",
    "AzureCosmosDBVectorSearch",
    "VectorStore",
    "NeuralDBVectorStore",
    "Lantern",
    "DocumentDBVectorSearch",
]
__all__ = list(_module_lookup.keys())
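Deriving __all__ from the lookup table keeps the public surface and the lazy loader in sync by construction. A quick hedged smoke test (resolving a given name may still require that provider's optional dependencies):

    import langchain_community.vectorstores as vs

    assert "FAISS" in vs.__all__
    # Attribute access routes through __getattr__ and imports the
    # provider module on first use.
    assert vs.FAISS is not None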

@@ -89,6 +89,7 @@ EXPECTED_ALL = [
    "PolygonLastQuote",
    "PolygonTickerNews",
    "RedditSearchRun",
    "RedditSearchSchema",
    "QueryCheckerTool",
    "QueryPowerBITool",
    "QuerySQLCheckerTool",

@@ -91,6 +91,7 @@ _EXPECTED = [
    "PolygonLastQuote",
    "PolygonTickerNews",
    "RedditSearchRun",
    "RedditSearchSchema",
    "QueryCheckerTool",
    "QueryPowerBITool",
    "QuerySQLCheckerTool",

@@ -24,6 +24,9 @@ EXPECTED_ALL = [
    "MaxComputeAPIWrapper",
    "MetaphorSearchAPIWrapper",
    "NasaAPIWrapper",
    "RivaASR",
    "RivaTTS",
    "AudioStream",
    "NVIDIARivaASR",
    "NVIDIARivaTTS",
    "NVIDIARivaStream",

@@ -51,6 +51,8 @@ def test_compatible_vectorstore_documentation() -> None:
        "AzureCosmosDBVectorSearch",
        "AwaDB",
        "Bagel",
        "BESVectorStore",
        "BigQueryVectorSearch",
        "Cassandra",
        "Chroma",
        "DashVector",

@@ -11,6 +11,8 @@ _EXPECTED = [
    "AwaDB",
    "AzureSearch",
    "Bagel",
    "BESVectorStore",
    "BigQueryVectorSearch",
    "Cassandra",
    "AstraDB",
    "Chroma",