Mirror of https://github.com/hwchase17/langchain.git (synced 2025-06-22 06:39:52 +00:00)

community[patch]: speed up import times in the community package (#18928)

This PR speeds up import times in the community package by replacing the eager module-level imports in a number of __init__.py files with lazy, on-demand imports: each package keeps a _module_lookup table that maps every exported name to the submodule that defines it, and a module-level __getattr__ (PEP 562) imports that submodule only when the name is first accessed.

Commit 11195cfa42 (parent a7fc731720).
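The change is transparent to callers: from langchain_community.chat_models import ChatOpenAI still works, the difference is that the submodule is imported on first access rather than when the package itself is imported. As a rough illustration (not part of the PR, and the absolute numbers depend on which optional dependencies are installed), the effect can be observed with python -X importtime or with a small timing script like this sketch:

# Illustrative only: assumes langchain_community is installed; numbers will vary.
import importlib
import time

start = time.perf_counter()
chat_models = importlib.import_module("langchain_community.chat_models")
print(f"import langchain_community.chat_models: {time.perf_counter() - start:.3f}s")

# With the lazy __getattr__, the submodule that defines ChatOpenAI is only
# imported here, on first attribute access, not at package-import time.
start = time.perf_counter()
ChatOpenAI = chat_models.ChatOpenAI
print(f"first access to ChatOpenAI: {time.perf_counter() - start:.3f}s")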
@@ -6,63 +6,42 @@
File: langchain_community/callbacks/__init__.py

The eager per-handler imports (from langchain_community.callbacks.<module> import <Handler>) and the hand-written __all__ list are removed; the same names and module paths move into the lazy lookup table. After the unchanged tail of the module docstring the file now reads:

    BaseCallbackHandler --> <name>CallbackHandler # Example: AimCallbackHandler
"""

import importlib
from typing import Any

_module_lookup = {
    "AimCallbackHandler": "langchain_community.callbacks.aim_callback",
    "ArgillaCallbackHandler": "langchain_community.callbacks.argilla_callback",
    "ArizeCallbackHandler": "langchain_community.callbacks.arize_callback",
    "ArthurCallbackHandler": "langchain_community.callbacks.arthur_callback",
    "ClearMLCallbackHandler": "langchain_community.callbacks.clearml_callback",
    "CometCallbackHandler": "langchain_community.callbacks.comet_ml_callback",
    "ContextCallbackHandler": "langchain_community.callbacks.context_callback",
    "FiddlerCallbackHandler": "langchain_community.callbacks.fiddler_callback",
    "FlyteCallbackHandler": "langchain_community.callbacks.flyte_callback",
    "HumanApprovalCallbackHandler": "langchain_community.callbacks.human",
    "InfinoCallbackHandler": "langchain_community.callbacks.infino_callback",
    "LLMThoughtLabeler": "langchain_community.callbacks.streamlit",
    "LLMonitorCallbackHandler": "langchain_community.callbacks.llmonitor_callback",
    "LabelStudioCallbackHandler": "langchain_community.callbacks.labelstudio_callback",
    "MlflowCallbackHandler": "langchain_community.callbacks.mlflow_callback",
    "OpenAICallbackHandler": "langchain_community.callbacks.openai_info",
    "PromptLayerCallbackHandler": "langchain_community.callbacks.promptlayer_callback",
    "SageMakerCallbackHandler": "langchain_community.callbacks.sagemaker_callback",
    "StreamlitCallbackHandler": "langchain_community.callbacks.streamlit",
    "TrubricsCallbackHandler": "langchain_community.callbacks.trubrics_callback",
    "WandbCallbackHandler": "langchain_community.callbacks.wandb_callback",
    "WhyLabsCallbackHandler": "langchain_community.callbacks.whylabs_callback",
    "get_openai_callback": "langchain_community.callbacks.manager",
    "wandb_tracing_enabled": "langchain_community.callbacks.manager",
}


def __getattr__(name: str) -> Any:
    if name in _module_lookup:
        module = importlib.import_module(_module_lookup[name])
        return getattr(module, name)
    raise AttributeError(f"module {__name__} has no attribute {name}")


__all__ = list(_module_lookup.keys())
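One side effect of the pattern worth knowing about (it is not addressed in this PR): until a lazy name has been accessed once it is not in the package's __dict__, so dir() on the package, and completion that relies on it, no longer lists the name even though it remains in __all__. PEP 562 also allows a module-level __dir__; a minimal sketch of how the listing could be restored, shown with a hypothetical two-entry table rather than the real one:

# Sketch only: a __dir__ that advertises lazily importable names (PEP 562).
# In practice it would sit in the same __init__.py that defines _module_lookup.
import importlib
from typing import Any, List

_module_lookup = {  # hypothetical two-entry table, for illustration
    "AimCallbackHandler": "langchain_community.callbacks.aim_callback",
    "WandbCallbackHandler": "langchain_community.callbacks.wandb_callback",
}


def __getattr__(name: str) -> Any:
    if name in _module_lookup:
        return getattr(importlib.import_module(_module_lookup[name]), name)
    raise AttributeError(f"module {__name__} has no attribute {name}")


def __dir__() -> List[str]:
    # List lazily importable names alongside what already exists in the module,
    # so dir() shows them before first access.
    return sorted(set(globals()) | set(_module_lookup))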
@@ -17,3 +17,29 @@ WhatsApp. The loaded chat messages can be used for fine-tuning models.
File: langchain_community/chat_loaders/__init__.py

Every line in this hunk is an addition; the unchanged context is the end of the module docstring:

    ChatSession
""" # noqa: E501

import importlib
from typing import Any

_module_lookup = {
    "BaseChatLoader": "langchain_community.chat_loaders.base",
    "FolderFacebookMessengerChatLoader": "langchain_community.chat_loaders.facebook_messenger",  # noqa: E501
    "GMailLoader": "langchain_community.chat_loaders.gmail",
    "IMessageChatLoader": "langchain_community.chat_loaders.imessage",
    "LangSmithDatasetChatLoader": "langchain_community.chat_loaders.langsmith",
    "LangSmithRunChatLoader": "langchain_community.chat_loaders.langsmith",
    "SingleFileFacebookMessengerChatLoader": "langchain_community.chat_loaders.facebook_messenger",  # noqa: E501
    "SlackChatLoader": "langchain_community.chat_loaders.slack",
    "TelegramChatLoader": "langchain_community.chat_loaders.telegram",
    "WhatsAppChatLoader": "langchain_community.chat_loaders.whatsapp",
}

(followed by the same __getattr__ and __all__ = list(_module_lookup.keys()) block as in the callbacks hunk above)
@@ -15,70 +15,39 @@
File: langchain_community/chat_message_histories/__init__.py

The eager from langchain_community.chat_message_histories.<module> import <Class> statements and the explicit __all__ list are removed; the same names and module paths become the lazy lookup table. After the unchanged closing line of the module docstring the file continues:

""" # noqa: E501

import importlib
from typing import Any

_module_lookup = {
    "AstraDBChatMessageHistory": "langchain_community.chat_message_histories.astradb",
    "CassandraChatMessageHistory": "langchain_community.chat_message_histories.cassandra",  # noqa: E501
    "ChatMessageHistory": "langchain_community.chat_message_histories.in_memory",
    "CosmosDBChatMessageHistory": "langchain_community.chat_message_histories.cosmos_db",  # noqa: E501
    "DynamoDBChatMessageHistory": "langchain_community.chat_message_histories.dynamodb",
    "ElasticsearchChatMessageHistory": "langchain_community.chat_message_histories.elasticsearch",  # noqa: E501
    "FileChatMessageHistory": "langchain_community.chat_message_histories.file",
    "FirestoreChatMessageHistory": "langchain_community.chat_message_histories.firestore",  # noqa: E501
    "MomentoChatMessageHistory": "langchain_community.chat_message_histories.momento",
    "MongoDBChatMessageHistory": "langchain_community.chat_message_histories.mongodb",
    "Neo4jChatMessageHistory": "langchain_community.chat_message_histories.neo4j",
    "PostgresChatMessageHistory": "langchain_community.chat_message_histories.postgres",
    "RedisChatMessageHistory": "langchain_community.chat_message_histories.redis",
    "RocksetChatMessageHistory": "langchain_community.chat_message_histories.rocksetdb",
    "SQLChatMessageHistory": "langchain_community.chat_message_histories.sql",
    "SingleStoreDBChatMessageHistory": "langchain_community.chat_message_histories.singlestoredb",  # noqa: E501
    "StreamlitChatMessageHistory": "langchain_community.chat_message_histories.streamlit",  # noqa: E501
    "TiDBChatMessageHistory": "langchain_community.chat_message_histories.tidb",
    "UpstashRedisChatMessageHistory": "langchain_community.chat_message_histories.upstash_redis",  # noqa: E501
    "XataChatMessageHistory": "langchain_community.chat_message_histories.xata",
    "ZepChatMessageHistory": "langchain_community.chat_message_histories.zep",
}

(followed by the same __getattr__ and __all__ = list(_module_lookup.keys()) block as in the callbacks hunk above)
@@ -17,92 +17,61 @@ an interface where "chat messages" are the inputs and outputs.
File: langchain_community/chat_models/__init__.py

The eager per-model imports and the explicit __all__ list are removed; the same names and module paths become the lazy lookup table. After the unchanged end of the module docstring the file continues:

    AIMessage, BaseMessage, HumanMessage
""" # noqa: E501

import importlib
from typing import Any

_module_lookup = {
    "AzureChatOpenAI": "langchain_community.chat_models.azure_openai",
    "BedrockChat": "langchain_community.chat_models.bedrock",
    "ChatAnthropic": "langchain_community.chat_models.anthropic",
    "ChatAnyscale": "langchain_community.chat_models.anyscale",
    "ChatBaichuan": "langchain_community.chat_models.baichuan",
    "ChatCohere": "langchain_community.chat_models.cohere",
    "ChatDatabricks": "langchain_community.chat_models.databricks",
    "ChatDeepInfra": "langchain_community.chat_models.deepinfra",
    "ChatEverlyAI": "langchain_community.chat_models.everlyai",
    "ChatFireworks": "langchain_community.chat_models.fireworks",
    "ChatFriendli": "langchain_community.chat_models.friendli",
    "ChatGooglePalm": "langchain_community.chat_models.google_palm",
    "ChatHuggingFace": "langchain_community.chat_models.huggingface",
    "ChatHunyuan": "langchain_community.chat_models.hunyuan",
    "ChatJavelinAIGateway": "langchain_community.chat_models.javelin_ai_gateway",
    "ChatKinetica": "langchain_community.chat_models.kinetica",
    "ChatKonko": "langchain_community.chat_models.konko",
    "ChatLiteLLM": "langchain_community.chat_models.litellm",
    "ChatLiteLLMRouter": "langchain_community.chat_models.litellm_router",
    "ChatMLflowAIGateway": "langchain_community.chat_models.mlflow_ai_gateway",
    "ChatMaritalk": "langchain_community.chat_models.maritalk",
    "ChatMlflow": "langchain_community.chat_models.mlflow",
    "ChatOllama": "langchain_community.chat_models.ollama",
    "ChatOpenAI": "langchain_community.chat_models.openai",
    "ChatPerplexity": "langchain_community.chat_models.perplexity",
    "ChatSparkLLM": "langchain_community.chat_models.sparkllm",
    "ChatTongyi": "langchain_community.chat_models.tongyi",
    "ChatVertexAI": "langchain_community.chat_models.vertexai",
    "ChatYandexGPT": "langchain_community.chat_models.yandex",
    "ChatYuan2": "langchain_community.chat_models.yuan2",
    "ChatZhipuAI": "langchain_community.chat_models.zhipuai",
    "ErnieBotChat": "langchain_community.chat_models.ernie",
    "FakeListChatModel": "langchain_community.chat_models.fake",
    "GPTRouter": "langchain_community.chat_models.gpt_router",
    "GigaChat": "langchain_community.chat_models.gigachat",
    "HumanInputChatModel": "langchain_community.chat_models.human",
    "JinaChat": "langchain_community.chat_models.jinachat",
    "LlamaEdgeChatService": "langchain_community.chat_models.llama_edge",
    "MiniMaxChat": "langchain_community.chat_models.minimax",
    "PaiEasChatEndpoint": "langchain_community.chat_models.pai_eas_endpoint",
    "PromptLayerChatOpenAI": "langchain_community.chat_models.promptlayer_openai",
    "QianfanChatEndpoint": "langchain_community.chat_models.baidu_qianfan_endpoint",
    "VolcEngineMaasChat": "langchain_community.chat_models.volcengine_maas",
}

(followed by the same __getattr__ and __all__ = list(_module_lookup.keys()) block as in the callbacks hunk above)
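Because the exports are now plain strings, a typo in a _module_lookup entry would only surface when a user first accesses that attribute rather than at import time. A sketch of the kind of regression test that could guard against drift between the table and the submodules (illustrative only, not taken from this PR; importing every submodule requires the relevant optional dependencies to be installed):

# Hypothetical check: every entry in _module_lookup should point at a module
# that really defines the exported name. Some submodules import optional
# third-party packages, so treat this as a sketch rather than a CI-ready test.
import importlib

from langchain_community import chat_models


def test_module_lookup_is_consistent() -> None:
    for name, module_path in chat_models._module_lookup.items():
        module = importlib.import_module(module_path)
        assert hasattr(module, name), f"{module_path} does not define {name}"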
@@ -14,8 +14,22 @@ The **Docstore** is a simplified version of the Document Loader.
File: langchain_community/docstore/__init__.py

The three eager imports and the explicit __all__ = ["DocstoreFn", "InMemoryDocstore", "Wikipedia"] are removed; after the unchanged end of the module docstring the file continues:

    Document, AddableMixin
"""

import importlib
from typing import Any

_module_lookup = {
    "DocstoreFn": "langchain_community.docstore.arbitrary_fn",
    "InMemoryDocstore": "langchain_community.docstore.in_memory",
    "Wikipedia": "langchain_community.docstore.wikipedia",
}

(followed by the same __getattr__ and __all__ = list(_module_lookup.keys()) block as in the callbacks hunk above)
@@ -1,7 +1,16 @@
File: langchain_community/document_compressors/__init__.py

The eager import of LLMLinguaCompressor and the one-element __all__ list are removed; the file becomes:

import importlib
from typing import Any

_module_lookup = {
    "LLMLinguaCompressor": "langchain_community.document_compressors.llmlingua_filter",
}

(followed by the same __getattr__ and __all__ = list(_module_lookup.keys()) block as in the callbacks hunk above)
@@ -14,415 +14,197 @@
File: langchain_community/document_loaders/__init__.py

The long block of eager loader imports, the explicit __all__ list of the same names, and the two legacy aliases previously defined at the bottom of the file (PagedPDFSplitter = PyPDFLoader and TelegramChatLoader = TelegramChatFileLoader) are removed; the aliases are re-created in pdf.py and telegram.py (see the last two hunks), and their names keep resolving through the lazy lookup table. After the unchanged end of the module docstring the file continues:

    Document, <name>TextSplitter
"""

import importlib
from typing import Any

_module_lookup = {
    "AZLyricsLoader": "langchain_community.document_loaders.azlyrics",
    "AcreomLoader": "langchain_community.document_loaders.acreom",
    "AirbyteCDKLoader": "langchain_community.document_loaders.airbyte",
    "AirbyteGongLoader": "langchain_community.document_loaders.airbyte",
    "AirbyteHubspotLoader": "langchain_community.document_loaders.airbyte",
    "AirbyteJSONLoader": "langchain_community.document_loaders.airbyte_json",
    "AirbyteSalesforceLoader": "langchain_community.document_loaders.airbyte",
    "AirbyteShopifyLoader": "langchain_community.document_loaders.airbyte",
    "AirbyteStripeLoader": "langchain_community.document_loaders.airbyte",
    "AirbyteTypeformLoader": "langchain_community.document_loaders.airbyte",
    "AirbyteZendeskSupportLoader": "langchain_community.document_loaders.airbyte",
    "AirtableLoader": "langchain_community.document_loaders.airtable",
    "AmazonTextractPDFLoader": "langchain_community.document_loaders.pdf",
    "ApifyDatasetLoader": "langchain_community.document_loaders.apify_dataset",
    "ArcGISLoader": "langchain_community.document_loaders.arcgis_loader",
    "ArxivLoader": "langchain_community.document_loaders.arxiv",
    "AssemblyAIAudioLoaderById": "langchain_community.document_loaders.assemblyai",
    "AssemblyAIAudioTranscriptLoader": "langchain_community.document_loaders.assemblyai",  # noqa: E501
    "AstraDBLoader": "langchain_community.document_loaders.astradb",
    "AsyncChromiumLoader": "langchain_community.document_loaders.chromium",
    "AsyncHtmlLoader": "langchain_community.document_loaders.async_html",
    "AthenaLoader": "langchain_community.document_loaders.athena",
    "AzureAIDataLoader": "langchain_community.document_loaders.azure_ai_data",
    "AzureAIDocumentIntelligenceLoader": "langchain_community.document_loaders.doc_intelligence",  # noqa: E501
    "AzureBlobStorageContainerLoader": "langchain_community.document_loaders.azure_blob_storage_container",  # noqa: E501
    "AzureBlobStorageFileLoader": "langchain_community.document_loaders.azure_blob_storage_file",  # noqa: E501
    "BSHTMLLoader": "langchain_community.document_loaders.html_bs",
    "BibtexLoader": "langchain_community.document_loaders.bibtex",
    "BigQueryLoader": "langchain_community.document_loaders.bigquery",
    "BiliBiliLoader": "langchain_community.document_loaders.bilibili",
    "BlackboardLoader": "langchain_community.document_loaders.blackboard",
    "Blob": "langchain_community.document_loaders.blob_loaders",
    "BlobLoader": "langchain_community.document_loaders.blob_loaders",
    "BlockchainDocumentLoader": "langchain_community.document_loaders.blockchain",
    "BraveSearchLoader": "langchain_community.document_loaders.brave_search",
    "BrowserlessLoader": "langchain_community.document_loaders.browserless",
    "CSVLoader": "langchain_community.document_loaders.csv_loader",
    "CassandraLoader": "langchain_community.document_loaders.cassandra",
    "ChatGPTLoader": "langchain_community.document_loaders.chatgpt",
    "CoNLLULoader": "langchain_community.document_loaders.conllu",
    "CollegeConfidentialLoader": "langchain_community.document_loaders.college_confidential",  # noqa: E501
    "ConcurrentLoader": "langchain_community.document_loaders.concurrent",
    "ConfluenceLoader": "langchain_community.document_loaders.confluence",
    "CouchbaseLoader": "langchain_community.document_loaders.couchbase",
    "CubeSemanticLoader": "langchain_community.document_loaders.cube_semantic",
    "DataFrameLoader": "langchain_community.document_loaders.dataframe",
    "DatadogLogsLoader": "langchain_community.document_loaders.datadog_logs",
    "DiffbotLoader": "langchain_community.document_loaders.diffbot",
    "DirectoryLoader": "langchain_community.document_loaders.directory",
    "DiscordChatLoader": "langchain_community.document_loaders.discord",
    "DocugamiLoader": "langchain_community.document_loaders.docugami",
    "DocusaurusLoader": "langchain_community.document_loaders.docusaurus",
    "Docx2txtLoader": "langchain_community.document_loaders.word_document",
    "DropboxLoader": "langchain_community.document_loaders.dropbox",
    "DuckDBLoader": "langchain_community.document_loaders.duckdb_loader",
    "EtherscanLoader": "langchain_community.document_loaders.etherscan",
    "EverNoteLoader": "langchain_community.document_loaders.evernote",
    "FacebookChatLoader": "langchain_community.document_loaders.facebook_chat",
    "FaunaLoader": "langchain_community.document_loaders.fauna",
    "FigmaFileLoader": "langchain_community.document_loaders.figma",
    "FileSystemBlobLoader": "langchain_community.document_loaders.blob_loaders",
    "GCSDirectoryLoader": "langchain_community.document_loaders.gcs_directory",
    "GCSFileLoader": "langchain_community.document_loaders.gcs_file",
    "GeoDataFrameLoader": "langchain_community.document_loaders.geodataframe",
    "GitHubIssuesLoader": "langchain_community.document_loaders.github",
    "GitLoader": "langchain_community.document_loaders.git",
    "GitbookLoader": "langchain_community.document_loaders.gitbook",
    "GithubFileLoader": "langchain_community.document_loaders.github",
    "GoogleApiClient": "langchain_community.document_loaders.youtube",
    "GoogleApiYoutubeLoader": "langchain_community.document_loaders.youtube",
    "GoogleDriveLoader": "langchain_community.document_loaders.googledrive",
    "GoogleSpeechToTextLoader": "langchain_community.document_loaders.google_speech_to_text",  # noqa: E501
    "GutenbergLoader": "langchain_community.document_loaders.gutenberg",
    "HNLoader": "langchain_community.document_loaders.hn",
    "HuggingFaceDatasetLoader": "langchain_community.document_loaders.hugging_face_dataset",  # noqa: E501
    "HuggingFaceModelLoader": "langchain_community.document_loaders.hugging_face_model",
    "IFixitLoader": "langchain_community.document_loaders.ifixit",
    "IMSDbLoader": "langchain_community.document_loaders.imsdb",
    "ImageCaptionLoader": "langchain_community.document_loaders.image_captions",
    "IuguLoader": "langchain_community.document_loaders.iugu",
    "JSONLoader": "langchain_community.document_loaders.json_loader",
    "JoplinLoader": "langchain_community.document_loaders.joplin",
    "LakeFSLoader": "langchain_community.document_loaders.lakefs",
    "LarkSuiteDocLoader": "langchain_community.document_loaders.larksuite",
    "MHTMLLoader": "langchain_community.document_loaders.mhtml",
    "MWDumpLoader": "langchain_community.document_loaders.mediawikidump",
    "MastodonTootsLoader": "langchain_community.document_loaders.mastodon",
    "MathpixPDFLoader": "langchain_community.document_loaders.pdf",
    "MaxComputeLoader": "langchain_community.document_loaders.max_compute",
    "MergedDataLoader": "langchain_community.document_loaders.merge",
    "ModernTreasuryLoader": "langchain_community.document_loaders.modern_treasury",
    "MongodbLoader": "langchain_community.document_loaders.mongodb",
    "NewsURLLoader": "langchain_community.document_loaders.news",
    "NotebookLoader": "langchain_community.document_loaders.notebook",
    "NotionDBLoader": "langchain_community.document_loaders.notiondb",
    "NotionDirectoryLoader": "langchain_community.document_loaders.notion",
    "OBSDirectoryLoader": "langchain_community.document_loaders.obs_directory",
    "OBSFileLoader": "langchain_community.document_loaders.obs_file",
    "ObsidianLoader": "langchain_community.document_loaders.obsidian",
    "OneDriveFileLoader": "langchain_community.document_loaders.onedrive_file",
    "OneDriveLoader": "langchain_community.document_loaders.onedrive",
    "OnlinePDFLoader": "langchain_community.document_loaders.pdf",
    "OpenCityDataLoader": "langchain_community.document_loaders.open_city_data",
    "OutlookMessageLoader": "langchain_community.document_loaders.email",
    "PDFMinerLoader": "langchain_community.document_loaders.pdf",
    "PDFMinerPDFasHTMLLoader": "langchain_community.document_loaders.pdf",
    "PDFPlumberLoader": "langchain_community.document_loaders.pdf",
    "PagedPDFSplitter": "langchain_community.document_loaders.pdf",
    "PebbloSafeLoader": "langchain_community.document_loaders.pebblo",
    "PlaywrightURLLoader": "langchain_community.document_loaders.url_playwright",
    "PolarsDataFrameLoader": "langchain_community.document_loaders.polars_dataframe",
    "PsychicLoader": "langchain_community.document_loaders.psychic",
    "PubMedLoader": "langchain_community.document_loaders.pubmed",
    "PyMuPDFLoader": "langchain_community.document_loaders.pdf",
    "PyPDFDirectoryLoader": "langchain_community.document_loaders.pdf",
    "PyPDFLoader": "langchain_community.document_loaders.pdf",
    "PyPDFium2Loader": "langchain_community.document_loaders.pdf",
    "PySparkDataFrameLoader": "langchain_community.document_loaders.pyspark_dataframe",
    "PythonLoader": "langchain_community.document_loaders.python",
    "RSSFeedLoader": "langchain_community.document_loaders.rss",
    "ReadTheDocsLoader": "langchain_community.document_loaders.readthedocs",
    "RecursiveUrlLoader": "langchain_community.document_loaders.recursive_url_loader",
    "RedditPostsLoader": "langchain_community.document_loaders.reddit",
    "RoamLoader": "langchain_community.document_loaders.roam",
    "RocksetLoader": "langchain_community.document_loaders.rocksetdb",
    "S3DirectoryLoader": "langchain_community.document_loaders.s3_directory",
    "S3FileLoader": "langchain_community.document_loaders.s3_file",
    "SQLDatabaseLoader": "langchain_community.document_loaders.sql_database",
    "SRTLoader": "langchain_community.document_loaders.srt",
    "SeleniumURLLoader": "langchain_community.document_loaders.url_selenium",
    "SharePointLoader": "langchain_community.document_loaders.sharepoint",
    "SitemapLoader": "langchain_community.document_loaders.sitemap",
    "SlackDirectoryLoader": "langchain_community.document_loaders.slack_directory",
    "SnowflakeLoader": "langchain_community.document_loaders.snowflake_loader",
    "SpreedlyLoader": "langchain_community.document_loaders.spreedly",
    "StripeLoader": "langchain_community.document_loaders.stripe",
    "SurrealDBLoader": "langchain_community.document_loaders.surrealdb",
    "TelegramChatApiLoader": "langchain_community.document_loaders.telegram",
    "TelegramChatFileLoader": "langchain_community.document_loaders.telegram",
    "TelegramChatLoader": "langchain_community.document_loaders.telegram",
    "TencentCOSDirectoryLoader": "langchain_community.document_loaders.tencent_cos_directory",  # noqa: E501
    "TencentCOSFileLoader": "langchain_community.document_loaders.tencent_cos_file",
    "TensorflowDatasetLoader": "langchain_community.document_loaders.tensorflow_datasets",  # noqa: E501
    "TextLoader": "langchain_community.document_loaders.text",
    "TiDBLoader": "langchain_community.document_loaders.tidb",
    "ToMarkdownLoader": "langchain_community.document_loaders.tomarkdown",
    "TomlLoader": "langchain_community.document_loaders.toml",
    "TrelloLoader": "langchain_community.document_loaders.trello",
    "TwitterTweetLoader": "langchain_community.document_loaders.twitter",
    "UnstructuredAPIFileIOLoader": "langchain_community.document_loaders.unstructured",
    "UnstructuredAPIFileLoader": "langchain_community.document_loaders.unstructured",
    "UnstructuredCSVLoader": "langchain_community.document_loaders.csv_loader",
    "UnstructuredEPubLoader": "langchain_community.document_loaders.epub",
    "UnstructuredEmailLoader": "langchain_community.document_loaders.email",
    "UnstructuredExcelLoader": "langchain_community.document_loaders.excel",
    "UnstructuredFileIOLoader": "langchain_community.document_loaders.unstructured",
    "UnstructuredFileLoader": "langchain_community.document_loaders.unstructured",
    "UnstructuredHTMLLoader": "langchain_community.document_loaders.html",
    "UnstructuredImageLoader": "langchain_community.document_loaders.image",
    "UnstructuredMarkdownLoader": "langchain_community.document_loaders.markdown",
    "UnstructuredODTLoader": "langchain_community.document_loaders.odt",
    "UnstructuredOrgModeLoader": "langchain_community.document_loaders.org_mode",
    "UnstructuredPDFLoader": "langchain_community.document_loaders.pdf",
    "UnstructuredPowerPointLoader": "langchain_community.document_loaders.powerpoint",
    "UnstructuredRSTLoader": "langchain_community.document_loaders.rst",
    "UnstructuredRTFLoader": "langchain_community.document_loaders.rtf",
    "UnstructuredTSVLoader": "langchain_community.document_loaders.tsv",
    "UnstructuredURLLoader": "langchain_community.document_loaders.url",
    "UnstructuredWordDocumentLoader": "langchain_community.document_loaders.word_document",  # noqa: E501
    "UnstructuredXMLLoader": "langchain_community.document_loaders.xml",
    "VsdxLoader": "langchain_community.document_loaders.vsdx",
    "WeatherDataLoader": "langchain_community.document_loaders.weather",
    "WebBaseLoader": "langchain_community.document_loaders.web_base",
    "WhatsAppChatLoader": "langchain_community.document_loaders.whatsapp_chat",
    "WikipediaLoader": "langchain_community.document_loaders.wikipedia",
    "XorbitsLoader": "langchain_community.document_loaders.xorbits",
    "YoutubeAudioLoader": "langchain_community.document_loaders.blob_loaders",
    "YoutubeLoader": "langchain_community.document_loaders.youtube",
    "YuqueLoader": "langchain_community.document_loaders.yuque",
}

(followed by the same __getattr__ and __all__ = list(_module_lookup.keys()) block as in the callbacks hunk above)
@@ -1,9 +1,19 @@
File: langchain_community/document_loaders/blob_loaders/__init__.py

The eager imports of Blob, BlobLoader, FileSystemBlobLoader and YoutubeAudioLoader and the explicit __all__ list are removed; the file becomes:

import importlib
from typing import Any

_module_lookup = {
    "Blob": "langchain_community.document_loaders.blob_loaders.schema",
    "BlobLoader": "langchain_community.document_loaders.blob_loaders.schema",
    "FileSystemBlobLoader": "langchain_community.document_loaders.blob_loaders.file_system",  # noqa: E501
    "YoutubeAudioLoader": "langchain_community.document_loaders.blob_loaders.youtube_audio",  # noqa: E501
}

(followed by the same __getattr__ and __all__ = list(_module_lookup.keys()) block as in the callbacks hunk above)
@@ -1,31 +1,27 @@
File: langchain_community/document_loaders/parsers/__init__.py

The eager parser imports and the explicit __all__ list are removed; the file becomes:

import importlib
from typing import Any

_module_lookup = {
    "AzureAIDocumentIntelligenceParser": "langchain_community.document_loaders.parsers.doc_intelligence",  # noqa: E501
    "BS4HTMLParser": "langchain_community.document_loaders.parsers.html",
    "DocAIParser": "langchain_community.document_loaders.parsers.docai",
    "GrobidParser": "langchain_community.document_loaders.parsers.grobid",
    "LanguageParser": "langchain_community.document_loaders.parsers.language",
    "OpenAIWhisperParser": "langchain_community.document_loaders.parsers.audio",
    "PDFMinerParser": "langchain_community.document_loaders.parsers.pdf",
    "PDFPlumberParser": "langchain_community.document_loaders.parsers.pdf",
    "PyMuPDFParser": "langchain_community.document_loaders.parsers.pdf",
    "PyPDFParser": "langchain_community.document_loaders.parsers.pdf",
    "PyPDFium2Parser": "langchain_community.document_loaders.parsers.pdf",
    "VsdxParser": "langchain_community.document_loaders.parsers.vsdx",
}

(followed by the same __getattr__ and __all__ = list(_module_lookup.keys()) block as in the callbacks hunk above)
@@ -780,3 +780,7 @@ class DocumentIntelligenceLoader(BasePDFLoader):
File: langchain_community/document_loaders/pdf.py

Two module-level lines are appended after the unchanged end of the class, giving the alias removed from document_loaders/__init__.py a new home:

        """Lazy load given path as pages."""
        blob = Blob.from_path(self.file_path)
        yield from self.parser.parse(blob)


# Legacy: only for backwards compatibility. Use PyPDFLoader instead
PagedPDFSplitter = PyPDFLoader
@@ -262,3 +262,7 @@ class TelegramChatApiLoader(BaseLoader):
File: langchain_community/document_loaders/telegram.py

Likewise, the TelegramChatLoader alias moves here, appended after the unchanged end of the class:

        combined_texts = self._combine_message_texts(message_threads, df)

        return text_to_docs(combined_texts)


# For backwards compatibility
TelegramChatLoader = TelegramChatFileLoader
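Both legacy names therefore remain importable from langchain_community.document_loaders: the lookup table points them at the pdf and telegram submodules, where the assignments above define them. A small illustration (assuming the optional dependencies those submodules need, such as pypdf, are importable in your environment):

# Illustrative only: the old public names resolve through the lazy lookup table
# to the aliases now defined in pdf.py and telegram.py.
from langchain_community.document_loaders import PagedPDFSplitter, TelegramChatLoader
from langchain_community.document_loaders.pdf import PyPDFLoader
from langchain_community.document_loaders.telegram import TelegramChatFileLoader

assert PagedPDFSplitter is PyPDFLoader
assert TelegramChatLoader is TelegramChatFileLoader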