add import tests to all modules (#12806)
Commit 526313002c (parent 6609a6033f)
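
Every new test file below follows the same pattern: import the module's __all__, pin the expected public surface in an EXPECTED_ALL list, and assert set equality, so any export that is accidentally added or dropped fails the unit tests. A minimal sketch of the pattern, with a placeholder module name that is not part of this commit:

    # Sketch of the import-test pattern used throughout this commit.
    from somepackage.somemodule import __all__  # hypothetical module

    EXPECTED_ALL = [
        "PublicClass",
        "public_function",
    ]


    def test_all_imports() -> None:
        # Set comparison ignores ordering but catches missing or extra exports.
        assert set(__all__) == set(EXPECTED_ALL)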

libs/langchain/tests/unit_tests/_api/test_imports.py (new file, 13 lines)

from langchain._api import __all__

EXPECTED_ALL = [
    "deprecated",
    "LangChainDeprecationWarning",
    "suppress_langchain_deprecation_warning",
    "surface_langchain_deprecation_warnings",
    "warn_deprecated",
]


def test_all_imports() -> None:
    assert set(__all__) == set(EXPECTED_ALL)

libs/langchain/tests/unit_tests/agents/test_imports.py (new file, 42 lines)

from langchain.agents import __all__

EXPECTED_ALL = [
    "Agent",
    "AgentExecutor",
    "AgentExecutorIterator",
    "AgentOutputParser",
    "AgentType",
    "BaseMultiActionAgent",
    "BaseSingleActionAgent",
    "ConversationalAgent",
    "ConversationalChatAgent",
    "LLMSingleActionAgent",
    "MRKLChain",
    "OpenAIFunctionsAgent",
    "OpenAIMultiFunctionsAgent",
    "ReActChain",
    "ReActTextWorldAgent",
    "SelfAskWithSearchChain",
    "StructuredChatAgent",
    "Tool",
    "ZeroShotAgent",
    "create_json_agent",
    "create_openapi_agent",
    "create_pbi_agent",
    "create_pbi_chat_agent",
    "create_spark_sql_agent",
    "create_sql_agent",
    "create_vectorstore_agent",
    "create_vectorstore_router_agent",
    "get_all_tool_names",
    "initialize_agent",
    "load_agent",
    "load_huggingface_tool",
    "load_tools",
    "tool",
    "XMLAgent",
]


def test_all_imports() -> None:
    assert set(__all__) == set(EXPECTED_ALL)

libs/langchain/tests/unit_tests/callbacks/test_imports.py (new file, 40 lines)

from langchain.callbacks import __all__

EXPECTED_ALL = [
    "AimCallbackHandler",
    "ArgillaCallbackHandler",
    "ArizeCallbackHandler",
    "PromptLayerCallbackHandler",
    "ArthurCallbackHandler",
    "ClearMLCallbackHandler",
    "CometCallbackHandler",
    "ContextCallbackHandler",
    "FileCallbackHandler",
    "HumanApprovalCallbackHandler",
    "InfinoCallbackHandler",
    "MlflowCallbackHandler",
    "LLMonitorCallbackHandler",
    "OpenAICallbackHandler",
    "StdOutCallbackHandler",
    "AsyncIteratorCallbackHandler",
    "StreamingStdOutCallbackHandler",
    "FinalStreamingStdOutCallbackHandler",
    "LLMThoughtLabeler",
    "LangChainTracer",
    "StreamlitCallbackHandler",
    "WandbCallbackHandler",
    "WhyLabsCallbackHandler",
    "get_openai_callback",
    "tracing_enabled",
    "tracing_v2_enabled",
    "collect_runs",
    "wandb_tracing_enabled",
    "FlyteCallbackHandler",
    "SageMakerCallbackHandler",
    "LabelStudioCallbackHandler",
    "TrubricsCallbackHandler",
]


def test_all_imports() -> None:
    assert set(__all__) == set(EXPECTED_ALL)

libs/langchain/tests/unit_tests/chains/test_imports.py (new file, 63 lines)

from langchain.chains import __all__

EXPECTED_ALL = [
    "APIChain",
    "AnalyzeDocumentChain",
    "ArangoGraphQAChain",
    "ChatVectorDBChain",
    "ConstitutionalChain",
    "ConversationChain",
    "ConversationalRetrievalChain",
    "FalkorDBQAChain",
    "FlareChain",
    "GraphCypherQAChain",
    "GraphQAChain",
    "GraphSparqlQAChain",
    "HugeGraphQAChain",
    "HypotheticalDocumentEmbedder",
    "KuzuQAChain",
    "LLMChain",
    "LLMCheckerChain",
    "LLMMathChain",
    "LLMRequestsChain",
    "LLMRouterChain",
    "LLMSummarizationCheckerChain",
    "MapReduceChain",
    "MapReduceDocumentsChain",
    "MapRerankDocumentsChain",
    "MultiPromptChain",
    "MultiRetrievalQAChain",
    "MultiRouteChain",
    "NatBotChain",
    "NebulaGraphQAChain",
    "NeptuneOpenCypherQAChain",
    "OpenAIModerationChain",
    "OpenAPIEndpointChain",
    "QAGenerationChain",
    "QAWithSourcesChain",
    "ReduceDocumentsChain",
    "RefineDocumentsChain",
    "RetrievalQA",
    "RetrievalQAWithSourcesChain",
    "RouterChain",
    "SequentialChain",
    "SimpleSequentialChain",
    "StuffDocumentsChain",
    "TransformChain",
    "VectorDBQA",
    "VectorDBQAWithSourcesChain",
    "create_citation_fuzzy_match_chain",
    "create_extraction_chain",
    "create_extraction_chain_pydantic",
    "create_qa_with_sources_chain",
    "create_qa_with_structure_chain",
    "create_tagging_chain",
    "create_tagging_chain_pydantic",
    "generate_example",
    "load_chain",
    "create_sql_query_chain",
]


def test_all_imports() -> None:
    assert set(__all__) == set(EXPECTED_ALL)

libs/langchain/tests/unit_tests/chat_models/test_imports.py (new file, 35 lines)

from langchain.chat_models import __all__

EXPECTED_ALL = [
    "ChatOpenAI",
    "BedrockChat",
    "AzureChatOpenAI",
    "FakeListChatModel",
    "PromptLayerChatOpenAI",
    "ChatEverlyAI",
    "ChatAnthropic",
    "ChatCohere",
    "ChatGooglePalm",
    "ChatMLflowAIGateway",
    "ChatOllama",
    "ChatVertexAI",
    "JinaChat",
    "HumanInputChatModel",
    "MiniMaxChat",
    "ChatAnyscale",
    "ChatLiteLLM",
    "ErnieBotChat",
    "ChatJavelinAIGateway",
    "ChatKonko",
    "PaiEasChatEndpoint",
    "QianfanChatEndpoint",
    "ChatFireworks",
    "ChatYandexGPT",
    "ChatBaichuan",
    "ChatHunyuan",
    "GigaChat",
]


def test_all_imports() -> None:
    assert set(__all__) == set(EXPECTED_ALL)

libs/langchain/tests/unit_tests/docstore/test_imports.py (new file, 7 lines)

from langchain.docstore import __all__

EXPECTED_ALL = ["DocstoreFn", "InMemoryDocstore", "Wikipedia"]


def test_all_imports() -> None:
    assert set(__all__) == set(EXPECTED_ALL)

libs/langchain/tests/unit_tests/document_loaders/test_imports.py (new file, 175 lines)

from langchain.document_loaders import __all__

EXPECTED_ALL = [
    "AcreomLoader",
    "AsyncHtmlLoader",
    "AsyncChromiumLoader",
    "AZLyricsLoader",
    "AcreomLoader",
    "AirbyteCDKLoader",
    "AirbyteGongLoader",
    "AirbyteJSONLoader",
    "AirbyteHubspotLoader",
    "AirbyteSalesforceLoader",
    "AirbyteShopifyLoader",
    "AirbyteStripeLoader",
    "AirbyteTypeformLoader",
    "AirbyteZendeskSupportLoader",
    "AirtableLoader",
    "AmazonTextractPDFLoader",
    "ApifyDatasetLoader",
    "ArcGISLoader",
    "ArxivLoader",
    "AssemblyAIAudioTranscriptLoader",
    "AsyncHtmlLoader",
    "AzureBlobStorageContainerLoader",
    "AzureBlobStorageFileLoader",
    "BSHTMLLoader",
    "BibtexLoader",
    "BigQueryLoader",
    "BiliBiliLoader",
    "BlackboardLoader",
    "Blob",
    "BlobLoader",
    "BlockchainDocumentLoader",
    "BraveSearchLoader",
    "BrowserlessLoader",
    "CSVLoader",
    "ChatGPTLoader",
    "CoNLLULoader",
    "CollegeConfidentialLoader",
    "ConcurrentLoader",
    "ConfluenceLoader",
    "CubeSemanticLoader",
    "DataFrameLoader",
    "DatadogLogsLoader",
    "DiffbotLoader",
    "DirectoryLoader",
    "DiscordChatLoader",
    "DocugamiLoader",
    "Docx2txtLoader",
    "DropboxLoader",
    "DuckDBLoader",
    "EmbaasBlobLoader",
    "EmbaasLoader",
    "EtherscanLoader",
    "EverNoteLoader",
    "FacebookChatLoader",
    "FaunaLoader",
    "FigmaFileLoader",
    "FileSystemBlobLoader",
    "GCSDirectoryLoader",
    "GCSFileLoader",
    "GeoDataFrameLoader",
    "GitHubIssuesLoader",
    "GitLoader",
    "GitbookLoader",
    "GoogleApiClient",
    "GoogleApiYoutubeLoader",
    "GoogleSpeechToTextLoader",
    "GoogleDriveLoader",
    "GutenbergLoader",
    "HNLoader",
    "HuggingFaceDatasetLoader",
    "IFixitLoader",
    "IMSDbLoader",
    "ImageCaptionLoader",
    "IuguLoader",
    "JSONLoader",
    "JoplinLoader",
    "LarkSuiteDocLoader",
    "LakeFSLoader",
    "MHTMLLoader",
    "MWDumpLoader",
    "MastodonTootsLoader",
    "MathpixPDFLoader",
    "MaxComputeLoader",
    "MergedDataLoader",
    "ModernTreasuryLoader",
    "MongodbLoader",
    "NewsURLLoader",
    "NotebookLoader",
    "NotionDBLoader",
    "NotionDirectoryLoader",
    "OBSDirectoryLoader",
    "OBSFileLoader",
    "ObsidianLoader",
    "OneDriveFileLoader",
    "OneDriveLoader",
    "OnlinePDFLoader",
    "OpenCityDataLoader",
    "OutlookMessageLoader",
    "PDFMinerLoader",
    "PDFMinerPDFasHTMLLoader",
    "PDFPlumberLoader",
    "PagedPDFSplitter",
    "PlaywrightURLLoader",
    "PolarsDataFrameLoader",
    "PsychicLoader",
    "PubMedLoader",
    "PyMuPDFLoader",
    "PyPDFDirectoryLoader",
    "PyPDFLoader",
    "PyPDFium2Loader",
    "PySparkDataFrameLoader",
    "PythonLoader",
    "RSSFeedLoader",
    "ReadTheDocsLoader",
    "RecursiveUrlLoader",
    "RedditPostsLoader",
    "RoamLoader",
    "RocksetLoader",
    "S3DirectoryLoader",
    "S3FileLoader",
    "SRTLoader",
    "SeleniumURLLoader",
    "SharePointLoader",
    "SitemapLoader",
    "SlackDirectoryLoader",
    "SnowflakeLoader",
    "SpreedlyLoader",
    "StripeLoader",
    "TelegramChatApiLoader",
    "TelegramChatFileLoader",
    "TelegramChatLoader",
    "TensorflowDatasetLoader",
    "TencentCOSDirectoryLoader",
    "TencentCOSFileLoader",
    "TextLoader",
    "ToMarkdownLoader",
    "TomlLoader",
    "TrelloLoader",
    "TwitterTweetLoader",
    "UnstructuredAPIFileIOLoader",
    "UnstructuredAPIFileLoader",
    "UnstructuredCSVLoader",
    "UnstructuredEPubLoader",
    "UnstructuredEmailLoader",
    "UnstructuredExcelLoader",
    "UnstructuredFileIOLoader",
    "UnstructuredFileLoader",
    "UnstructuredHTMLLoader",
    "UnstructuredImageLoader",
    "UnstructuredMarkdownLoader",
    "UnstructuredODTLoader",
    "UnstructuredOrgModeLoader",
    "UnstructuredPDFLoader",
    "UnstructuredPowerPointLoader",
    "UnstructuredRSTLoader",
    "UnstructuredRTFLoader",
    "UnstructuredTSVLoader",
    "UnstructuredURLLoader",
    "UnstructuredWordDocumentLoader",
    "UnstructuredXMLLoader",
    "WeatherDataLoader",
    "WebBaseLoader",
    "WhatsAppChatLoader",
    "WikipediaLoader",
    "XorbitsLoader",
    "YoutubeAudioLoader",
    "YoutubeLoader",
]


def test_all_imports() -> None:
    assert set(__all__) == set(EXPECTED_ALL)

libs/langchain/tests/unit_tests/document_transformers/test_imports.py (new file, 20 lines; path inferred from the import below, header lost in this view)

from langchain.document_transformers import __all__

EXPECTED_ALL = [
    "BeautifulSoupTransformer",
    "DoctranQATransformer",
    "DoctranTextTranslator",
    "DoctranPropertyExtractor",
    "EmbeddingsClusteringFilter",
    "EmbeddingsRedundantFilter",
    "GoogleTranslateTransformer",
    "get_stateful_documents",
    "LongContextReorder",
    "NucliaTextTransformer",
    "OpenAIMetadataTagger",
    "Html2TextTransformer",
]


def test_all_imports() -> None:
    assert set(__all__) == set(EXPECTED_ALL)

libs/langchain/tests/unit_tests/embeddings/test_imports.py (new file, 55 lines)

from langchain.embeddings import __all__

EXPECTED_ALL = [
    "OpenAIEmbeddings",
    "CacheBackedEmbeddings",
    "ClarifaiEmbeddings",
    "CohereEmbeddings",
    "ElasticsearchEmbeddings",
    "HuggingFaceEmbeddings",
    "HuggingFaceInferenceAPIEmbeddings",
    "GradientEmbeddings",
    "JinaEmbeddings",
    "LlamaCppEmbeddings",
    "HuggingFaceHubEmbeddings",
    "MlflowAIGatewayEmbeddings",
    "ModelScopeEmbeddings",
    "TensorflowHubEmbeddings",
    "SagemakerEndpointEmbeddings",
    "HuggingFaceInstructEmbeddings",
    "MosaicMLInstructorEmbeddings",
    "SelfHostedEmbeddings",
    "SelfHostedHuggingFaceEmbeddings",
    "SelfHostedHuggingFaceInstructEmbeddings",
    "FakeEmbeddings",
    "DeterministicFakeEmbedding",
    "AlephAlphaAsymmetricSemanticEmbedding",
    "AlephAlphaSymmetricSemanticEmbedding",
    "SentenceTransformerEmbeddings",
    "GooglePalmEmbeddings",
    "MiniMaxEmbeddings",
    "VertexAIEmbeddings",
    "BedrockEmbeddings",
    "DeepInfraEmbeddings",
    "EdenAiEmbeddings",
    "DashScopeEmbeddings",
    "EmbaasEmbeddings",
    "OctoAIEmbeddings",
    "SpacyEmbeddings",
    "NLPCloudEmbeddings",
    "GPT4AllEmbeddings",
    "XinferenceEmbeddings",
    "LocalAIEmbeddings",
    "AwaEmbeddings",
    "HuggingFaceBgeEmbeddings",
    "ErnieEmbeddings",
    "JavelinAIGatewayEmbeddings",
    "OllamaEmbeddings",
    "QianfanEmbeddingsEndpoint",
    "JohnSnowLabsEmbeddings",
    "VoyageEmbeddings",
]


def test_all_imports() -> None:
    assert set(__all__) == set(EXPECTED_ALL)

libs/langchain/tests/unit_tests/evaluation/test_imports.py (new file, 38 lines)

from langchain.evaluation import __all__

EXPECTED_ALL = [
    "EvaluatorType",
    "ExactMatchStringEvaluator",
    "RegexMatchStringEvaluator",
    "PairwiseStringEvalChain",
    "LabeledPairwiseStringEvalChain",
    "QAEvalChain",
    "CotQAEvalChain",
    "ContextQAEvalChain",
    "StringEvaluator",
    "PairwiseStringEvaluator",
    "TrajectoryEvalChain",
    "CriteriaEvalChain",
    "Criteria",
    "EmbeddingDistance",
    "EmbeddingDistanceEvalChain",
    "PairwiseEmbeddingDistanceEvalChain",
    "StringDistance",
    "StringDistanceEvalChain",
    "PairwiseStringDistanceEvalChain",
    "LabeledCriteriaEvalChain",
    "load_evaluators",
    "load_evaluator",
    "load_dataset",
    "AgentTrajectoryEvaluator",
    "ScoreStringEvalChain",
    "LabeledScoreStringEvalChain",
    "JsonValidityEvaluator",
    "JsonEqualityEvaluator",
    "JsonEditDistanceEvaluator",
    "JsonSchemaEvaluator",
]


def test_all_imports() -> None:
    assert set(__all__) == set(EXPECTED_ALL)

libs/langchain/tests/unit_tests/graphs/test_imports.py (new file, 18 lines)

from langchain.graphs import __all__

EXPECTED_ALL = [
    "MemgraphGraph",
    "NetworkxEntityGraph",
    "Neo4jGraph",
    "NebulaGraph",
    "NeptuneGraph",
    "KuzuGraph",
    "HugeGraph",
    "RdfGraph",
    "ArangoGraph",
    "FalkorDBGraph",
]


def test_all_imports() -> None:
    assert set(__all__) == set(EXPECTED_ALL)

libs/langchain/tests/unit_tests/indexes/__init__.py (new file, empty)

libs/langchain/tests/unit_tests/indexes/test_imports.py (new file, 15 lines)

from langchain.indexes import __all__

EXPECTED_ALL = [
    # Keep sorted
    "aindex",
    "GraphIndexCreator",
    "index",
    "IndexingResult",
    "SQLRecordManager",
    "VectorstoreIndexCreator",
]


def test_all_imports() -> None:
    assert set(__all__) == set(EXPECTED_ALL)

Modified file (path not preserved in this view):

@@ -1,8 +1,91 @@
 from langchain import llms
 from langchain.llms.base import BaseLLM
+
+EXPECT_ALL = [
+    "AI21",
+    "AlephAlpha",
+    "AmazonAPIGateway",
+    "Anthropic",
+    "Anyscale",
+    "Arcee",
+    "Aviary",
+    "AzureMLOnlineEndpoint",
+    "AzureOpenAI",
+    "Banana",
+    "Baseten",
+    "Beam",
+    "Bedrock",
+    "CTransformers",
+    "CTranslate2",
+    "CerebriumAI",
+    "ChatGLM",
+    "Clarifai",
+    "Cohere",
+    "Databricks",
+    "DeepInfra",
+    "DeepSparse",
+    "EdenAI",
+    "FakeListLLM",
+    "Fireworks",
+    "ForefrontAI",
+    "GigaChat",
+    "GPT4All",
+    "GooglePalm",
+    "GooseAI",
+    "GradientLLM",
+    "HuggingFaceEndpoint",
+    "HuggingFaceHub",
+    "HuggingFacePipeline",
+    "HuggingFaceTextGenInference",
+    "HumanInputLLM",
+    "KoboldApiLLM",
+    "LlamaCpp",
+    "TextGen",
+    "ManifestWrapper",
+    "Minimax",
+    "MlflowAIGateway",
+    "Modal",
+    "MosaicML",
+    "Nebula",
+    "NIBittensorLLM",
+    "NLPCloud",
+    "Ollama",
+    "OpenAI",
+    "OpenAIChat",
+    "OpenLLM",
+    "OpenLM",
+    "PaiEasEndpoint",
+    "Petals",
+    "PipelineAI",
+    "Predibase",
+    "PredictionGuard",
+    "PromptLayerOpenAI",
+    "PromptLayerOpenAIChat",
+    "OpaquePrompts",
+    "RWKV",
+    "Replicate",
+    "SagemakerEndpoint",
+    "SelfHostedHuggingFaceLLM",
+    "SelfHostedPipeline",
+    "StochasticAI",
+    "TitanTakeoff",
+    "TitanTakeoffPro",
+    "Tongyi",
+    "VertexAI",
+    "VertexAIModelGarden",
+    "VLLM",
+    "VLLMOpenAI",
+    "Writer",
+    "OctoAIEndpoint",
+    "Xinference",
+    "JavelinAIGateway",
+    "QianfanLLMEndpoint",
+    "YandexGPT",
+]
+
+
 def test_all_imports() -> None:
     """Simple test to make sure all things can be imported."""
     for cls in llms.__all__:
         assert issubclass(getattr(llms, cls), BaseLLM)
+
+    assert set(llms.__all__) == set(EXPECT_ALL)
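
Unlike the plain set-equality tests elsewhere in this commit, the llms test keeps its original, stronger check: every name exported from langchain.llms must resolve to a BaseLLM subclass, with the new EXPECT_ALL set comparison layered on top. A self-contained sketch (toy classes, not langchain's) of the failure mode the subclass loop catches that a set comparison alone would miss:

    import pytest

    class BaseLLM: ...
    class GoodLLM(BaseLLM): ...

    def rogue_export() -> None: ...  # a function mistakenly exported

    def check_exports(exports: dict) -> None:
        for name, obj in exports.items():
            # A non-class or non-LLM object fails here even if its
            # name is present in the expected export set.
            assert isinstance(obj, type) and issubclass(obj, BaseLLM), name

    check_exports({"GoodLLM": GoodLLM})  # passes

    with pytest.raises(AssertionError):
        check_exports({"rogue_export": rogue_export})  # caught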

libs/langchain/tests/unit_tests/load/__init__.py (new file, empty)

Modified file (path not preserved in this view):

@@ -5,6 +5,9 @@
     "lc": 1,
     "type": "constructor",
     "id": [
+      "tests",
+      "unit_tests",
+      "load",
       "test_dump",
       "Person"
     ],

@@ -27,6 +30,9 @@
     "lc": 1,
     "type": "constructor",
     "id": [
+      "tests",
+      "unit_tests",
+      "load",
       "test_dump",
       "SpecialPerson"
     ],

@@ -57,7 +57,7 @@ def test_person(snapshot: Any) -> None:
     assert dumps(p, pretty=True) == snapshot
     sp = SpecialPerson(another_secret="Wooo", secret="Hmm")
     assert dumps(sp, pretty=True) == snapshot
-    assert Person.lc_id() == ["test_dump", "Person"]
+    assert Person.lc_id() == ["tests", "unit_tests", "load", "test_dump", "Person"]


 @pytest.mark.requires("openai")
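
The three hunks above make the same point: a class defined in the load test module now serializes with its full dotted module path ("tests", "unit_tests", "load", "test_dump") rather than just the file name. A hypothetical sketch of that derivation, assuming the id is built from the class's __module__ plus its name (langchain's actual implementation may differ):

    # Hypothetical: derive a serialization id from a class's module path.
    class Person:
        pass

    Person.__module__ = "tests.unit_tests.load.test_dump"

    lc_id = [*Person.__module__.split("."), Person.__name__]
    assert lc_id == ["tests", "unit_tests", "load", "test_dump", "Person"]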

libs/langchain/tests/unit_tests/load/test_imports.py (new file, 12 lines)

from langchain.load import __all__

EXPECTED_ALL = [
    "dumpd",
    "dumps",
    "load",
    "loads",
]


def test_all_imports() -> None:
    assert set(__all__) == set(EXPECTED_ALL)

libs/langchain/tests/unit_tests/memory/test_imports.py (new file, 42 lines)

from langchain.memory import __all__

EXPECTED_ALL = [
    "CassandraChatMessageHistory",
    "ChatMessageHistory",
    "CombinedMemory",
    "ConversationBufferMemory",
    "ConversationBufferWindowMemory",
    "ConversationEntityMemory",
    "ConversationKGMemory",
    "ConversationStringBufferMemory",
    "ConversationSummaryBufferMemory",
    "ConversationSummaryMemory",
    "ConversationTokenBufferMemory",
    "CosmosDBChatMessageHistory",
    "DynamoDBChatMessageHistory",
    "ElasticsearchChatMessageHistory",
    "FileChatMessageHistory",
    "InMemoryEntityStore",
    "MomentoChatMessageHistory",
    "MongoDBChatMessageHistory",
    "MotorheadMemory",
    "PostgresChatMessageHistory",
    "ReadOnlySharedMemory",
    "RedisChatMessageHistory",
    "RedisEntityStore",
    "SingleStoreDBChatMessageHistory",
    "SQLChatMessageHistory",
    "SQLiteEntityStore",
    "SimpleMemory",
    "StreamlitChatMessageHistory",
    "VectorStoreRetrieverMemory",
    "XataChatMessageHistory",
    "ZepChatMessageHistory",
    "ZepMemory",
    "UpstashRedisEntityStore",
    "UpstashRedisChatMessageHistory",
]


def test_all_imports() -> None:
    assert set(__all__) == set(EXPECTED_ALL)

libs/langchain/tests/unit_tests/output_parsers/test_imports.py (new file, 26 lines; path inferred from the import below, header lost in this view)

from langchain.output_parsers import __all__

EXPECTED_ALL = [
    "BooleanOutputParser",
    "CombiningOutputParser",
    "CommaSeparatedListOutputParser",
    "DatetimeOutputParser",
    "EnumOutputParser",
    "GuardrailsOutputParser",
    "ListOutputParser",
    "MarkdownListOutputParser",
    "NumberedListOutputParser",
    "OutputFixingParser",
    "PydanticOutputParser",
    "RegexDictParser",
    "RegexParser",
    "ResponseSchema",
    "RetryOutputParser",
    "RetryWithErrorOutputParser",
    "StructuredOutputParser",
    "XMLOutputParser",
]


def test_all_imports() -> None:
    assert set(__all__) == set(EXPECTED_ALL)

libs/langchain/tests/unit_tests/prompts/test_imports.py (new file, 28 lines)

from langchain.prompts import __all__

EXPECTED_ALL = [
    "AIMessagePromptTemplate",
    "BaseChatPromptTemplate",
    "BasePromptTemplate",
    "ChatMessagePromptTemplate",
    "ChatPromptTemplate",
    "FewShotPromptTemplate",
    "FewShotPromptWithTemplates",
    "HumanMessagePromptTemplate",
    "LengthBasedExampleSelector",
    "MaxMarginalRelevanceExampleSelector",
    "MessagesPlaceholder",
    "NGramOverlapExampleSelector",
    "PipelinePromptTemplate",
    "Prompt",
    "PromptTemplate",
    "SemanticSimilarityExampleSelector",
    "StringPromptTemplate",
    "SystemMessagePromptTemplate",
    "load_prompt",
    "FewShotChatMessagePromptTemplate",
]


def test_all_imports() -> None:
    assert set(__all__) == set(EXPECTED_ALL)

libs/langchain/tests/unit_tests/retrievers/test_imports.py (new file, 49 lines)

from langchain.retrievers import __all__

EXPECTED_ALL = [
    "AmazonKendraRetriever",
    "ArceeRetriever",
    "ArxivRetriever",
    "AzureCognitiveSearchRetriever",
    "ChatGPTPluginRetriever",
    "ContextualCompressionRetriever",
    "ChaindeskRetriever",
    "CohereRagRetriever",
    "ElasticSearchBM25Retriever",
    "GoogleDocumentAIWarehouseRetriever",
    "GoogleCloudEnterpriseSearchRetriever",
    "GoogleVertexAIMultiTurnSearchRetriever",
    "GoogleVertexAISearchRetriever",
    "KayAiRetriever",
    "KNNRetriever",
    "LlamaIndexGraphRetriever",
    "LlamaIndexRetriever",
    "MergerRetriever",
    "MetalRetriever",
    "MilvusRetriever",
    "MultiQueryRetriever",
    "PineconeHybridSearchRetriever",
    "PubMedRetriever",
    "RemoteLangChainRetriever",
    "SVMRetriever",
    "SelfQueryRetriever",
    "TavilySearchAPIRetriever",
    "TFIDFRetriever",
    "BM25Retriever",
    "TimeWeightedVectorStoreRetriever",
    "VespaRetriever",
    "WeaviateHybridSearchRetriever",
    "WikipediaRetriever",
    "ZepRetriever",
    "ZillizRetriever",
    "DocArrayRetriever",
    "RePhraseQueryRetriever",
    "WebResearchRetriever",
    "EnsembleRetriever",
    "ParentDocumentRetriever",
    "MultiVectorRetriever",
]


def test_all_imports() -> None:
    assert set(__all__) == set(EXPECTED_ALL)

libs/langchain/tests/unit_tests/schema/test_imports.py (new file, 43 lines)

from langchain.schema import __all__

EXPECTED_ALL = [
    "BaseCache",
    "BaseMemory",
    "BaseStore",
    "AgentFinish",
    "AgentAction",
    "Document",
    "BaseChatMessageHistory",
    "BaseDocumentTransformer",
    "BaseMessage",
    "ChatMessage",
    "FunctionMessage",
    "HumanMessage",
    "AIMessage",
    "SystemMessage",
    "messages_from_dict",
    "messages_to_dict",
    "_message_to_dict",
    "_message_from_dict",
    "get_buffer_string",
    "RunInfo",
    "LLMResult",
    "ChatResult",
    "ChatGeneration",
    "Generation",
    "PromptValue",
    "LangChainException",
    "BaseRetriever",
    "RUN_KEY",
    "Memory",
    "OutputParserException",
    "StrOutputParser",
    "BaseOutputParser",
    "BaseLLMOutputParser",
    "BasePromptTemplate",
    "format_document",
]


def test_all_imports() -> None:
    assert set(__all__) == set(EXPECTED_ALL)

libs/langchain/tests/unit_tests/smith/test_imports.py (new file, 12 lines)

from langchain.smith import __all__

EXPECTED_ALL = [
    "arun_on_dataset",
    "run_on_dataset",
    "ChoicesOutputParser",
    "RunEvalConfig",
]


def test_all_imports() -> None:
    assert set(__all__) == set(EXPECTED_ALL)

libs/langchain/tests/unit_tests/storage/test_imports.py (new file, 15 lines)

from langchain.storage import __all__

EXPECTED_ALL = [
    "EncoderBackedStore",
    "InMemoryStore",
    "LocalFileStore",
    "RedisStore",
    "create_lc_store",
    "create_kv_docstore",
    "UpstashRedisStore",
]


def test_all_imports() -> None:
    assert set(__all__) == set(EXPECTED_ALL)

libs/langchain/tests/unit_tests/tools/test_imports.py (new file, 116 lines)

from langchain.tools import __all__

EXPECTED_ALL = [
    "AINAppOps",
    "AINOwnerOps",
    "AINRuleOps",
    "AINTransfer",
    "AINValueOps",
    "AIPluginTool",
    "APIOperation",
    "ArxivQueryRun",
    "AzureCogsFormRecognizerTool",
    "AzureCogsImageAnalysisTool",
    "AzureCogsSpeech2TextTool",
    "AzureCogsText2SpeechTool",
    "BaseGraphQLTool",
    "BaseRequestsTool",
    "BaseSQLDatabaseTool",
    "BaseSparkSQLTool",
    "BaseTool",
    "BearlyInterpreterTool",
    "BingSearchResults",
    "BingSearchRun",
    "BraveSearch",
    "ClickTool",
    "CopyFileTool",
    "CurrentWebPageTool",
    "DeleteFileTool",
    "DuckDuckGoSearchResults",
    "DuckDuckGoSearchRun",
    "E2BDataAnalysisTool",
    "EdenAiExplicitImageTool",
    "EdenAiObjectDetectionTool",
    "EdenAiParsingIDTool",
    "EdenAiParsingInvoiceTool",
    "EdenAiSpeechToTextTool",
    "EdenAiTextModerationTool",
    "EdenAiTextToSpeechTool",
    "EdenaiTool",
    "ElevenLabsText2SpeechTool",
    "ExtractHyperlinksTool",
    "ExtractTextTool",
    "FileSearchTool",
    "GetElementsTool",
    "GmailCreateDraft",
    "GmailGetMessage",
    "GmailGetThread",
    "GmailSearch",
    "GmailSendMessage",
    "GoogleCloudTextToSpeechTool",
    "GooglePlacesTool",
    "GoogleSearchResults",
    "GoogleSearchRun",
    "GoogleSerperResults",
    "GoogleSerperRun",
    "HumanInputRun",
    "IFTTTWebhook",
    "InfoPowerBITool",
    "InfoSQLDatabaseTool",
    "InfoSparkSQLTool",
    "JiraAction",
    "JsonGetValueTool",
    "JsonListKeysTool",
    "ListDirectoryTool",
    "ListPowerBITool",
    "ListSQLDatabaseTool",
    "ListSparkSQLTool",
    "MetaphorSearchResults",
    "MoveFileTool",
    "NavigateBackTool",
    "NavigateTool",
    "O365CreateDraftMessage",
    "O365SearchEmails",
    "O365SearchEvents",
    "O365SendEvent",
    "O365SendMessage",
    "OpenAPISpec",
    "OpenWeatherMapQueryRun",
    "PubmedQueryRun",
    "QueryCheckerTool",
    "QueryPowerBITool",
    "QuerySQLCheckerTool",
    "QuerySQLDataBaseTool",
    "QuerySparkSQLTool",
    "ReadFileTool",
    "RequestsDeleteTool",
    "RequestsGetTool",
    "RequestsPatchTool",
    "RequestsPostTool",
    "RequestsPutTool",
    "SceneXplainTool",
    "SearxSearchResults",
    "SearxSearchRun",
    "ShellTool",
    "SleepTool",
    "StdInInquireTool",
    "SteamshipImageGenerationTool",
    "StructuredTool",
    "Tool",
    "VectorStoreQATool",
    "VectorStoreQAWithSourcesTool",
    "WikipediaQueryRun",
    "WolframAlphaQueryRun",
    "WriteFileTool",
    "YahooFinanceNewsTool",
    "YouTubeSearchTool",
    "ZapierNLAListActions",
    "ZapierNLARunAction",
    "authenticate",
    "format_tool_to_openai_function",
    "tool",
]


def test_all_imports() -> None:
    assert set(__all__) == set(EXPECTED_ALL)

libs/langchain/tests/unit_tests/utilities/test_imports.py (new file, 45 lines)

from langchain.utilities import __all__

EXPECTED_ALL = [
    "AlphaVantageAPIWrapper",
    "ApifyWrapper",
    "ArceeWrapper",
    "ArxivAPIWrapper",
    "BibtexparserWrapper",
    "BingSearchAPIWrapper",
    "BraveSearchWrapper",
    "DuckDuckGoSearchAPIWrapper",
    "GoldenQueryAPIWrapper",
    "GooglePlacesAPIWrapper",
    "GoogleScholarAPIWrapper",
    "GoogleSearchAPIWrapper",
    "GoogleSerperAPIWrapper",
    "GraphQLAPIWrapper",
    "JiraAPIWrapper",
    "LambdaWrapper",
    "MaxComputeAPIWrapper",
    "MetaphorSearchAPIWrapper",
    "OpenWeatherMapAPIWrapper",
    "Portkey",
    "PowerBIDataset",
    "PubMedAPIWrapper",
    "PythonREPL",
    "Requests",
    "RequestsWrapper",
    "SQLDatabase",
    "SceneXplainAPIWrapper",
    "SearchApiAPIWrapper",
    "SearxSearchWrapper",
    "SerpAPIWrapper",
    "SparkSQL",
    "TensorflowDatasets",
    "TextRequestsWrapper",
    "TwilioAPIWrapper",
    "WikipediaAPIWrapper",
    "WolframAlphaAPIWrapper",
    "ZapierNLAWrapper",
]


def test_all_imports() -> None:
    assert set(__all__) == set(EXPECTED_ALL)

libs/langchain/tests/unit_tests/utils/test_imports.py (new file, 28 lines)

from langchain.utils import __all__

EXPECTED_ALL = [
    "StrictFormatter",
    "check_package_version",
    "comma_list",
    "convert_to_secret_str",
    "cosine_similarity",
    "cosine_similarity_top_k",
    "formatter",
    "get_bolded_text",
    "get_color_mapping",
    "get_colored_text",
    "get_from_dict_or_env",
    "get_from_env",
    "get_pydantic_field_names",
    "guard_import",
    "mock_now",
    "print_text",
    "raise_for_status_with_text",
    "stringify_dict",
    "stringify_value",
    "xor_args",
]


def test_all_imports() -> None:
    assert set(__all__) == set(EXPECTED_ALL)
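
All of these tests are plain pytest functions that exercise only langchain's import surface, so they run without any of the optional third-party packages behind the integrations. An illustrative local invocation (paths assume a repository checkout; adjust as needed):

    cd libs/langchain
    pytest tests/unit_tests -k test_all_imports -q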