Mirror of https://github.com/hwchase17/langchain.git (synced 2025-07-18 18:53:10 +00:00)
Fix class promotion (#6187)
In LangChain, all module classes are enumerated in the `__init__.py` file of the corresponding module, but some classes were missing from those files. This PR:
- adds the missing classes to the module `__init__.py` files
- sorts the `__init__.py:__all__` lists of class names
- renames `langchain.tools.sql_database.tool.QueryCheckerTool` to `QuerySQLCheckerTool`, because it conflicted with `langchain.tools.spark_sql.tool.QueryCheckerTool`
- changes `pyproject.toml`:
  - adds `pgvector` to `pyproject.toml:extended_testing`
  - adds `pandas` to `pyproject.toml:[tool.poetry.group.test.dependencies]`
- comments out `streamlit` in `callbacks/__init__.py`, because `streamlit` now requires Python >=3.7, !=3.9.7
- fixes duplicate names in `tools`
- fixes the corresponding unit tests

#### Who can review?
@hwchase17 @dev2049
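One way to keep the promoted classes and the sorted `__all__` lists from drifting again is a small import test. The sketch below is illustrative only and is not part of this PR; the module list and test name are assumptions:

```python
# Hypothetical check, in the spirit of the unit tests this PR touches:
# every name in a module's __all__ should be importable, and the list should stay sorted.
import importlib

MODULES_TO_CHECK = ["langchain.agents", "langchain.callbacks", "langchain.chains"]


def test_all_is_sorted_and_resolvable() -> None:
    for module_name in MODULES_TO_CHECK:
        module = importlib.import_module(module_name)
        public_names = list(getattr(module, "__all__", []))
        # The list should already be in sorted order ...
        assert public_names == sorted(public_names), f"{module_name}.__all__ is not sorted"
        # ... and every listed name should resolve to an attribute of the module.
        for name in public_names:
            assert hasattr(module, name), f"{module_name} does not export {name}"
```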
This commit is contained in:
parent c0c2fd0782
commit c7ca350cd3
@@ -31,6 +31,7 @@ from langchain.agents.load_tools import (
 )
 from langchain.agents.loading import load_agent
 from langchain.agents.mrkl.base import MRKLChain, ZeroShotAgent
+from langchain.agents.openai_functions_agent.base import OpenAIFunctionsAgent
 from langchain.agents.react.base import ReActChain, ReActTextWorldAgent
 from langchain.agents.self_ask_with_search.base import SelfAskWithSearchChain
 from langchain.agents.structured_chat.base import StructuredChatAgent
@@ -47,6 +48,7 @@ __all__ = [
     "ConversationalChatAgent",
     "LLMSingleActionAgent",
     "MRKLChain",
+    "OpenAIFunctionsAgent",
     "ReActChain",
     "ReActTextWorldAgent",
     "SelfAskWithSearchChain",
@@ -10,7 +10,7 @@ from langchain.tools import BaseTool
 from langchain.tools.sql_database.tool import (
     InfoSQLDatabaseTool,
     ListSQLDatabaseTool,
-    QueryCheckerTool,
+    QuerySQLCheckerTool,
     QuerySQLDataBaseTool,
 )
 
@@ -55,5 +55,5 @@ class SQLDatabaseToolkit(BaseToolkit):
                 db=self.db, description=info_sql_database_tool_description
             ),
             ListSQLDatabaseTool(db=self.db),
-            QueryCheckerTool(db=self.db, llm=self.llm),
+            QuerySQLCheckerTool(db=self.db, llm=self.llm),
         ]
@@ -2,6 +2,7 @@
 
 from langchain.callbacks.aim_callback import AimCallbackHandler
 from langchain.callbacks.argilla_callback import ArgillaCallbackHandler
+from langchain.callbacks.arize_callback import ArizeCallbackHandler
 from langchain.callbacks.clearml_callback import ClearMLCallbackHandler
 from langchain.callbacks.comet_ml_callback import CometCallbackHandler
 from langchain.callbacks.file import FileCallbackHandler
@@ -15,23 +16,35 @@ from langchain.callbacks.mlflow_callback import MlflowCallbackHandler
 from langchain.callbacks.openai_info import OpenAICallbackHandler
 from langchain.callbacks.stdout import StdOutCallbackHandler
 from langchain.callbacks.streaming_aiter import AsyncIteratorCallbackHandler
+from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
+from langchain.callbacks.streaming_stdout_final_only import (
+    FinalStreamingStdOutCallbackHandler,
+)
+
+# now streamlit requires Python >=3.7, !=3.9.7 So, it is commented out here.
+# from langchain.callbacks.streamlit import StreamlitCallbackHandler
 from langchain.callbacks.wandb_callback import WandbCallbackHandler
 from langchain.callbacks.whylabs_callback import WhyLabsCallbackHandler
 
 __all__ = [
-    "ArgillaCallbackHandler",
-    "OpenAICallbackHandler",
-    "StdOutCallbackHandler",
-    "FileCallbackHandler",
     "AimCallbackHandler",
-    "WandbCallbackHandler",
-    "MlflowCallbackHandler",
+    "ArgillaCallbackHandler",
+    "ArizeCallbackHandler",
+    "AsyncIteratorCallbackHandler",
     "ClearMLCallbackHandler",
     "CometCallbackHandler",
+    "FileCallbackHandler",
+    "FinalStreamingStdOutCallbackHandler",
+    "HumanApprovalCallbackHandler",
+    "MlflowCallbackHandler",
+    "OpenAICallbackHandler",
+    "StdOutCallbackHandler",
+    "StreamingStdOutCallbackHandler",
+    # now streamlit requires Python >=3.7, !=3.9.7 So, it is commented out here.
+    # "StreamlitCallbackHandler",
+    "WandbCallbackHandler",
     "WhyLabsCallbackHandler",
-    "AsyncIteratorCallbackHandler",
     "get_openai_callback",
     "tracing_enabled",
    "wandb_tracing_enabled",
-    "HumanApprovalCallbackHandler",
 ]
@@ -22,6 +22,7 @@ from langchain.chains.llm_summarization_checker.base import LLMSummarizationChec
 from langchain.chains.loading import load_chain
 from langchain.chains.mapreduce import MapReduceChain
 from langchain.chains.moderation import OpenAIModerationChain
+from langchain.chains.natbot.base import NatBotChain
 from langchain.chains.openai_functions import (
     create_extraction_chain,
     create_extraction_chain_pydantic,
@@ -34,6 +35,13 @@ from langchain.chains.qa_with_sources.base import QAWithSourcesChain
 from langchain.chains.qa_with_sources.retrieval import RetrievalQAWithSourcesChain
 from langchain.chains.qa_with_sources.vector_db import VectorDBQAWithSourcesChain
 from langchain.chains.retrieval_qa.base import RetrievalQA, VectorDBQA
+from langchain.chains.router import (
+    LLMRouterChain,
+    MultiPromptChain,
+    MultiRetrievalQAChain,
+    MultiRouteChain,
+    RouterChain,
+)
 from langchain.chains.sequential import SequentialChain, SimpleSequentialChain
 from langchain.chains.sql_database.base import (
     SQLDatabaseChain,
@@ -42,41 +50,47 @@ from langchain.chains.sql_database.base import (
 from langchain.chains.transform import TransformChain
 
 __all__ = [
-    "ConversationChain",
-    "LLMChain",
-    "LLMBashChain",
-    "LLMCheckerChain",
-    "LLMSummarizationCheckerChain",
-    "LLMMathChain",
-    "PALChain",
-    "QAWithSourcesChain",
-    "SQLDatabaseChain",
-    "SequentialChain",
-    "SimpleSequentialChain",
-    "VectorDBQA",
-    "VectorDBQAWithSourcesChain",
     "APIChain",
-    "LLMRequestsChain",
-    "TransformChain",
-    "MapReduceChain",
-    "OpenAIModerationChain",
-    "SQLDatabaseSequentialChain",
-    "load_chain",
     "AnalyzeDocumentChain",
-    "HypotheticalDocumentEmbedder",
     "ChatVectorDBChain",
-    "GraphQAChain",
-    "GraphCypherQAChain",
     "ConstitutionalChain",
+    "ConversationChain",
+    "ConversationalRetrievalChain",
+    "FlareChain",
+    "GraphCypherQAChain",
+    "GraphQAChain",
+    "HypotheticalDocumentEmbedder",
+    "LLMBashChain",
+    "LLMChain",
+    "LLMCheckerChain",
+    "LLMMathChain",
+    "LLMRequestsChain",
+    "LLMRouterChain",
+    "LLMSummarizationCheckerChain",
+    "MapReduceChain",
+    "MultiPromptChain",
+    "MultiRetrievalQAChain",
+    "MultiRouteChain",
+    "NatBotChain",
+    "NebulaGraphQAChain",
+    "OpenAIModerationChain",
+    "OpenAPIEndpointChain",
+    "PALChain",
     "QAGenerationChain",
+    "QAWithSourcesChain",
     "RetrievalQA",
     "RetrievalQAWithSourcesChain",
-    "ConversationalRetrievalChain",
-    "OpenAPIEndpointChain",
-    "FlareChain",
-    "NebulaGraphQAChain",
+    "RouterChain",
+    "SQLDatabaseChain",
+    "SQLDatabaseSequentialChain",
+    "SequentialChain",
+    "SimpleSequentialChain",
+    "TransformChain",
+    "VectorDBQA",
+    "VectorDBQAWithSourcesChain",
     "create_extraction_chain",
-    "create_tagging_chain",
     "create_extraction_chain_pydantic",
+    "create_tagging_chain",
     "create_tagging_chain_pydantic",
+    "load_chain",
 ]
@@ -1,5 +1,6 @@
 """Wrappers on top of docstores."""
+from langchain.docstore.arbitrary_fn import DocstoreFn
 from langchain.docstore.in_memory import InMemoryDocstore
 from langchain.docstore.wikipedia import Wikipedia
 
-__all__ = ["InMemoryDocstore", "Wikipedia"]
+__all__ = ["DocstoreFn", "InMemoryDocstore", "Wikipedia"]
@@ -16,6 +16,12 @@ from langchain.document_loaders.bibtex import BibtexLoader
 from langchain.document_loaders.bigquery import BigQueryLoader
 from langchain.document_loaders.bilibili import BiliBiliLoader
 from langchain.document_loaders.blackboard import BlackboardLoader
+from langchain.document_loaders.blob_loaders import (
+    Blob,
+    BlobLoader,
+    FileSystemBlobLoader,
+    YoutubeAudioLoader,
+)
 from langchain.document_loaders.blockchain import BlockchainDocumentLoader
 from langchain.document_loaders.chatgpt import ChatGPTLoader
 from langchain.document_loaders.college_confidential import CollegeConfidentialLoader
@@ -150,6 +156,8 @@ __all__ = [
     "BigQueryLoader",
     "BiliBiliLoader",
     "BlackboardLoader",
+    "Blob",
+    "BlobLoader",
     "BlockchainDocumentLoader",
     "CSVLoader",
     "ChatGPTLoader",
@@ -163,10 +171,13 @@ __all__ = [
     "DocugamiLoader",
     "Docx2txtLoader",
     "DuckDBLoader",
-    "FaunaLoader",
+    "EmbaasBlobLoader",
+    "EmbaasLoader",
     "EverNoteLoader",
     "FacebookChatLoader",
+    "FaunaLoader",
     "FigmaFileLoader",
+    "FileSystemBlobLoader",
     "GCSDirectoryLoader",
     "GCSFileLoader",
     "GitHubIssuesLoader",
@@ -194,8 +205,8 @@ __all__ = [
     "NotionDBLoader",
     "NotionDirectoryLoader",
     "ObsidianLoader",
-    "OneDriveLoader",
     "OneDriveFileLoader",
+    "OneDriveLoader",
     "OnlinePDFLoader",
     "OutlookMessageLoader",
     "PDFMinerLoader",
@@ -219,6 +230,7 @@ __all__ = [
     "SeleniumURLLoader",
     "SitemapLoader",
     "SlackDirectoryLoader",
+    "SnowflakeLoader",
     "SpreedlyLoader",
     "StripeLoader",
     "TelegramChatApiLoader",
@@ -251,8 +263,6 @@ __all__ = [
     "WebBaseLoader",
     "WhatsAppChatLoader",
     "WikipediaLoader",
+    "YoutubeAudioLoader",
     "YoutubeLoader",
-    "SnowflakeLoader",
-    "EmbaasLoader",
-    "EmbaasBlobLoader",
 ]
@@ -27,6 +27,7 @@ from langchain.llms.huggingface_pipeline import HuggingFacePipeline
 from langchain.llms.huggingface_text_gen_inference import HuggingFaceTextGenInference
 from langchain.llms.human import HumanInputLLM
 from langchain.llms.llamacpp import LlamaCpp
+from langchain.llms.manifest import ManifestWrapper
 from langchain.llms.modal import Modal
 from langchain.llms.mosaicml import MosaicML
 from langchain.llms.nlpcloud import NLPCloud
@@ -47,25 +48,34 @@ from langchain.llms.vertexai import VertexAI
 from langchain.llms.writer import Writer
 
 __all__ = [
-    "Anthropic",
+    "AI21",
     "AlephAlpha",
+    "Anthropic",
     "Anyscale",
     "Aviary",
+    "AzureOpenAI",
     "Banana",
     "Baseten",
     "Beam",
     "Bedrock",
+    "CTransformers",
     "CerebriumAI",
     "Cohere",
-    "CTransformers",
     "Databricks",
     "DeepInfra",
+    "FakeListLLM",
     "ForefrontAI",
+    "GPT4All",
     "GooglePalm",
     "GooseAI",
-    "GPT4All",
+    "HuggingFaceEndpoint",
+    "HuggingFaceHub",
+    "HuggingFacePipeline",
+    "HuggingFaceTextGenInference",
+    "HumanInputLLM",
     "LlamaCpp",
     "TextGen",
+    "ManifestWrapper",
     "Modal",
     "MosaicML",
     "NLPCloud",
@@ -74,25 +84,17 @@ __all__ = [
     "OpenLM",
     "Petals",
     "PipelineAI",
-    "HuggingFaceEndpoint",
-    "HuggingFaceHub",
-    "SagemakerEndpoint",
-    "HuggingFacePipeline",
-    "AI21",
-    "AzureOpenAI",
-    "Replicate",
-    "SelfHostedPipeline",
-    "SelfHostedHuggingFaceLLM",
+    "PredictionGuard",
     "PromptLayerOpenAI",
     "PromptLayerOpenAIChat",
-    "StochasticAI",
-    "Writer",
     "RWKV",
-    "PredictionGuard",
-    "HumanInputLLM",
-    "HuggingFaceTextGenInference",
-    "FakeListLLM",
+    "Replicate",
+    "SagemakerEndpoint",
+    "SelfHostedHuggingFaceLLM",
+    "SelfHostedPipeline",
+    "StochasticAI",
     "VertexAI",
+    "Writer",
 ]
 
 type_to_cls_dict: Dict[str, Type[BaseLLM]] = {
@@ -101,6 +103,7 @@ type_to_cls_dict: Dict[str, Type[BaseLLM]] = {
     "anthropic": Anthropic,
     "anyscale": Anyscale,
     "aviary": Aviary,
+    "azure": AzureOpenAI,
     "bananadev": Banana,
     "baseten": Baseten,
     "beam": Beam,
@@ -109,32 +112,31 @@ type_to_cls_dict: Dict[str, Type[BaseLLM]] = {
     "ctransformers": CTransformers,
     "databricks": Databricks,
     "deepinfra": DeepInfra,
+    "fake-list": FakeListLLM,
     "forefrontai": ForefrontAI,
     "google_palm": GooglePalm,
     "gooseai": GooseAI,
     "gpt4all": GPT4All,
-    "huggingface_hub": HuggingFaceHub,
     "huggingface_endpoint": HuggingFaceEndpoint,
+    "huggingface_hub": HuggingFaceHub,
+    "huggingface_pipeline": HuggingFacePipeline,
+    "huggingface_textgen_inference": HuggingFaceTextGenInference,
+    "human-input": HumanInputLLM,
     "llamacpp": LlamaCpp,
     "textgen": TextGen,
     "modal": Modal,
     "mosaic": MosaicML,
-    "sagemaker_endpoint": SagemakerEndpoint,
     "nlpcloud": NLPCloud,
-    "human-input": HumanInputLLM,
     "openai": OpenAI,
     "openlm": OpenLM,
     "petals": Petals,
     "pipelineai": PipelineAI,
-    "huggingface_pipeline": HuggingFacePipeline,
-    "azure": AzureOpenAI,
     "replicate": Replicate,
+    "rwkv": RWKV,
+    "sagemaker_endpoint": SagemakerEndpoint,
     "self_hosted": SelfHostedPipeline,
     "self_hosted_hugging_face": SelfHostedHuggingFaceLLM,
     "stochasticai": StochasticAI,
-    "writer": Writer,
-    "rwkv": RWKV,
-    "huggingface_textgen_inference": HuggingFaceTextGenInference,
-    "fake-list": FakeListLLM,
     "vertexai": VertexAI,
+    "writer": Writer,
 }
@@ -3,17 +3,19 @@ from langchain.memory.buffer import (
     ConversationStringBufferMemory,
 )
 from langchain.memory.buffer_window import ConversationBufferWindowMemory
-from langchain.memory.chat_message_histories import MomentoChatMessageHistory
-from langchain.memory.chat_message_histories.cassandra import (
+from langchain.memory.chat_message_histories import (
     CassandraChatMessageHistory,
+    ChatMessageHistory,
+    CosmosDBChatMessageHistory,
+    DynamoDBChatMessageHistory,
+    FileChatMessageHistory,
+    MomentoChatMessageHistory,
+    MongoDBChatMessageHistory,
+    PostgresChatMessageHistory,
+    RedisChatMessageHistory,
+    SQLChatMessageHistory,
+    ZepChatMessageHistory,
 )
-from langchain.memory.chat_message_histories.cosmos_db import CosmosDBChatMessageHistory
-from langchain.memory.chat_message_histories.dynamodb import DynamoDBChatMessageHistory
-from langchain.memory.chat_message_histories.file import FileChatMessageHistory
-from langchain.memory.chat_message_histories.in_memory import ChatMessageHistory
-from langchain.memory.chat_message_histories.mongodb import MongoDBChatMessageHistory
-from langchain.memory.chat_message_histories.postgres import PostgresChatMessageHistory
-from langchain.memory.chat_message_histories.redis import RedisChatMessageHistory
 from langchain.memory.combined import CombinedMemory
 from langchain.memory.entity import (
     ConversationEntityMemory,
@@ -22,6 +24,7 @@ from langchain.memory.entity import (
     SQLiteEntityStore,
 )
 from langchain.memory.kg import ConversationKGMemory
+from langchain.memory.motorhead_memory import MotorheadMemory
 from langchain.memory.readonly import ReadOnlySharedMemory
 from langchain.memory.simple import SimpleMemory
 from langchain.memory.summary import ConversationSummaryMemory
@@ -30,28 +33,31 @@ from langchain.memory.token_buffer import ConversationTokenBufferMemory
 from langchain.memory.vectorstore import VectorStoreRetrieverMemory
 
 __all__ = [
-    "CombinedMemory",
-    "ConversationBufferWindowMemory",
-    "ConversationBufferMemory",
-    "SimpleMemory",
-    "ConversationSummaryBufferMemory",
-    "ConversationKGMemory",
-    "ConversationEntityMemory",
-    "InMemoryEntityStore",
-    "RedisEntityStore",
-    "SQLiteEntityStore",
-    "ConversationSummaryMemory",
-    "ChatMessageHistory",
-    "ConversationStringBufferMemory",
-    "ReadOnlySharedMemory",
-    "ConversationTokenBufferMemory",
-    "RedisChatMessageHistory",
-    "DynamoDBChatMessageHistory",
-    "PostgresChatMessageHistory",
-    "VectorStoreRetrieverMemory",
-    "CosmosDBChatMessageHistory",
-    "FileChatMessageHistory",
-    "MongoDBChatMessageHistory",
     "CassandraChatMessageHistory",
+    "ChatMessageHistory",
+    "CombinedMemory",
+    "ConversationBufferMemory",
+    "ConversationBufferWindowMemory",
+    "ConversationEntityMemory",
+    "ConversationKGMemory",
+    "ConversationStringBufferMemory",
+    "ConversationSummaryBufferMemory",
+    "ConversationSummaryMemory",
+    "ConversationTokenBufferMemory",
+    "CosmosDBChatMessageHistory",
+    "DynamoDBChatMessageHistory",
+    "FileChatMessageHistory",
+    "InMemoryEntityStore",
     "MomentoChatMessageHistory",
+    "MongoDBChatMessageHistory",
+    "MotorheadMemory",
+    "PostgresChatMessageHistory",
+    "ReadOnlySharedMemory",
+    "RedisChatMessageHistory",
+    "RedisEntityStore",
+    "SQLChatMessageHistory",
+    "SQLiteEntityStore",
+    "SimpleMemory",
+    "VectorStoreRetrieverMemory",
+    "ZepChatMessageHistory",
 ]
@@ -7,6 +7,7 @@ from langchain.memory.chat_message_histories.file import FileChatMessageHistory
 from langchain.memory.chat_message_histories.firestore import (
     FirestoreChatMessageHistory,
 )
+from langchain.memory.chat_message_histories.in_memory import ChatMessageHistory
 from langchain.memory.chat_message_histories.momento import MomentoChatMessageHistory
 from langchain.memory.chat_message_histories.mongodb import MongoDBChatMessageHistory
 from langchain.memory.chat_message_histories.postgres import PostgresChatMessageHistory
@@ -15,15 +16,16 @@ from langchain.memory.chat_message_histories.sql import SQLChatMessageHistory
 from langchain.memory.chat_message_histories.zep import ZepChatMessageHistory
 
 __all__ = [
-    "DynamoDBChatMessageHistory",
-    "RedisChatMessageHistory",
-    "PostgresChatMessageHistory",
-    "SQLChatMessageHistory",
-    "FileChatMessageHistory",
-    "CosmosDBChatMessageHistory",
-    "FirestoreChatMessageHistory",
-    "MongoDBChatMessageHistory",
+    "ChatMessageHistory",
     "CassandraChatMessageHistory",
-    "ZepChatMessageHistory",
+    "CosmosDBChatMessageHistory",
+    "DynamoDBChatMessageHistory",
+    "FileChatMessageHistory",
+    "FirestoreChatMessageHistory",
     "MomentoChatMessageHistory",
+    "MongoDBChatMessageHistory",
+    "PostgresChatMessageHistory",
+    "RedisChatMessageHistory",
+    "SQLChatMessageHistory",
+    "ZepChatMessageHistory",
 ]
@@ -1,4 +1,7 @@
+from langchain.output_parsers.boolean import BooleanOutputParser
+from langchain.output_parsers.combining import CombiningOutputParser
 from langchain.output_parsers.datetime import DatetimeOutputParser
+from langchain.output_parsers.enum import EnumOutputParser
 from langchain.output_parsers.fix import OutputFixingParser
 from langchain.output_parsers.list import (
     CommaSeparatedListOutputParser,
@@ -12,16 +15,19 @@ from langchain.output_parsers.retry import RetryOutputParser, RetryWithErrorOutp
 from langchain.output_parsers.structured import ResponseSchema, StructuredOutputParser
 
 __all__ = [
-    "RegexParser",
-    "RegexDictParser",
-    "ListOutputParser",
+    "BooleanOutputParser",
+    "CombiningOutputParser",
     "CommaSeparatedListOutputParser",
-    "StructuredOutputParser",
-    "ResponseSchema",
+    "DatetimeOutputParser",
+    "EnumOutputParser",
     "GuardrailsOutputParser",
+    "ListOutputParser",
+    "OutputFixingParser",
     "PydanticOutputParser",
+    "RegexDictParser",
+    "RegexParser",
+    "ResponseSchema",
     "RetryOutputParser",
     "RetryWithErrorOutputParser",
-    "OutputFixingParser",
-    "DatetimeOutputParser",
+    "StructuredOutputParser",
 ]
@@ -9,24 +9,36 @@ from langchain.prompts.chat import (
     MessagesPlaceholder,
     SystemMessagePromptTemplate,
 )
+from langchain.prompts.example_selector import (
+    LengthBasedExampleSelector,
+    MaxMarginalRelevanceExampleSelector,
+    NGramOverlapExampleSelector,
+    SemanticSimilarityExampleSelector,
+)
 from langchain.prompts.few_shot import FewShotPromptTemplate
 from langchain.prompts.few_shot_with_templates import FewShotPromptWithTemplates
 from langchain.prompts.loading import load_prompt
+from langchain.prompts.pipeline import PipelinePromptTemplate
 from langchain.prompts.prompt import Prompt, PromptTemplate
 
 __all__ = [
-    "BasePromptTemplate",
-    "StringPromptTemplate",
-    "load_prompt",
-    "PromptTemplate",
-    "FewShotPromptTemplate",
-    "Prompt",
-    "FewShotPromptWithTemplates",
-    "ChatPromptTemplate",
-    "MessagesPlaceholder",
-    "HumanMessagePromptTemplate",
     "AIMessagePromptTemplate",
-    "SystemMessagePromptTemplate",
-    "ChatMessagePromptTemplate",
     "BaseChatPromptTemplate",
+    "BasePromptTemplate",
+    "ChatMessagePromptTemplate",
+    "ChatPromptTemplate",
+    "FewShotPromptTemplate",
+    "FewShotPromptWithTemplates",
+    "HumanMessagePromptTemplate",
+    "LengthBasedExampleSelector",
+    "MaxMarginalRelevanceExampleSelector",
+    "MessagesPlaceholder",
+    "NGramOverlapExampleSelector",
+    "PipelinePromptTemplate",
+    "Prompt",
+    "PromptTemplate",
+    "SemanticSimilarityExampleSelector",
+    "StringPromptTemplate",
+    "SystemMessagePromptTemplate",
+    "load_prompt",
 ]
@@ -1,5 +1,6 @@
 """Logic for selecting examples to include in prompts."""
 from langchain.prompts.example_selector.length_based import LengthBasedExampleSelector
+from langchain.prompts.example_selector.ngram_overlap import NGramOverlapExampleSelector
 from langchain.prompts.example_selector.semantic_similarity import (
     MaxMarginalRelevanceExampleSelector,
     SemanticSimilarityExampleSelector,
@@ -7,6 +8,7 @@ from langchain.prompts.example_selector.semantic_similarity import (
 
 __all__ = [
     "LengthBasedExampleSelector",
-    "SemanticSimilarityExampleSelector",
     "MaxMarginalRelevanceExampleSelector",
+    "NGramOverlapExampleSelector",
+    "SemanticSimilarityExampleSelector",
 ]
@@ -7,8 +7,13 @@ from langchain.retrievers.databerry import DataberryRetriever
 from langchain.retrievers.docarray import DocArrayRetriever
 from langchain.retrievers.elastic_search_bm25 import ElasticSearchBM25Retriever
 from langchain.retrievers.knn import KNNRetriever
+from langchain.retrievers.llama_index import (
+    LlamaIndexGraphRetriever,
+    LlamaIndexRetriever,
+)
 from langchain.retrievers.merger_retriever import MergerRetriever
 from langchain.retrievers.metal import MetalRetriever
+from langchain.retrievers.milvus import MilvusRetriever
 from langchain.retrievers.pinecone_hybrid_search import PineconeHybridSearchRetriever
 from langchain.retrievers.pupmed import PubMedRetriever
 from langchain.retrievers.remote_retriever import RemoteLangChainRetriever
@@ -22,10 +27,10 @@ from langchain.retrievers.vespa_retriever import VespaRetriever
 from langchain.retrievers.weaviate_hybrid_search import WeaviateHybridSearchRetriever
 from langchain.retrievers.wikipedia import WikipediaRetriever
 from langchain.retrievers.zep import ZepRetriever
+from langchain.retrievers.zilliz import ZillizRetriever
 
 __all__ = [
     "ArxivRetriever",
-    "PubMedRetriever",
     "AwsKendraIndexRetriever",
     "AzureCognitiveSearchRetriever",
     "ChatGPTPluginRetriever",
@@ -33,9 +38,13 @@ __all__ = [
     "DataberryRetriever",
     "ElasticSearchBM25Retriever",
     "KNNRetriever",
+    "LlamaIndexGraphRetriever",
+    "LlamaIndexRetriever",
     "MergerRetriever",
     "MetalRetriever",
+    "MilvusRetriever",
     "PineconeHybridSearchRetriever",
+    "PubMedRetriever",
     "RemoteLangChainRetriever",
     "SVMRetriever",
     "SelfQueryRetriever",
@@ -45,5 +54,6 @@ __all__ = [
     "WeaviateHybridSearchRetriever",
     "WikipediaRetriever",
     "ZepRetriever",
+    "ZillizRetriever",
     "DocArrayRetriever",
 ]
@@ -1,5 +1,6 @@
 """Core toolkit implementations."""
 
+from langchain.tools.arxiv.tool import ArxivQueryRun
 from langchain.tools.azure_cognitive_services import (
     AzureCogsFormRecognizerTool,
     AzureCogsImageAnalysisTool,
@@ -11,13 +12,15 @@ from langchain.tools.bing_search.tool import BingSearchResults, BingSearchRun
 from langchain.tools.brave_search.tool import BraveSearch
 from langchain.tools.convert_to_openai import format_tool_to_openai_function
 from langchain.tools.ddg_search.tool import DuckDuckGoSearchResults, DuckDuckGoSearchRun
-from langchain.tools.file_management.copy import CopyFileTool
-from langchain.tools.file_management.delete import DeleteFileTool
-from langchain.tools.file_management.file_search import FileSearchTool
-from langchain.tools.file_management.list_dir import ListDirectoryTool
-from langchain.tools.file_management.move import MoveFileTool
-from langchain.tools.file_management.read import ReadFileTool
-from langchain.tools.file_management.write import WriteFileTool
+from langchain.tools.file_management import (
+    CopyFileTool,
+    DeleteFileTool,
+    FileSearchTool,
+    ListDirectoryTool,
+    MoveFileTool,
+    ReadFileTool,
+    WriteFileTool,
+)
 from langchain.tools.gmail import (
     GmailCreateDraft,
     GmailGetMessage,
@@ -28,8 +31,12 @@ from langchain.tools.gmail import (
 from langchain.tools.google_places.tool import GooglePlacesTool
 from langchain.tools.google_search.tool import GoogleSearchResults, GoogleSearchRun
 from langchain.tools.google_serper.tool import GoogleSerperResults, GoogleSerperRun
+from langchain.tools.graphql.tool import BaseGraphQLTool
 from langchain.tools.human.tool import HumanInputRun
 from langchain.tools.ifttt import IFTTTWebhook
+from langchain.tools.interaction.tool import StdInInquireTool
+from langchain.tools.jira.tool import JiraAction
+from langchain.tools.json.tool import JsonGetValueTool, JsonListKeysTool
 from langchain.tools.metaphor_search import MetaphorSearchResults
 from langchain.tools.openapi.utils.api_models import APIOperation
 from langchain.tools.openapi.utils.openapi_utils import OpenAPISpec
@@ -50,8 +57,33 @@ from langchain.tools.powerbi.tool import (
     QueryPowerBITool,
 )
 from langchain.tools.pubmed.tool import PubmedQueryRun
+from langchain.tools.python.tool import PythonAstREPLTool, PythonREPLTool
+from langchain.tools.requests.tool import (
+    BaseRequestsTool,
+    RequestsDeleteTool,
+    RequestsGetTool,
+    RequestsPatchTool,
+    RequestsPostTool,
+    RequestsPutTool,
+)
 from langchain.tools.scenexplain.tool import SceneXplainTool
+from langchain.tools.searx_search.tool import SearxSearchResults, SearxSearchRun
 from langchain.tools.shell.tool import ShellTool
+from langchain.tools.sleep.tool import SleepTool
+from langchain.tools.spark_sql.tool import (
+    BaseSparkSQLTool,
+    InfoSparkSQLTool,
+    ListSparkSQLTool,
+    QueryCheckerTool,
+    QuerySparkSQLTool,
+)
+from langchain.tools.sql_database.tool import (
+    BaseSQLDatabaseTool,
+    InfoSQLDatabaseTool,
+    ListSQLDatabaseTool,
+    QuerySQLCheckerTool,
+    QuerySQLDataBaseTool,
+)
 from langchain.tools.steamship_image_generation import SteamshipImageGenerationTool
 from langchain.tools.vectorstore.tool import (
     VectorStoreQATool,
@@ -65,15 +97,21 @@ from langchain.tools.zapier.tool import ZapierNLAListActions, ZapierNLARunAction
 __all__ = [
     "AIPluginTool",
     "APIOperation",
+    "ArxivQueryRun",
     "AzureCogsFormRecognizerTool",
     "AzureCogsImageAnalysisTool",
     "AzureCogsSpeech2TextTool",
     "AzureCogsText2SpeechTool",
+    "BaseGraphQLTool",
+    "BaseRequestsTool",
+    "BaseSQLDatabaseTool",
+    "BaseSparkSQLTool",
     "BaseTool",
     "BaseTool",
     "BaseTool",
     "BingSearchResults",
     "BingSearchRun",
+    "BraveSearch",
     "ClickTool",
     "CopyFileTool",
     "CurrentWebPageTool",
@@ -84,7 +122,6 @@ __all__ = [
     "ExtractTextTool",
     "FileSearchTool",
     "GetElementsTool",
-    "SteamshipImageGenerationTool",
     "GmailCreateDraft",
     "GmailGetMessage",
     "GmailGetThread",
@@ -98,18 +135,42 @@ __all__ = [
     "HumanInputRun",
     "IFTTTWebhook",
     "InfoPowerBITool",
+    "InfoSQLDatabaseTool",
+    "InfoSparkSQLTool",
+    "JiraAction",
+    "JsonGetValueTool",
+    "JsonListKeysTool",
     "ListDirectoryTool",
     "ListPowerBITool",
+    "ListSQLDatabaseTool",
+    "ListSparkSQLTool",
     "MetaphorSearchResults",
     "MoveFileTool",
     "NavigateBackTool",
     "NavigateTool",
     "OpenAPISpec",
     "OpenWeatherMapQueryRun",
+    "PubmedQueryRun",
+    "PythonAstREPLTool",
+    "PythonREPLTool",
+    "QueryCheckerTool",
     "QueryPowerBITool",
+    "QuerySQLCheckerTool",
+    "QuerySQLDataBaseTool",
+    "QuerySparkSQLTool",
     "ReadFileTool",
+    "RequestsDeleteTool",
+    "RequestsGetTool",
+    "RequestsPatchTool",
+    "RequestsPostTool",
+    "RequestsPutTool",
     "SceneXplainTool",
+    "SearxSearchResults",
+    "SearxSearchRun",
     "ShellTool",
+    "SleepTool",
+    "StdInInquireTool",
+    "SteamshipImageGenerationTool",
     "StructuredTool",
     "Tool",
     "VectorStoreQATool",
@@ -117,11 +178,9 @@ __all__ = [
     "WikipediaQueryRun",
     "WolframAlphaQueryRun",
     "WriteFileTool",
+    "YouTubeSearchTool",
     "ZapierNLAListActions",
     "ZapierNLARunAction",
-    "tool",
-    "YouTubeSearchTool",
-    "BraveSearch",
-    "PubmedQueryRun",
     "format_tool_to_openai_function",
+    "tool",
 ]
|
@ -40,9 +40,9 @@ class SearxSearchRun(BaseTool):
|
|||||||
|
|
||||||
|
|
||||||
class SearxSearchResults(BaseTool):
|
class SearxSearchResults(BaseTool):
|
||||||
"""Tool that has capability to query a Searx instance and get back json."""
|
"""Tool that has the capability to query a Searx instance and get back json."""
|
||||||
|
|
||||||
name = "Searx Search"
|
name = "Searx Search Results"
|
||||||
description = (
|
description = (
|
||||||
"A meta search engine."
|
"A meta search engine."
|
||||||
"Useful for when you need to answer questions about current events."
|
"Useful for when you need to answer questions about current events."
|
||||||
|
@@ -33,7 +33,7 @@ class BaseSQLDatabaseTool(BaseModel):
 class QuerySQLDataBaseTool(BaseSQLDatabaseTool, BaseTool):
     """Tool for querying a SQL database."""
 
-    name = "query_sql_db"
+    name = "sql_db_query"
     description = """
     Input to this tool is a detailed and correct SQL query, output is a result from the database.
     If the query is not correct, an error message will be returned.
@@ -59,7 +59,7 @@ class QuerySQLDataBaseTool(BaseSQLDatabaseTool, BaseTool):
 class InfoSQLDatabaseTool(BaseSQLDatabaseTool, BaseTool):
     """Tool for getting metadata about a SQL database."""
 
-    name = "schema_sql_db"
+    name = "sql_db_schema"
     description = """
     Input to this tool is a comma-separated list of tables, output is the schema and sample rows for those tables.
 
@@ -85,7 +85,7 @@ class InfoSQLDatabaseTool(BaseSQLDatabaseTool, BaseTool):
 class ListSQLDatabaseTool(BaseSQLDatabaseTool, BaseTool):
     """Tool for getting tables names."""
 
-    name = "list_tables_sql_db"
+    name = "sql_db_list_tables"
     description = "Input is an empty string, output is a comma separated list of tables in the database."
 
     def _run(
@@ -104,14 +104,14 @@ class ListSQLDatabaseTool(BaseSQLDatabaseTool, BaseTool):
         raise NotImplementedError("ListTablesSqlDbTool does not support async")
 
 
-class QueryCheckerTool(BaseSQLDatabaseTool, BaseTool):
+class QuerySQLCheckerTool(BaseSQLDatabaseTool, BaseTool):
     """Use an LLM to check if a query is correct.
     Adapted from https://www.patterns.app/blog/2023/01/18/crunchbot-sql-analyst-gpt/"""
 
     template: str = QUERY_CHECKER
     llm: BaseLanguageModel
     llm_chain: LLMChain = Field(init=False)
-    name = "query_checker_sql_db"
+    name = "sql_db_query_checker"
     description = """
     Use this tool to double check if your query is correct before executing it.
     Always use this tool before executing a query with query_sql_db!
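For downstream code, the practical effect of the rename above is a new import for the SQL-database checker, while the Spark SQL checker keeps its old name. A minimal, hedged sketch (the `db` and `llm` objects are placeholders, not defined here):

```python
# The SQL-database checker is now QuerySQLCheckerTool; the Spark SQL one is unchanged.
from langchain.tools.spark_sql.tool import QueryCheckerTool  # Spark SQL variant, unchanged
from langchain.tools.sql_database.tool import QuerySQLCheckerTool  # renamed in this PR

# Hypothetical wiring, mirroring the SQLDatabaseToolkit usage shown earlier:
# checker = QuerySQLCheckerTool(db=db, llm=llm)
```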
@@ -4,40 +4,51 @@ from langchain.utilities.apify import ApifyWrapper
 from langchain.utilities.arxiv import ArxivAPIWrapper
 from langchain.utilities.awslambda import LambdaWrapper
 from langchain.utilities.bash import BashProcess
+from langchain.utilities.bibtex import BibtexparserWrapper
 from langchain.utilities.bing_search import BingSearchAPIWrapper
+from langchain.utilities.brave_search import BraveSearchWrapper
 from langchain.utilities.duckduckgo_search import DuckDuckGoSearchAPIWrapper
 from langchain.utilities.google_places_api import GooglePlacesAPIWrapper
 from langchain.utilities.google_search import GoogleSearchAPIWrapper
 from langchain.utilities.google_serper import GoogleSerperAPIWrapper
 from langchain.utilities.graphql import GraphQLAPIWrapper
+from langchain.utilities.jira import JiraAPIWrapper
+from langchain.utilities.max_compute import MaxComputeAPIWrapper
 from langchain.utilities.metaphor_search import MetaphorSearchAPIWrapper
 from langchain.utilities.openweathermap import OpenWeatherMapAPIWrapper
 from langchain.utilities.powerbi import PowerBIDataset
 from langchain.utilities.pupmed import PubMedAPIWrapper
 from langchain.utilities.python import PythonREPL
+from langchain.utilities.scenexplain import SceneXplainAPIWrapper
 from langchain.utilities.searx_search import SearxSearchWrapper
 from langchain.utilities.serpapi import SerpAPIWrapper
 from langchain.utilities.spark_sql import SparkSQL
 from langchain.utilities.twilio import TwilioAPIWrapper
 from langchain.utilities.wikipedia import WikipediaAPIWrapper
 from langchain.utilities.wolfram_alpha import WolframAlphaAPIWrapper
+from langchain.utilities.zapier import ZapierNLAWrapper
 
 __all__ = [
     "ApifyWrapper",
     "ArxivAPIWrapper",
-    "PubMedAPIWrapper",
     "BashProcess",
+    "BibtexparserWrapper",
     "BingSearchAPIWrapper",
+    "BraveSearchWrapper",
     "DuckDuckGoSearchAPIWrapper",
     "GooglePlacesAPIWrapper",
     "GoogleSearchAPIWrapper",
     "GoogleSerperAPIWrapper",
     "GraphQLAPIWrapper",
+    "JiraAPIWrapper",
     "LambdaWrapper",
+    "MaxComputeAPIWrapper",
     "MetaphorSearchAPIWrapper",
     "OpenWeatherMapAPIWrapper",
     "PowerBIDataset",
+    "PubMedAPIWrapper",
     "PythonREPL",
+    "SceneXplainAPIWrapper",
     "SearxSearchWrapper",
     "SerpAPIWrapper",
     "SparkSQL",
@@ -45,4 +56,5 @@ __all__ = [
     "TwilioAPIWrapper",
     "WikipediaAPIWrapper",
     "WolframAlphaAPIWrapper",
+    "ZapierNLAWrapper",
 ]
@@ -32,38 +32,38 @@ from langchain.vectorstores.weaviate import Weaviate
 from langchain.vectorstores.zilliz import Zilliz
 
 __all__ = [
+    "AnalyticDB",
+    "Annoy",
+    "AtlasDB",
+    "AwaDB",
     "AzureSearch",
-    "Redis",
+    "Chroma",
+    "Clickhouse",
+    "ClickhouseSettings",
+    "DeepLake",
+    "DocArrayHnswSearch",
+    "DocArrayInMemorySearch",
     "ElasticVectorSearch",
     "FAISS",
-    "VectorStore",
-    "Pinecone",
-    "Weaviate",
-    "Qdrant",
+    "Hologres",
+    "LanceDB",
+    "MatchingEngine",
     "Milvus",
-    "Zilliz",
-    "SingleStoreDB",
-    "Chroma",
-    "OpenSearchVectorSearch",
-    "AtlasDB",
-    "DeepLake",
-    "Annoy",
     "MongoDBAtlasVectorSearch",
     "MyScale",
     "MyScaleSettings",
+    "OpenSearchVectorSearch",
+    "Pinecone",
+    "Qdrant",
+    "Redis",
     "SKLearnVectorStore",
+    "SingleStoreDB",
     "SupabaseVectorStore",
-    "AnalyticDB",
-    "Vectara",
     "Tair",
-    "LanceDB",
-    "DocArrayHnswSearch",
-    "DocArrayInMemorySearch",
-    "Typesense",
-    "Hologres",
-    "Clickhouse",
-    "ClickhouseSettings",
     "Tigris",
-    "MatchingEngine",
-    "AwaDB",
+    "Typesense",
+    "Vectara",
+    "VectorStore",
+    "Weaviate",
+    "Zilliz",
 ]
poetry.lock: 1250 lines changed (generated file; diff suppressed because it is too large).
@ -108,6 +108,9 @@ nebula3-python = {version = "^3.4.0", optional = true}
 langchainplus-sdk = ">=0.0.9"
 awadb = {version = "^0.3.3", optional = true}
 azure-search-documents = {version = "11.4.0a20230509004", source = "azure-sdk-dev", optional = true}
+# now streamlit requires Python >=3.7, !=3.9.7 So, it is commented out.
+#streamlit = {version = "^1.18.0", optional = true}
+
 
 [tool.poetry.group.docs.dependencies]
 autodoc_pydantic = "^1.8.0"
@ -137,6 +140,7 @@ freezegun = "^1.2.2"
 responses = "^0.22.0"
 pytest-asyncio = "^0.20.3"
 lark = "^1.1.5"
+pandas = "^2.0.0"
 pytest-mock = "^3.10.0"
 pytest-socket = "^0.6.0"
 syrupy = "^4.0.2"
@ -169,9 +173,7 @@ pinecone-client = "^2.2.1"
 pinecone-text = "^0.4.2"
 pymongo = "^4.3.3"
 clickhouse-connect = "^0.5.14"
-pgvector = "^0.1.6"
 transformers = "^4.27.4"
-pandas = "^2.0.0"
 deeplake = "^3.2.21"
 weaviate-client = "^3.15.5"
 torch = "^1.0.0"
@ -309,6 +311,7 @@ extended_testing = [
     "chardet",
     "jq",
     "pdfminer.six",
+    "pgvector",
     "pypdf",
     "pymupdf",
     "pypdfium2",
@ -325,6 +328,8 @@ extended_testing = [
     "html2text",
     "py-trello",
     "scikit-learn",
+    # now streamlit requires Python >=3.7, !=3.9.7 So, it is commented out.
+    # "streamlit",
     "pyspark",
     "openai"
 ]
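
The `pyproject.toml` hunks above can be cross-checked mechanically; a hedged sketch (assuming Python 3.11+ for the stdlib `tomllib`, and assuming `extended_testing` sits under `[tool.poetry.extras]`):

    # Parse pyproject.toml and confirm where the new dependencies landed.
    import tomllib  # stdlib on Python 3.11+; use the tomli package on older versions
    from pathlib import Path

    poetry = tomllib.loads(Path("pyproject.toml").read_text())["tool"]["poetry"]

    assert "pgvector" in poetry["extras"]["extended_testing"]
    assert "pandas" in poetry["group"]["test"]["dependencies"]
    assert "streamlit" not in poetry["dependencies"]  # commented out for now
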
@ -11,6 +11,7 @@ _EXPECTED = [
     "ConversationalChatAgent",
     "LLMSingleActionAgent",
     "MRKLChain",
+    "OpenAIFunctionsAgent",
     "ReActChain",
     "ReActTextWorldAgent",
     "SelfAskWithSearchChain",
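
With `OpenAIFunctionsAgent` added to the expected public names, the flat import becomes part of the contract; a minimal sketch (constructing the agent still needs an OpenAI chat model, which is out of scope here):

    from langchain import agents
    from langchain.agents import OpenAIFunctionsAgent  # noqa: F401

    assert "OpenAIFunctionsAgent" in agents.__all__
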
|
@ -21,7 +21,13 @@ def non_abstract_subclasses(
|
|||||||
return subclasses
|
return subclasses
|
||||||
|
|
||||||
|
|
||||||
_PARSERS_TO_SKIP = {"FakeOutputParser", "BaseOutputParser"}
|
# parsers defined not in the output_parsers module:
|
||||||
|
_PARSERS_TO_SKIP = {
|
||||||
|
"FakeOutputParser",
|
||||||
|
"BaseOutputParser",
|
||||||
|
"FinishedOutputParser",
|
||||||
|
"RouterOutputParser",
|
||||||
|
}
|
||||||
_NON_ABSTRACT_PARSERS = non_abstract_subclasses(
|
_NON_ABSTRACT_PARSERS = non_abstract_subclasses(
|
||||||
BaseOutputParser, to_skip=_PARSERS_TO_SKIP
|
BaseOutputParser, to_skip=_PARSERS_TO_SKIP
|
||||||
)
|
)
|
||||||
|
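
`non_abstract_subclasses` itself is defined earlier in this test file and is not shown in the hunk; a hypothetical equivalent (illustration only, not the repository's actual helper) of how such a skip set is typically consumed:

    # Walk the subclass tree, dropping abstract classes and anything in the skip set.
    import inspect
    from typing import List, Optional, Set, Type


    def non_abstract_subclasses_sketch(
        cls: Type, *, to_skip: Optional[Set[str]] = None
    ) -> List[Type]:
        found: List[Type] = []
        for sub in cls.__subclasses__():
            if not inspect.isabstract(sub) and not (to_skip and sub.__name__ in to_skip):
                found.append(sub)
            found.extend(non_abstract_subclasses_sketch(sub, to_skip=to_skip))  # recurse
        return found
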
@ -61,9 +61,10 @@ def test_test_group_dependencies(poetry_conf: Mapping[str, Any]) -> None:
     test_group_deps = sorted(poetry_conf["group"]["test"]["dependencies"])
 
     assert test_group_deps == [
-        "duckdb-engine",  # Should be removed
+        "duckdb-engine",
         "freezegun",
-        "lark",  # Should be removed
+        "lark",
+        "pandas",
         "pytest",
         "pytest-asyncio",
         "pytest-cov",
@ -18,10 +18,9 @@ def _get_tool_classes(skip_tools_without_default_names: bool) -> List[Type[BaseT
         if isinstance(tool_class, type) and issubclass(tool_class, BaseTool):
             if tool_class in _EXCLUDE:
                 continue
-            if (
-                skip_tools_without_default_names
-                and tool_class.__fields__["name"].default is None
-            ):
+            if skip_tools_without_default_names and tool_class.__fields__[
+                "name"
+            ].default in [None, ""]:
                 continue
             results.append(tool_class)
     return results
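
The rewritten condition also treats an empty-string default as "no default name"; a small illustration (assumes the pydantic v1 field API, which BaseTool used at this point; `HypotheticalTool` is made up for the example):

    from pydantic import BaseModel  # assumption: pydantic v1 API


    class HypotheticalTool(BaseModel):
        name: str = ""  # a default exists, but it is empty
        description: str = "does nothing"


    # `is None` would miss this case; `in [None, ""]` catches it.
    assert HypotheticalTool.__fields__["name"].default == ""
    assert HypotheticalTool.__fields__["name"].default in [None, ""]
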
@ -4,15 +4,21 @@ from langchain.tools import __all__ as public_api
 _EXPECTED = [
     "AIPluginTool",
     "APIOperation",
+    "ArxivQueryRun",
     "AzureCogsFormRecognizerTool",
     "AzureCogsImageAnalysisTool",
     "AzureCogsSpeech2TextTool",
     "AzureCogsText2SpeechTool",
+    "BaseGraphQLTool",
+    "BaseRequestsTool",
+    "BaseSQLDatabaseTool",
+    "BaseSparkSQLTool",
     "BaseTool",
     "BaseTool",
     "BaseTool",
     "BingSearchResults",
     "BingSearchRun",
+    "BraveSearch",
     "ClickTool",
     "CopyFileTool",
     "CurrentWebPageTool",
@ -36,18 +42,41 @@ _EXPECTED = [
     "HumanInputRun",
     "IFTTTWebhook",
     "InfoPowerBITool",
+    "InfoSQLDatabaseTool",
+    "InfoSparkSQLTool",
+    "JiraAction",
+    "JsonGetValueTool",
+    "JsonListKeysTool",
     "ListDirectoryTool",
     "ListPowerBITool",
+    "ListSQLDatabaseTool",
+    "ListSparkSQLTool",
     "MetaphorSearchResults",
     "MoveFileTool",
     "NavigateBackTool",
     "NavigateTool",
     "OpenAPISpec",
     "OpenWeatherMapQueryRun",
+    "PubmedQueryRun",
+    "PythonAstREPLTool",
+    "PythonREPLTool",
+    "QueryCheckerTool",
     "QueryPowerBITool",
+    "QuerySQLCheckerTool",
+    "QuerySQLDataBaseTool",
+    "QuerySparkSQLTool",
     "ReadFileTool",
+    "RequestsDeleteTool",
+    "RequestsGetTool",
+    "RequestsPatchTool",
+    "RequestsPostTool",
+    "RequestsPutTool",
     "SceneXplainTool",
+    "SearxSearchResults",
+    "SearxSearchRun",
     "ShellTool",
+    "SleepTool",
+    "StdInInquireTool",
     "SteamshipImageGenerationTool",
     "StructuredTool",
     "Tool",
@ -56,13 +85,11 @@ _EXPECTED = [
     "WikipediaQueryRun",
     "WolframAlphaQueryRun",
     "WriteFileTool",
+    "YouTubeSearchTool",
     "ZapierNLAListActions",
     "ZapierNLARunAction",
-    "tool",
-    "YouTubeSearchTool",
-    "BraveSearch",
-    "PubmedQueryRun",
     "format_tool_to_openai_function",
+    "tool",
 ]
 
 
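
After the rename, the Spark SQL checker and the SQL database checker are exported under distinct names and can be imported together; a minimal sketch (assuming langchain at this commit is installed):

    import langchain.tools as lc_tools
    from langchain.tools import QueryCheckerTool, QuerySQLCheckerTool  # noqa: F401

    assert {"QueryCheckerTool", "QuerySQLCheckerTool"} <= set(lc_tools.__all__)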