From c95facc2931c50284d0f02e9b56adff0e362f5e9 Mon Sep 17 00:00:00 2001
From: Neli Hateva
Date: Mon, 29 Jan 2024 22:25:53 +0200
Subject: [PATCH] langchain[minor], community[minor]: Implement Ontotext GraphDB QA Chain (#16019)

- **Description:** Implement Ontotext GraphDB QA Chain
- **Issue:** N/A
- **Dependencies:** N/A
- **Twitter handle:** @OntotextGraphDB
---
 docs/api_reference/guide_imports.json          |   2 +-
 .../providers/ontotext_graphdb.mdx             |  21 +
 .../graph/graph_ontotext_graphdb_qa.ipynb      | 543 ++++++++++++++++++
 .../langchain_community/graphs/__init__.py     |   2 +
 .../graphs/ontotext_graphdb_graph.py           | 213 +++++++
 libs/community/poetry.lock                     |  27 +-
 libs/community/pyproject.toml                  |   6 +-
 .../Dockerfile                                 |   6 +
 .../config.ttl                                 |  46 ++
 .../docker-compose.yaml                        |   9 +
 .../graphdb_create.sh                          |  33 ++
 .../docker-compose-ontotext-graphdb/start.sh   |   5 +
 .../starwars-data.trig                         |  43 ++
 .../graphs/test_ontotext_graphdb_graph.py      | 181 ++++++
 .../tests/unit_tests/graphs/test_imports.py    |   1 +
 .../graphs/test_ontotext_graphdb_graph.py      | 176 ++++++
 libs/langchain/langchain/chains/__init__.py    |   2 +
 .../chains/graph_qa/ontotext_graphdb.py        | 182 ++++++
 .../langchain/chains/graph_qa/prompts.py       |  62 ++
 libs/langchain/poetry.lock                     |  33 +-
 libs/langchain/pyproject.toml                  |   4 +-
 .../Dockerfile                                 |   6 +
 .../config.ttl                                 |  46 ++
 .../docker-compose.yaml                        |   9 +
 .../graphdb_create.sh                          |  33 ++
 .../docker-compose-ontotext-graphdb/start.sh   |   5 +
 .../starwars-data.trig                         | 160 ++++++
 .../chains/test_ontotext_graphdb_qa.py         | 323 +++++++++++
 .../tests/unit_tests/chains/test_imports.py    |   1 +
 .../chains/test_ontotext_graphdb_qa.py         |   2 +
 pyproject.toml                                 |   2 +-
 31 files changed, 2170 insertions(+), 14 deletions(-)
 create mode 100644 docs/docs/integrations/providers/ontotext_graphdb.mdx
 create mode 100644 docs/docs/use_cases/graph/graph_ontotext_graphdb_qa.ipynb
 create mode 100644 libs/community/langchain_community/graphs/ontotext_graphdb_graph.py
 create mode 100644 libs/community/tests/integration_tests/graphs/docker-compose-ontotext-graphdb/Dockerfile
 create mode 100644 libs/community/tests/integration_tests/graphs/docker-compose-ontotext-graphdb/config.ttl
 create mode 100644 libs/community/tests/integration_tests/graphs/docker-compose-ontotext-graphdb/docker-compose.yaml
 create mode 100644 libs/community/tests/integration_tests/graphs/docker-compose-ontotext-graphdb/graphdb_create.sh
 create mode 100755 libs/community/tests/integration_tests/graphs/docker-compose-ontotext-graphdb/start.sh
 create mode 100644 libs/community/tests/integration_tests/graphs/docker-compose-ontotext-graphdb/starwars-data.trig
 create mode 100644 libs/community/tests/integration_tests/graphs/test_ontotext_graphdb_graph.py
 create mode 100644 libs/community/tests/unit_tests/graphs/test_ontotext_graphdb_graph.py
 create mode 100644 libs/langchain/langchain/chains/graph_qa/ontotext_graphdb.py
 create mode 100644 libs/langchain/tests/integration_tests/chains/docker-compose-ontotext-graphdb/Dockerfile
 create mode 100644 libs/langchain/tests/integration_tests/chains/docker-compose-ontotext-graphdb/config.ttl
 create mode 100644 libs/langchain/tests/integration_tests/chains/docker-compose-ontotext-graphdb/docker-compose.yaml
 create mode 100644 libs/langchain/tests/integration_tests/chains/docker-compose-ontotext-graphdb/graphdb_create.sh
 create mode 100755 libs/langchain/tests/integration_tests/chains/docker-compose-ontotext-graphdb/start.sh
 create mode 100644 libs/langchain/tests/integration_tests/chains/docker-compose-ontotext-graphdb/starwars-data.trig
 create mode 100644 
libs/langchain/tests/integration_tests/chains/test_ontotext_graphdb_qa.py create mode 100644 libs/langchain/tests/unit_tests/chains/test_ontotext_graphdb_qa.py diff --git a/docs/api_reference/guide_imports.json b/docs/api_reference/guide_imports.json index bebd2ce454a..388fddbcac8 100644 --- a/docs/api_reference/guide_imports.json +++ b/docs/api_reference/guide_imports.json @@ -1 +1 @@ -{"SingleFileFacebookMessengerChatLoader": {"Facebook Messenger": "https://python.langchain.com/docs/integrations/chat_loaders/facebook"}, "FolderFacebookMessengerChatLoader": {"Facebook Messenger": "https://python.langchain.com/docs/integrations/chat_loaders/facebook", "Chat loaders": "https://python.langchain.com/docs/integrations/chat_loaders/index"}, "merge_chat_runs": {"Facebook Messenger": "https://python.langchain.com/docs/integrations/chat_loaders/facebook", "Slack": "https://python.langchain.com/docs/integrations/chat_loaders/slack", "WhatsApp": "https://python.langchain.com/docs/integrations/chat_loaders/whatsapp", "iMessage": "https://python.langchain.com/docs/integrations/chat_loaders/imessage", "Telegram": "https://python.langchain.com/docs/integrations/chat_loaders/telegram", "Discord": "https://python.langchain.com/docs/integrations/chat_loaders/discord"}, "map_ai_messages": {"Facebook Messenger": "https://python.langchain.com/docs/integrations/chat_loaders/facebook", "GMail": "https://python.langchain.com/docs/integrations/chat_loaders/gmail", "Slack": "https://python.langchain.com/docs/integrations/chat_loaders/slack", "WhatsApp": "https://python.langchain.com/docs/integrations/chat_loaders/whatsapp", "iMessage": "https://python.langchain.com/docs/integrations/chat_loaders/imessage", "Telegram": "https://python.langchain.com/docs/integrations/chat_loaders/telegram", "Discord": "https://python.langchain.com/docs/integrations/chat_loaders/discord"}, "convert_messages_for_finetuning": {"Facebook Messenger": "https://python.langchain.com/docs/integrations/chat_loaders/facebook", "Chat loaders": "https://python.langchain.com/docs/integrations/chat_loaders/index", "iMessage": "https://python.langchain.com/docs/integrations/chat_loaders/imessage"}, "ChatOpenAI": {"Facebook Messenger": "https://python.langchain.com/docs/integrations/chat_loaders/facebook", "Slack": "https://python.langchain.com/docs/integrations/chat_loaders/slack", "WhatsApp": "https://python.langchain.com/docs/integrations/chat_loaders/whatsapp", "iMessage": "https://python.langchain.com/docs/integrations/chat_loaders/imessage", "Telegram": "https://python.langchain.com/docs/integrations/chat_loaders/telegram", "Discord": "https://python.langchain.com/docs/integrations/chat_loaders/discord", "RePhraseQueryRetriever": "https://python.langchain.com/docs/integrations/retrievers/re_phrase", "Wikipedia": "https://python.langchain.com/docs/integrations/retrievers/wikipedia", "Arxiv": "https://python.langchain.com/docs/integrations/retrievers/arxiv", "ChatGPT Plugins": "https://python.langchain.com/docs/integrations/tools/chatgpt_plugins", "Human as a tool": "https://python.langchain.com/docs/integrations/tools/human_tools", "Yahoo Finance News": "https://python.langchain.com/docs/integrations/tools/yahoo_finance_news", "ArXiv": "https://python.langchain.com/docs/integrations/tools/arxiv", "Metaphor Search": "https://python.langchain.com/docs/integrations/tools/metaphor_search", "Shell (bash)": "https://python.langchain.com/docs/integrations/tools/bash", "Xata chat memory": 
"https://python.langchain.com/docs/integrations/memory/xata_chat_message_history", "Dynamodb Chat Message History": "https://python.langchain.com/docs/integrations/memory/dynamodb_chat_message_history", "OpenAI": "https://python.langchain.com/docs/integrations/chat/openai", "LLMonitor": "https://python.langchain.com/docs/integrations/callbacks/llmonitor", "Context": "https://python.langchain.com/docs/integrations/callbacks/context", "Label Studio": "https://python.langchain.com/docs/integrations/callbacks/labelstudio", "PromptLayer": "https://python.langchain.com/docs/integrations/callbacks/promptlayer", "CnosDB": "https://python.langchain.com/docs/integrations/providers/cnosdb", "Log10": "https://python.langchain.com/docs/integrations/providers/log10", "Flyte": "https://python.langchain.com/docs/integrations/providers/flyte", "Arthur": "https://python.langchain.com/docs/integrations/providers/arthur_tracking", "CSV": "https://python.langchain.com/docs/integrations/toolkits/csv", "Document Comparison": "https://python.langchain.com/docs/integrations/toolkits/document_comparison_toolkit", "Python": "https://python.langchain.com/docs/integrations/toolkits/python", "PowerBI Dataset": "https://python.langchain.com/docs/integrations/toolkits/powerbi", "SQL Database": "https://python.langchain.com/docs/integrations/toolkits/sql_database", "Airbyte Question Answering": "https://python.langchain.com/docs/integrations/toolkits/airbyte_structured_qa", "Github": "https://python.langchain.com/docs/integrations/toolkits/github", "Spark SQL": "https://python.langchain.com/docs/integrations/toolkits/spark_sql", "AINetwork": "https://python.langchain.com/docs/integrations/toolkits/ainetwork", "Pandas Dataframe": "https://python.langchain.com/docs/integrations/toolkits/pandas", "Neo4j Vector Index": "https://python.langchain.com/docs/integrations/vectorstores/neo4jvector", "OpenAI Functions Metadata Tagger": "https://python.langchain.com/docs/integrations/document_transformers/openai_metadata_tagger", "Loading documents from a YouTube url": "https://python.langchain.com/docs/integrations/document_loaders/youtube_audio", "Figma": "https://python.langchain.com/docs/integrations/document_loaders/figma", "Fallbacks": "https://python.langchain.com/docs/guides/fallbacks", "Debugging": "https://python.langchain.com/docs/guides/debugging", "LangSmith Walkthrough": "https://python.langchain.com/docs/guides/langsmith/walkthrough", "Reversible data anonymization with Microsoft Presidio": "https://python.langchain.com/docs/guides/privacy/presidio_data_anonymization/reversible", "Data anonymization with Microsoft Presidio": "https://python.langchain.com/docs/guides/privacy/presidio_data_anonymization/index", "Comparing Chain Outputs": "https://python.langchain.com/docs/guides/evaluation/examples/comparisons", "Agent Trajectory": "https://python.langchain.com/docs/guides/evaluation/trajectory/trajectory_eval", "Custom Trajectory Evaluator": "https://python.langchain.com/docs/guides/evaluation/trajectory/custom", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/tagging", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/qa_structured/sql", "Question Answering": "https://python.langchain.com/docs/use_cases/question_answering/question_answering", "Perform context-aware text splitting": "https://python.langchain.com/docs/use_cases/question_answering/how_to/document-context-aware-QA", "Conversational Retrieval Agent": 
"https://python.langchain.com/docs/use_cases/question_answering/how_to/conversational_retrieval_agents", "Multiple Retrieval Sources": "https://python.langchain.com/docs/use_cases/question_answering/how_to/multiple_retrieval", "Cite sources": "https://python.langchain.com/docs/use_cases/question_answering/how_to/qa_citations", "Retrieve as you generate with FLARE": "https://python.langchain.com/docs/use_cases/question_answering/how_to/flare", "Analysis of Twitter the-algorithm source code with LangChain, GPT4 and Activeloop's Deep Lake": "https://python.langchain.com/docs/use_cases/question_answering/how_to/code/twitter-the-algorithm-analysis-deeplake", "Use LangChain, GPT and Activeloop's Deep Lake to work with code base": "https://python.langchain.com/docs/use_cases/question_answering/how_to/code/code-analysis-deeplake", "Structure answers with OpenAI functions": "https://python.langchain.com/docs/use_cases/question_answering/integrations/openai_functions_retrieval_qa", "QA using Activeloop's DeepLake": "https://python.langchain.com/docs/use_cases/question_answering/integrations/semantic-search-over-chat", "Neptune Open Cypher QA Chain": "https://python.langchain.com/docs/use_cases/more/graph/neptune_cypher_qa", "NebulaGraphQAChain": "https://python.langchain.com/docs/use_cases/more/graph/graph_nebula_qa", "Memgraph QA chain": "https://python.langchain.com/docs/use_cases/more/graph/graph_memgraph_qa", "KuzuQAChain": "https://python.langchain.com/docs/use_cases/more/graph/graph_kuzu_qa", "HugeGraph QA Chain": "https://python.langchain.com/docs/use_cases/more/graph/graph_hugegraph_qa", "GraphSparqlQAChain": "https://python.langchain.com/docs/use_cases/more/graph/graph_sparql_qa", "Diffbot Graph Transformer": "https://python.langchain.com/docs/use_cases/more/graph/diffbot_graphtransformer", "ArangoDB QA chain": "https://python.langchain.com/docs/use_cases/more/graph/graph_arangodb_qa", "Neo4j DB QA chain": "https://python.langchain.com/docs/use_cases/more/graph/graph_cypher_qa", "FalkorDBQAChain": "https://python.langchain.com/docs/use_cases/more/graph/graph_falkordb_qa", "Agents": "https://python.langchain.com/docs/use_cases/more/agents/agents", "AutoGPT": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/autogpt", "!pip install bs4": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/marathon_times", "Wikibase Agent": "https://python.langchain.com/docs/use_cases/more/agents/agents/wikibase_agent", "SalesGPT - Your Context-Aware AI Sales Assistant With Knowledge Base": "https://python.langchain.com/docs/use_cases/more/agents/agents/sales_agent_with_context", "CAMEL Role-Playing Autonomous Cooperative Agents": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/camel_role_playing", "Multi-Agent Simulated Environment: Petting Zoo": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/petting_zoo", "Multi-agent decentralized speaker selection": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/multiagent_bidding", "Multi-agent authoritarian speaker selection": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/multiagent_authoritarian", "Generative Agents in LangChain": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/characters", "Two-Player Dungeons & Dragons": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/two_player_dnd", "Multi-Player Dungeons & Dragons": 
"https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/multi_player_dnd", "Simulated Environment: Gymnasium": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/gymnasium", "Agent Debates with Tools": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/two_agent_debate_tools", "How to use a SmartLLMChain": "https://python.langchain.com/docs/use_cases/more/self_check/smart_llm", "Vector SQL Retriever with MyScale": "https://python.langchain.com/docs/use_cases/qa_structured/integrations/myscale_vector_sql", "Elasticsearch": "https://python.langchain.com/docs/use_cases/qa_structured/integrations/elasticsearch", "SQL": "https://python.langchain.com/docs/use_cases/sql/sql", "MultiVector Retriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/multi_vector", "MultiQueryRetriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/MultiQueryRetriever", "WebResearchRetriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/web_research", "Memory in LLMChain": "https://python.langchain.com/docs/modules/memory/adding_memory", "Custom callback handlers": "https://python.langchain.com/docs/modules/callbacks/custom_callbacks", "Async callbacks": "https://python.langchain.com/docs/modules/callbacks/async_callbacks", "Defining Custom Tools": "https://python.langchain.com/docs/modules/agents/tools/custom_tools", "Tools as OpenAI Functions": "https://python.langchain.com/docs/modules/agents/tools/tools_as_openai_functions", "OpenAI Multi Functions Agent": "https://python.langchain.com/docs/modules/agents/agent_types/openai_multi_functions_agent", "Handle parsing errors": "https://python.langchain.com/docs/modules/agents/how_to/handle_parsing_errors", "Running Agent as an Iterator": "https://python.langchain.com/docs/modules/agents/how_to/agent_iter", "Add Memory to OpenAI Functions Agent": "https://python.langchain.com/docs/modules/agents/how_to/add_memory_openai_functions", "Custom functions with OpenAI Functions Agent": "https://python.langchain.com/docs/modules/agents/how_to/custom-functions-with-openai-functions-agent", "Use ToolKits with OpenAI Functions": "https://python.langchain.com/docs/modules/agents/how_to/use_toolkits_with_openai_functions", "Retry parser": "https://python.langchain.com/docs/modules/model_io/output_parsers/retry", "Pydantic (JSON) parser": "https://python.langchain.com/docs/modules/model_io/output_parsers/pydantic", "Prompt pipelining": "https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/prompts_pipelining", "Connecting to a Feature Store": "https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/connecting_to_a_feature_store", "Custom chain": "https://python.langchain.com/docs/modules/chains/how_to/custom_chain", "Using OpenAI functions": "https://python.langchain.com/docs/modules/chains/how_to/openai_functions", "interface.md": "https://python.langchain.com/docs/expression_language/interface", "First we add a step to load memory": "https://python.langchain.com/docs/expression_language/cookbook/retrieval", "sql_db.md": "https://python.langchain.com/docs/expression_language/cookbook/sql_db", "prompt_llm_parser.md": "https://python.langchain.com/docs/expression_language/cookbook/prompt_llm_parser", "Adding memory": "https://python.langchain.com/docs/expression_language/cookbook/memory", "multiple_chains.md": "https://python.langchain.com/docs/expression_language/cookbook/multiple_chains", "Code 
writing": "https://python.langchain.com/docs/expression_language/cookbook/code_writing", "Using tools": "https://python.langchain.com/docs/expression_language/cookbook/tools", "Configure Runnable traces": "https://python.langchain.com/docs/expression_language/how_to/trace_config"}, "ChatPromptTemplate": {"Facebook Messenger": "https://python.langchain.com/docs/integrations/chat_loaders/facebook", "Chat loaders": "https://python.langchain.com/docs/integrations/chat_loaders/index", "iMessage": "https://python.langchain.com/docs/integrations/chat_loaders/imessage", "Anthropic": "https://python.langchain.com/docs/integrations/chat/anthropic", "\ud83d\ude85 LiteLLM": "https://python.langchain.com/docs/integrations/chat/litellm", "Konko": "https://python.langchain.com/docs/integrations/chat/konko", "OpenAI": "https://python.langchain.com/docs/integrations/chat/openai", "Google Cloud Platform Vertex AI PaLM ": "https://python.langchain.com/docs/integrations/chat/google_vertex_ai_palm", "JinaChat": "https://python.langchain.com/docs/integrations/chat/jinachat", "Context": "https://python.langchain.com/docs/integrations/callbacks/context", "OpenAI Functions Metadata Tagger": "https://python.langchain.com/docs/integrations/document_transformers/openai_metadata_tagger", "Figma": "https://python.langchain.com/docs/integrations/document_loaders/figma", "Fireworks": "https://python.langchain.com/docs/integrations/llms/fireworks", "Fallbacks": "https://python.langchain.com/docs/guides/fallbacks", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/tagging", "Multiple Retrieval Sources": "https://python.langchain.com/docs/use_cases/question_answering/how_to/multiple_retrieval", "Structure answers with OpenAI functions": "https://python.langchain.com/docs/use_cases/question_answering/integrations/openai_functions_retrieval_qa", "Multi-agent authoritarian speaker selection": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/multiagent_authoritarian", "MultiVector Retriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/multi_vector", "Memory in LLMChain": "https://python.langchain.com/docs/modules/memory/adding_memory", "Retry parser": "https://python.langchain.com/docs/modules/model_io/output_parsers/retry", "Pydantic (JSON) parser": "https://python.langchain.com/docs/modules/model_io/output_parsers/pydantic", "Few-shot examples for chat models": "https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/few_shot_examples_chat", "Prompt pipelining": "https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/prompts_pipelining", "Using OpenAI functions": "https://python.langchain.com/docs/modules/chains/how_to/openai_functions", "interface.md": "https://python.langchain.com/docs/expression_language/interface", "First we add a step to load memory": "https://python.langchain.com/docs/expression_language/cookbook/retrieval", "sql_db.md": "https://python.langchain.com/docs/expression_language/cookbook/sql_db", "prompt_llm_parser.md": "https://python.langchain.com/docs/expression_language/cookbook/prompt_llm_parser", "Adding memory": "https://python.langchain.com/docs/expression_language/cookbook/memory", "multiple_chains.md": "https://python.langchain.com/docs/expression_language/cookbook/multiple_chains", "Code writing": "https://python.langchain.com/docs/expression_language/cookbook/code_writing", "Using tools": 
"https://python.langchain.com/docs/expression_language/cookbook/tools", "Adding moderation": "https://python.langchain.com/docs/expression_language/cookbook/moderation"}, "StrOutputParser": {"Facebook Messenger": "https://python.langchain.com/docs/integrations/chat_loaders/facebook", "Chat loaders": "https://python.langchain.com/docs/integrations/chat_loaders/index", "iMessage": "https://python.langchain.com/docs/integrations/chat_loaders/imessage", "OpaquePrompts": "https://python.langchain.com/docs/integrations/llms/opaqueprompts", "Fallbacks": "https://python.langchain.com/docs/guides/fallbacks", "MultiVector Retriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/multi_vector", "First we add a step to load memory": "https://python.langchain.com/docs/expression_language/cookbook/retrieval", "sql_db.md": "https://python.langchain.com/docs/expression_language/cookbook/sql_db", "prompt_llm_parser.md": "https://python.langchain.com/docs/expression_language/cookbook/prompt_llm_parser", "multiple_chains.md": "https://python.langchain.com/docs/expression_language/cookbook/multiple_chains", "Code writing": "https://python.langchain.com/docs/expression_language/cookbook/code_writing", "Using tools": "https://python.langchain.com/docs/expression_language/cookbook/tools", "Configure Runnable traces": "https://python.langchain.com/docs/expression_language/how_to/trace_config"}, "AIMessage": {"Twitter (via Apify)": "https://python.langchain.com/docs/integrations/chat_loaders/twitter", "Zep": "https://python.langchain.com/docs/integrations/retrievers/zep_memorystore", "Zep Memory": "https://python.langchain.com/docs/integrations/memory/zep_memory", "SQL Chat Message History": "https://python.langchain.com/docs/integrations/memory/sql_chat_message_history", "Anthropic": "https://python.langchain.com/docs/integrations/chat/anthropic", "\ud83d\ude85 LiteLLM": "https://python.langchain.com/docs/integrations/chat/litellm", "Konko": "https://python.langchain.com/docs/integrations/chat/konko", "OpenAI": "https://python.langchain.com/docs/integrations/chat/openai", "JinaChat": "https://python.langchain.com/docs/integrations/chat/jinachat", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/chatbots", "CAMEL Role-Playing Autonomous Cooperative Agents": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/camel_role_playing", "Multi-agent decentralized speaker selection": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/multiagent_bidding", "Multi-agent authoritarian speaker selection": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/multiagent_authoritarian", "Multi-Player Dungeons & Dragons": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/multi_player_dnd", "Simulated Environment: Gymnasium": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/gymnasium", "Agent Debates with Tools": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/two_agent_debate_tools", "Prompt pipelining": "https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/prompts_pipelining"}, "convert_message_to_dict": {"Twitter (via Apify)": "https://python.langchain.com/docs/integrations/chat_loaders/twitter"}, "GMailLoader": {"GMail": "https://python.langchain.com/docs/integrations/chat_loaders/gmail"}, "SlackChatLoader": {"Slack": "https://python.langchain.com/docs/integrations/chat_loaders/slack"}, 
"ChatSession": {"Slack": "https://python.langchain.com/docs/integrations/chat_loaders/slack", "WhatsApp": "https://python.langchain.com/docs/integrations/chat_loaders/whatsapp", "iMessage": "https://python.langchain.com/docs/integrations/chat_loaders/imessage", "Telegram": "https://python.langchain.com/docs/integrations/chat_loaders/telegram", "Discord": "https://python.langchain.com/docs/integrations/chat_loaders/discord"}, "WhatsAppChatLoader": {"WhatsApp": "https://python.langchain.com/docs/integrations/providers/whatsapp", "WhatsApp Chat": "https://python.langchain.com/docs/integrations/document_loaders/whatsapp_chat"}, "IMessageChatLoader": {"iMessage": "https://python.langchain.com/docs/integrations/chat_loaders/imessage"}, "TelegramChatLoader": {"Telegram": "https://python.langchain.com/docs/integrations/chat_loaders/telegram"}, "base": {"Discord": "https://python.langchain.com/docs/integrations/chat_loaders/discord"}, "HuggingFaceBgeEmbeddings": {"BGE on Hugging Face": "https://python.langchain.com/docs/integrations/text_embedding/bge_huggingface"}, "XinferenceEmbeddings": {"Xorbits inference (Xinference)": "https://python.langchain.com/docs/integrations/text_embedding/xinference"}, "DeepInfraEmbeddings": {"DeepInfra": "https://python.langchain.com/docs/integrations/text_embedding/deepinfra"}, "HuggingFaceEmbeddings": {"Hugging Face": "https://python.langchain.com/docs/integrations/providers/huggingface", "Sentence Transformers": "https://python.langchain.com/docs/integrations/text_embedding/sentence_transformers", "LOTR (Merger Retriever)": "https://python.langchain.com/docs/integrations/retrievers/merger_retriever", "ScaNN": "https://python.langchain.com/docs/integrations/vectorstores/scann", "Annoy": "https://python.langchain.com/docs/integrations/vectorstores/annoy", "your local model path": "https://python.langchain.com/docs/integrations/vectorstores/vearch", "Pairwise Embedding Distance ": "https://python.langchain.com/docs/guides/evaluation/comparison/pairwise_embedding_distance", "Embedding Distance": "https://python.langchain.com/docs/guides/evaluation/string/embedding_distance", "Lost in the middle: The problem with long contexts": "https://python.langchain.com/docs/modules/data_connection/document_transformers/post_retrieval/long_context_reorder"}, "HuggingFaceInferenceAPIEmbeddings": {"Hugging Face": "https://python.langchain.com/docs/integrations/text_embedding/huggingfacehub"}, "GPT4AllEmbeddings": {"GPT4All": "https://python.langchain.com/docs/integrations/text_embedding/gpt4all", "Ollama": "https://python.langchain.com/docs/integrations/llms/ollama", "Use local LLMs": "https://python.langchain.com/docs/use_cases/question_answering/how_to/local_retrieval_qa", "WebResearchRetriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/web_research"}, "MosaicMLInstructorEmbeddings": {"MosaicML": "https://python.langchain.com/docs/integrations/text_embedding/mosaicml"}, "OpenAIEmbeddings": {"OpenAI": "https://python.langchain.com/docs/integrations/providers/openai", "AzureOpenAI": "https://python.langchain.com/docs/integrations/text_embedding/azureopenai", "RePhraseQueryRetriever": "https://python.langchain.com/docs/integrations/retrievers/re_phrase", "Cohere Reranker": "https://python.langchain.com/docs/integrations/retrievers/cohere-reranker", "kNN": "https://python.langchain.com/docs/integrations/retrievers/knn", "DocArray Retriever": "https://python.langchain.com/docs/integrations/retrievers/docarray_retriever", "SVM": 
"https://python.langchain.com/docs/integrations/retrievers/svm", "Pinecone Hybrid Search": "https://python.langchain.com/docs/integrations/retrievers/pinecone_hybrid_search", "LOTR (Merger Retriever)": "https://python.langchain.com/docs/integrations/retrievers/merger_retriever", "Xata chat memory": "https://python.langchain.com/docs/integrations/memory/xata_chat_message_history", "Confident": "https://python.langchain.com/docs/integrations/callbacks/confident", "Azure OpenAI": "https://python.langchain.com/docs/integrations/providers/azure_openai", "Document Comparison": "https://python.langchain.com/docs/integrations/toolkits/document_comparison_toolkit", "Vectorstore": "https://python.langchain.com/docs/integrations/toolkits/vectorstore", "LanceDB": "https://python.langchain.com/docs/integrations/vectorstores/lancedb", "Weaviate": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/weaviate_self_query", "Xata": "https://python.langchain.com/docs/integrations/vectorstores/xata", "Redis": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/redis_self_query", "PGVector": "https://python.langchain.com/docs/integrations/vectorstores/pgvector", "Rockset": "https://python.langchain.com/docs/integrations/vectorstores/rockset", "DingoDB": "https://python.langchain.com/docs/integrations/vectorstores/dingo", "Zilliz": "https://python.langchain.com/docs/integrations/vectorstores/zilliz", "SingleStoreDB": "https://python.langchain.com/docs/integrations/vectorstores/singlestoredb", "Typesense": "https://python.langchain.com/docs/integrations/vectorstores/typesense", "Atlas": "https://python.langchain.com/docs/integrations/vectorstores/atlas", "Activeloop Deep Lake": "https://python.langchain.com/docs/integrations/vectorstores/activeloop_deeplake", "Neo4j Vector Index": "https://python.langchain.com/docs/integrations/vectorstores/neo4jvector", "Chroma": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/chroma_self_query", "Alibaba Cloud OpenSearch": "https://python.langchain.com/docs/integrations/vectorstores/alibabacloud_opensearch", "Baidu Cloud VectorSearch": "https://python.langchain.com/docs/integrations/vectorstores/baiducloud_vector_search", "StarRocks": "https://python.langchain.com/docs/integrations/vectorstores/starrocks", "scikit-learn": "https://python.langchain.com/docs/integrations/vectorstores/sklearn", "DocArray HnswSearch": "https://python.langchain.com/docs/integrations/vectorstores/docarray_hnsw", "MyScale": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/myscale_self_query", "ClickHouse": "https://python.langchain.com/docs/integrations/vectorstores/clickhouse", "Qdrant": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/qdrant_self_query", "Tigris": "https://python.langchain.com/docs/integrations/vectorstores/tigris", "Supabase (Postgres)": "https://python.langchain.com/docs/integrations/vectorstores/supabase", "OpenSearch": "https://python.langchain.com/docs/integrations/vectorstores/opensearch", "Pinecone": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/pinecone", "Azure Cognitive Search": "https://python.langchain.com/docs/integrations/vectorstores/azuresearch", "Cassandra": "https://python.langchain.com/docs/integrations/vectorstores/cassandra", "USearch": "https://python.langchain.com/docs/integrations/vectorstores/usearch", "Milvus": 
"https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/milvus_self_query", "Elasticsearch": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/elasticsearch_self_query", "DocArray InMemorySearch": "https://python.langchain.com/docs/integrations/vectorstores/docarray_in_memory", "Postgres Embedding": "https://python.langchain.com/docs/integrations/vectorstores/pgembedding", "Faiss": "https://python.langchain.com/docs/integrations/vectorstores/faiss", "Epsilla": "https://python.langchain.com/docs/integrations/vectorstores/epsilla", "AnalyticDB": "https://python.langchain.com/docs/integrations/vectorstores/analyticdb", "Hologres": "https://python.langchain.com/docs/integrations/vectorstores/hologres", "MongoDB Atlas": "https://python.langchain.com/docs/integrations/vectorstores/mongodb_atlas", "Meilisearch": "https://python.langchain.com/docs/integrations/vectorstores/meilisearch", "Loading documents from a YouTube url": "https://python.langchain.com/docs/integrations/document_loaders/youtube_audio", "Psychic": "https://python.langchain.com/docs/integrations/document_loaders/psychic", "Docugami": "https://python.langchain.com/docs/integrations/document_loaders/docugami", "LLM Caching integrations": "https://python.langchain.com/docs/integrations/llms/llm_caching", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/chatbots", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/qa_structured/sql", "Question Answering": "https://python.langchain.com/docs/use_cases/question_answering/question_answering", "Perform context-aware text splitting": "https://python.langchain.com/docs/use_cases/question_answering/how_to/document-context-aware-QA", "Conversational Retrieval Agent": "https://python.langchain.com/docs/use_cases/question_answering/how_to/conversational_retrieval_agents", "Retrieve from vector stores directly": "https://python.langchain.com/docs/use_cases/question_answering/how_to/vector_db_text_generation", "Retrieve as you generate with FLARE": "https://python.langchain.com/docs/use_cases/question_answering/how_to/flare", "Improve document indexing with HyDE": "https://python.langchain.com/docs/use_cases/question_answering/how_to/hyde", "Analysis of Twitter the-algorithm source code with LangChain, GPT4 and Activeloop's Deep Lake": "https://python.langchain.com/docs/use_cases/question_answering/how_to/code/twitter-the-algorithm-analysis-deeplake", "Use LangChain, GPT and Activeloop's Deep Lake to work with code base": "https://python.langchain.com/docs/use_cases/question_answering/how_to/code/code-analysis-deeplake", "Structure answers with OpenAI functions": "https://python.langchain.com/docs/use_cases/question_answering/integrations/openai_functions_retrieval_qa", "QA using Activeloop's DeepLake": "https://python.langchain.com/docs/use_cases/question_answering/integrations/semantic-search-over-chat", "Agents": "https://python.langchain.com/docs/use_cases/more/agents/agents", "AutoGPT": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/autogpt", "BabyAGI User Guide": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/baby_agi", "BabyAGI with Tools": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/baby_agi_with_agent", "!pip install bs4": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/marathon_times", "Plug-and-Plai": 
"https://python.langchain.com/docs/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval_using_plugnplai", "SalesGPT - Your Context-Aware AI Sales Assistant With Knowledge Base": "https://python.langchain.com/docs/use_cases/more/agents/agents/sales_agent_with_context", "Custom Agent with PlugIn Retrieval": "https://python.langchain.com/docs/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval", "Generative Agents in LangChain": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/characters", "SQL": "https://python.langchain.com/docs/use_cases/sql/sql", "Indexing": "https://python.langchain.com/docs/modules/data_connection/indexing", "Caching": "https://python.langchain.com/docs/modules/data_connection/text_embedding/caching_embeddings", "MultiVector Retriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/multi_vector", "MultiQueryRetriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/MultiQueryRetriever", "Parent Document Retriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/parent_document_retriever", "WebResearchRetriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/web_research", "Supabase": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/supabase_self_query", "Deep Lake": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/activeloop_deeplake_self_query", "Memory in the Multi-Input Chain": "https://python.langchain.com/docs/modules/memory/adding_memory_chain_multiple_inputs", "Combine agents and vector stores": "https://python.langchain.com/docs/modules/agents/how_to/agent_vectorstore", "Custom agent with tool retrieval": "https://python.langchain.com/docs/modules/agents/how_to/custom_agent_with_tool_retrieval", "Select by maximal marginal relevance (MMR)": "https://python.langchain.com/docs/modules/model_io/prompts/example_selectors/mmr", "Few-shot examples for chat models": "https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/few_shot_examples_chat", "Loading from LangChainHub": "https://python.langchain.com/docs/modules/chains/how_to/from_hub", "First we add a step to load memory": "https://python.langchain.com/docs/expression_language/cookbook/retrieval"}, "VertexAIEmbeddings": {"Google Vertex AI PaLM ": "https://python.langchain.com/docs/integrations/text_embedding/google_vertex_ai_palm"}, "BedrockEmbeddings": {"Bedrock": "https://python.langchain.com/docs/integrations/providers/bedrock"}, "LlamaCppEmbeddings": {"Llama-cpp": "https://python.langchain.com/docs/integrations/text_embedding/llamacpp", "Llama.cpp": "https://python.langchain.com/docs/integrations/providers/llamacpp"}, "NLPCloudEmbeddings": {"NLP Cloud": "https://python.langchain.com/docs/integrations/text_embedding/nlp_cloud", "NLPCloud": "https://python.langchain.com/docs/integrations/providers/nlpcloud"}, "SpacyEmbeddings": {"SpaCy": "https://python.langchain.com/docs/integrations/text_embedding/spacy_embedding", "spaCy": "https://python.langchain.com/docs/integrations/providers/spacy"}, "HuggingFaceInstructEmbeddings": {"InstructEmbeddings": "https://python.langchain.com/docs/integrations/text_embedding/instruct_embeddings", "Vector SQL Retriever with MyScale": "https://python.langchain.com/docs/use_cases/qa_structured/integrations/myscale_vector_sql"}, "QianfanEmbeddingsEndpoint": {"Baidu Qianfan": 
"https://python.langchain.com/docs/integrations/text_embedding/baidu_qianfan_endpoint"}, "CohereEmbeddings": {"Cohere": "https://python.langchain.com/docs/integrations/providers/cohere", "Memory in the Multi-Input Chain": "https://python.langchain.com/docs/modules/memory/adding_memory_chain_multiple_inputs", "Router": "https://python.langchain.com/docs/modules/chains/foundational/router"}, "EdenAiEmbeddings": {"EDEN AI": "https://python.langchain.com/docs/integrations/text_embedding/edenai"}, "SentenceTransformerEmbeddings": {"Sentence Transformers": "https://python.langchain.com/docs/integrations/text_embedding/sentence_transformers", "sqlite-vss": "https://python.langchain.com/docs/integrations/vectorstores/sqlitevss", "Chroma": "https://python.langchain.com/docs/integrations/vectorstores/chroma"}, "ClarifaiEmbeddings": {"Clarifai": "https://python.langchain.com/docs/integrations/providers/clarifai"}, "AwaEmbeddings": {"AwaDB": "https://python.langchain.com/docs/integrations/providers/awadb"}, "MiniMaxEmbeddings": {"MiniMax": "https://python.langchain.com/docs/integrations/text_embedding/minimax", "Minimax": "https://python.langchain.com/docs/integrations/providers/minimax"}, "FakeEmbeddings": {"Fake Embeddings": "https://python.langchain.com/docs/integrations/text_embedding/fake", "DocArray Retriever": "https://python.langchain.com/docs/integrations/retrievers/docarray_retriever", "Vectara": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/vectara_self_query", "Tair": "https://python.langchain.com/docs/integrations/vectorstores/tair", "Tencent Cloud VectorDB": "https://python.langchain.com/docs/integrations/vectorstores/tencentvectordb"}, "ElasticsearchEmbeddings": {"Elasticsearch": "https://python.langchain.com/docs/integrations/text_embedding/elasticsearch"}, "SelfHostedEmbeddings": {"Self Hosted": "https://python.langchain.com/docs/integrations/text_embedding/self-hosted"}, "SelfHostedHuggingFaceEmbeddings": {"Self Hosted": "https://python.langchain.com/docs/integrations/text_embedding/self-hosted"}, "SelfHostedHuggingFaceInstructEmbeddings": {"Self Hosted": "https://python.langchain.com/docs/integrations/text_embedding/self-hosted"}, "EmbaasEmbeddings": {"Embaas": "https://python.langchain.com/docs/integrations/text_embedding/embaas"}, "JinaEmbeddings": {"Jina": "https://python.langchain.com/docs/integrations/providers/jina"}, "AlephAlphaAsymmetricSemanticEmbedding": {"Aleph Alpha": "https://python.langchain.com/docs/integrations/providers/aleph_alpha"}, "AlephAlphaSymmetricSemanticEmbedding": {"Aleph Alpha": "https://python.langchain.com/docs/integrations/providers/aleph_alpha"}, "DashScopeEmbeddings": {"DashScope": "https://python.langchain.com/docs/integrations/text_embedding/dashscope", "DashVector": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/dashvector"}, "TensorflowHubEmbeddings": {"TensorflowHub": "https://python.langchain.com/docs/integrations/text_embedding/tensorflowhub", "ScaNN": "https://python.langchain.com/docs/integrations/vectorstores/scann"}, "ModelScopeEmbeddings": {"ModelScope": "https://python.langchain.com/docs/integrations/providers/modelscope"}, "SagemakerEndpointEmbeddings": {"SageMaker": "https://python.langchain.com/docs/integrations/text_embedding/sagemaker-endpoint", "SageMaker Endpoint": "https://python.langchain.com/docs/integrations/providers/sagemaker_endpoint"}, "EmbeddingsContentHandler": {"SageMaker": 
"https://python.langchain.com/docs/integrations/text_embedding/sagemaker-endpoint"}, "LocalAIEmbeddings": {"LocalAI": "https://python.langchain.com/docs/integrations/text_embedding/localai"}, "WebBaseLoader": {"RePhraseQueryRetriever": "https://python.langchain.com/docs/integrations/retrievers/re_phrase", "Ollama": "https://python.langchain.com/docs/integrations/llms/ollama", "Vectorstore": "https://python.langchain.com/docs/integrations/toolkits/vectorstore", "Zep": "https://python.langchain.com/docs/integrations/vectorstores/zep", "WebBaseLoader": "https://python.langchain.com/docs/integrations/document_loaders/web_base", "MergeDocLoader": "https://python.langchain.com/docs/integrations/document_loaders/merge_doc_loader", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/chatbots", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/summarization", "Question Answering": "https://python.langchain.com/docs/use_cases/question_answering/question_answering", "Use local LLMs": "https://python.langchain.com/docs/use_cases/question_answering/how_to/local_retrieval_qa", "MultiQueryRetriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/MultiQueryRetriever", "Combine agents and vector stores": "https://python.langchain.com/docs/modules/agents/how_to/agent_vectorstore"}, "RecursiveCharacterTextSplitter": {"RePhraseQueryRetriever": "https://python.langchain.com/docs/integrations/retrievers/re_phrase", "Cohere Reranker": "https://python.langchain.com/docs/integrations/retrievers/cohere-reranker", "Ollama": "https://python.langchain.com/docs/integrations/llms/ollama", "Zep": "https://python.langchain.com/docs/integrations/vectorstores/zep", "your local model path": "https://python.langchain.com/docs/integrations/vectorstores/vearch", "Loading documents from a YouTube url": "https://python.langchain.com/docs/integrations/document_loaders/youtube_audio", "Source Code": "https://python.langchain.com/docs/integrations/document_loaders/source_code", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/chatbots", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/code_understanding", "Question Answering": "https://python.langchain.com/docs/use_cases/question_answering/question_answering", "Perform context-aware text splitting": "https://python.langchain.com/docs/use_cases/question_answering/how_to/document-context-aware-QA", "Use local LLMs": "https://python.langchain.com/docs/use_cases/question_answering/how_to/local_retrieval_qa", "QA using Activeloop's DeepLake": "https://python.langchain.com/docs/use_cases/question_answering/integrations/semantic-search-over-chat", "!pip install bs4": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/marathon_times", "MultiVector Retriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/multi_vector", "MultiQueryRetriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/MultiQueryRetriever", "Parent Document Retriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/parent_document_retriever", "MarkdownHeaderTextSplitter": "https://python.langchain.com/docs/modules/data_connection/document_transformers/text_splitters/markdown_header_metadata"}, "Chroma": {"RePhraseQueryRetriever": "https://python.langchain.com/docs/integrations/retrievers/re_phrase", "LOTR 
(Merger Retriever)": "https://python.langchain.com/docs/integrations/retrievers/merger_retriever", "Ollama": "https://python.langchain.com/docs/integrations/llms/ollama", "Confident": "https://python.langchain.com/docs/integrations/callbacks/confident", "Chroma": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/chroma_self_query", "Vectorstore": "https://python.langchain.com/docs/integrations/toolkits/vectorstore", "StarRocks": "https://python.langchain.com/docs/integrations/vectorstores/starrocks", "Psychic": "https://python.langchain.com/docs/integrations/document_loaders/psychic", "Docugami": "https://python.langchain.com/docs/integrations/document_loaders/docugami", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/chatbots", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/code_understanding", "Question Answering": "https://python.langchain.com/docs/use_cases/question_answering/question_answering", "Perform context-aware text splitting": "https://python.langchain.com/docs/use_cases/question_answering/how_to/document-context-aware-QA", "Use local LLMs": "https://python.langchain.com/docs/use_cases/question_answering/how_to/local_retrieval_qa", "Retrieve from vector stores directly": "https://python.langchain.com/docs/use_cases/question_answering/how_to/vector_db_text_generation", "Improve document indexing with HyDE": "https://python.langchain.com/docs/use_cases/question_answering/how_to/hyde", "Structure answers with OpenAI functions": "https://python.langchain.com/docs/use_cases/question_answering/integrations/openai_functions_retrieval_qa", "SalesGPT - Your Context-Aware AI Sales Assistant With Knowledge Base": "https://python.langchain.com/docs/use_cases/more/agents/agents/sales_agent_with_context", "MultiVector Retriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/multi_vector", "MultiQueryRetriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/MultiQueryRetriever", "Parent Document Retriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/parent_document_retriever", "WebResearchRetriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/web_research", "Lost in the middle: The problem with long contexts": "https://python.langchain.com/docs/modules/data_connection/document_transformers/post_retrieval/long_context_reorder", "Memory in the Multi-Input Chain": "https://python.langchain.com/docs/modules/memory/adding_memory_chain_multiple_inputs", "Combine agents and vector stores": "https://python.langchain.com/docs/modules/agents/how_to/agent_vectorstore", "Few-shot examples for chat models": "https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/few_shot_examples_chat", "Router": "https://python.langchain.com/docs/modules/chains/foundational/router", "Loading from LangChainHub": "https://python.langchain.com/docs/modules/chains/how_to/from_hub"}, "RePhraseQueryRetriever": {"RePhraseQueryRetriever": "https://python.langchain.com/docs/integrations/retrievers/re_phrase"}, "PromptTemplate": {"RePhraseQueryRetriever": "https://python.langchain.com/docs/integrations/retrievers/re_phrase", "Zapier Natural Language Actions": "https://python.langchain.com/docs/integrations/tools/zapier", "Dall-E Image Generator": "https://python.langchain.com/docs/integrations/tools/dalle_image_generator", "Streamlit Chat Message History": 
"https://python.langchain.com/docs/integrations/memory/streamlit_chat_message_history", "Context": "https://python.langchain.com/docs/integrations/callbacks/context", "Argilla": "https://python.langchain.com/docs/integrations/callbacks/argilla", "Comet": "https://python.langchain.com/docs/integrations/providers/comet_tracking", "Aim": "https://python.langchain.com/docs/integrations/providers/aim_tracking", "Weights & Biases": "https://python.langchain.com/docs/integrations/providers/wandb_tracking", "SageMaker Tracking": "https://python.langchain.com/docs/integrations/providers/sagemaker_tracking", "Rebuff": "https://python.langchain.com/docs/integrations/providers/rebuff", "MLflow": "https://python.langchain.com/docs/integrations/providers/mlflow_tracking", "Flyte": "https://python.langchain.com/docs/integrations/providers/flyte", "Vectara Text Generation": "https://python.langchain.com/docs/integrations/providers/vectara/vectara_text_generation", "Natural Language APIs": "https://python.langchain.com/docs/integrations/toolkits/openapi_nla", "your local model path": "https://python.langchain.com/docs/integrations/vectorstores/vearch", "Google Drive": "https://python.langchain.com/docs/integrations/document_loaders/google_drive", "Google Vertex AI PaLM ": "https://python.langchain.com/docs/integrations/llms/google_vertex_ai_palm", "Predibase": "https://python.langchain.com/docs/integrations/llms/predibase", "Hugging Face Local Pipelines": "https://python.langchain.com/docs/integrations/llms/huggingface_pipelines", "Eden AI": "https://python.langchain.com/docs/integrations/llms/edenai", "Fallbacks": "https://python.langchain.com/docs/guides/fallbacks", "Reversible data anonymization with Microsoft Presidio": "https://python.langchain.com/docs/guides/privacy/presidio_data_anonymization/reversible", "Data anonymization with Microsoft Presidio": "https://python.langchain.com/docs/guides/privacy/presidio_data_anonymization/index", "Removing logical fallacies from model output": "https://python.langchain.com/docs/guides/safety/logical_fallacy_chain", "Amazon Comprehend Moderation Chain": "https://python.langchain.com/docs/guides/safety/amazon_comprehend_chain", "Pairwise String Comparison": "https://python.langchain.com/docs/guides/evaluation/comparison/pairwise_string", "Criteria Evaluation": "https://python.langchain.com/docs/guides/evaluation/string/criteria_eval_chain", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/qa_structured/sql", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/apis", "Question Answering": "https://python.langchain.com/docs/use_cases/question_answering/question_answering", "Retrieve from vector stores directly": "https://python.langchain.com/docs/use_cases/question_answering/how_to/vector_db_text_generation", "Improve document indexing with HyDE": "https://python.langchain.com/docs/use_cases/question_answering/how_to/hyde", "Structure answers with OpenAI functions": "https://python.langchain.com/docs/use_cases/question_answering/integrations/openai_functions_retrieval_qa", "Neo4j DB QA chain": "https://python.langchain.com/docs/use_cases/more/graph/graph_cypher_qa", "Multi-agent authoritarian speaker selection": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/multiagent_authoritarian", "Agent Debates with Tools": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/two_agent_debate_tools", "Bash chain": 
"https://python.langchain.com/docs/use_cases/more/code_writing/llm_bash", "How to use a SmartLLMChain": "https://python.langchain.com/docs/use_cases/more/self_check/smart_llm", "Elasticsearch": "https://python.langchain.com/docs/use_cases/qa_structured/integrations/elasticsearch", "SQL": "https://python.langchain.com/docs/use_cases/sql/sql", "MultiQueryRetriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/MultiQueryRetriever", "WebResearchRetriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/web_research", "Lost in the middle: The problem with long contexts": "https://python.langchain.com/docs/modules/data_connection/document_transformers/post_retrieval/long_context_reorder", "Custom Memory": "https://python.langchain.com/docs/modules/memory/custom_memory", "Memory in the Multi-Input Chain": "https://python.langchain.com/docs/modules/memory/adding_memory_chain_multiple_inputs", "Memory in LLMChain": "https://python.langchain.com/docs/modules/memory/adding_memory", "Multiple Memory classes": "https://python.langchain.com/docs/modules/memory/multiple_memory", "Customizing Conversational Memory": "https://python.langchain.com/docs/modules/memory/conversational_customization", "Conversation Knowledge Graph": "https://python.langchain.com/docs/modules/memory/types/kg", "Logging to file": "https://python.langchain.com/docs/modules/callbacks/filecallbackhandler", "Retry parser": "https://python.langchain.com/docs/modules/model_io/output_parsers/retry", "Datetime parser": "https://python.langchain.com/docs/modules/model_io/output_parsers/datetime", "Pydantic (JSON) parser": "https://python.langchain.com/docs/modules/model_io/output_parsers/pydantic", "Select by maximal marginal relevance (MMR)": "https://python.langchain.com/docs/modules/model_io/prompts/example_selectors/mmr", "Select by n-gram overlap": "https://python.langchain.com/docs/modules/model_io/prompts/example_selectors/ngram_overlap", "Prompt pipelining": "https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/prompts_pipelining", "Template formats": "https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/formats", "Connecting to a Feature Store": "https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/connecting_to_a_feature_store", "Router": "https://python.langchain.com/docs/modules/chains/foundational/router", "Transformation": "https://python.langchain.com/docs/modules/chains/foundational/transformation", "Custom chain": "https://python.langchain.com/docs/modules/chains/how_to/custom_chain", "Async API": "https://python.langchain.com/docs/modules/chains/how_to/async_chain", "First we add a step to load memory": "https://python.langchain.com/docs/expression_language/cookbook/retrieval", "Configure Runnable traces": "https://python.langchain.com/docs/expression_language/how_to/trace_config"}, "ElasticSearchBM25Retriever": {"ElasticSearch BM25": "https://python.langchain.com/docs/integrations/retrievers/elastic_search_bm25"}, "ZepMemory": {"Zep": "https://python.langchain.com/docs/integrations/retrievers/zep_memorystore", "Zep Memory": "https://python.langchain.com/docs/integrations/memory/zep_memory"}, "CombinedMemory": {"Zep": "https://python.langchain.com/docs/integrations/retrievers/zep_memorystore", "Multiple Memory classes": "https://python.langchain.com/docs/modules/memory/multiple_memory"}, "VectorStoreRetrieverMemory": {"Zep": "https://python.langchain.com/docs/integrations/retrievers/zep_memorystore"}, 
"HumanMessage": {"Zep": "https://python.langchain.com/docs/integrations/retrievers/zep_memorystore", "Zep Memory": "https://python.langchain.com/docs/integrations/memory/zep_memory", "SQL Chat Message History": "https://python.langchain.com/docs/integrations/memory/sql_chat_message_history", "AzureML Chat Online Endpoint": "https://python.langchain.com/docs/integrations/chat/azureml_chat_endpoint", "Anthropic": "https://python.langchain.com/docs/integrations/chat/anthropic", "\ud83d\ude85 LiteLLM": "https://python.langchain.com/docs/integrations/chat/litellm", "Konko": "https://python.langchain.com/docs/integrations/chat/konko", "OpenAI": "https://python.langchain.com/docs/integrations/chat/openai", "Google Cloud Platform Vertex AI PaLM ": "https://python.langchain.com/docs/integrations/chat/google_vertex_ai_palm", "Bedrock Chat": "https://python.langchain.com/docs/integrations/chat/bedrock", "JinaChat": "https://python.langchain.com/docs/integrations/chat/jinachat", "Ollama": "https://python.langchain.com/docs/integrations/chat/ollama", "Azure": "https://python.langchain.com/docs/integrations/chat/azure_chat_openai", "Baidu Qianfan": "https://python.langchain.com/docs/integrations/chat/baidu_qianfan_endpoint", "ERNIE-Bot Chat": "https://python.langchain.com/docs/integrations/chat/ernie", "PromptLayer ChatOpenAI": "https://python.langchain.com/docs/integrations/chat/promptlayer_chatopenai", "Anyscale": "https://python.langchain.com/docs/integrations/chat/anyscale", "Anthropic Functions": "https://python.langchain.com/docs/integrations/chat/anthropic_functions", "LLMonitor": "https://python.langchain.com/docs/integrations/callbacks/llmonitor", "Context": "https://python.langchain.com/docs/integrations/callbacks/context", "Label Studio": "https://python.langchain.com/docs/integrations/callbacks/labelstudio", "PromptLayer": "https://python.langchain.com/docs/integrations/callbacks/promptlayer", "Log10": "https://python.langchain.com/docs/integrations/providers/log10", "MLflow AI Gateway": "https://python.langchain.com/docs/integrations/providers/mlflow_ai_gateway", "Flyte": "https://python.langchain.com/docs/integrations/providers/flyte", "Arthur": "https://python.langchain.com/docs/integrations/providers/arthur_tracking", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/chatbots", "Structure answers with OpenAI functions": "https://python.langchain.com/docs/use_cases/question_answering/integrations/openai_functions_retrieval_qa", "CAMEL Role-Playing Autonomous Cooperative Agents": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/camel_role_playing", "Multi-Agent Simulated Environment: Petting Zoo": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/petting_zoo", "Multi-agent decentralized speaker selection": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/multiagent_bidding", "Multi-agent authoritarian speaker selection": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/multiagent_authoritarian", "Two-Player Dungeons & Dragons": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/two_player_dnd", "Multi-Player Dungeons & Dragons": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/multi_player_dnd", "Simulated Environment: Gymnasium": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/gymnasium", "Agent Debates with Tools": 
"https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/two_agent_debate_tools", "Custom callback handlers": "https://python.langchain.com/docs/modules/callbacks/custom_callbacks", "Async callbacks": "https://python.langchain.com/docs/modules/callbacks/async_callbacks", "Tools as OpenAI Functions": "https://python.langchain.com/docs/modules/agents/tools/tools_as_openai_functions", "Prompt pipelining": "https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/prompts_pipelining", "Using OpenAI functions": "https://python.langchain.com/docs/modules/chains/how_to/openai_functions"}, "ZepRetriever": {"Zep": "https://python.langchain.com/docs/integrations/providers/zep", "Zep Memory": "https://python.langchain.com/docs/integrations/memory/zep_memory"}, "VespaRetriever": {"Vespa": "https://python.langchain.com/docs/integrations/providers/vespa"}, "AmazonKendraRetriever": {"Amazon Kendra": "https://python.langchain.com/docs/integrations/retrievers/amazon_kendra_retriever"}, "TextLoader": {"Cohere Reranker": "https://python.langchain.com/docs/integrations/retrievers/cohere-reranker", "Confident": "https://python.langchain.com/docs/integrations/callbacks/confident", "Elasticsearch": "https://python.langchain.com/docs/integrations/vectorstores/elasticsearch", "Chat Over Documents with Vectara": "https://python.langchain.com/docs/integrations/providers/vectara/vectara_chat", "Vectorstore": "https://python.langchain.com/docs/integrations/toolkits/vectorstore", "LanceDB": "https://python.langchain.com/docs/integrations/vectorstores/lancedb", "sqlite-vss": "https://python.langchain.com/docs/integrations/vectorstores/sqlitevss", "Weaviate": "https://python.langchain.com/docs/integrations/vectorstores/weaviate", "DashVector": "https://python.langchain.com/docs/integrations/vectorstores/dashvector", "ScaNN": "https://python.langchain.com/docs/integrations/vectorstores/scann", "Xata": "https://python.langchain.com/docs/integrations/vectorstores/xata", "Vectara": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/vectara_self_query", "PGVector": "https://python.langchain.com/docs/integrations/vectorstores/pgvector", "Rockset": "https://python.langchain.com/docs/integrations/vectorstores/rockset", "DingoDB": "https://python.langchain.com/docs/integrations/vectorstores/dingo", "Zilliz": "https://python.langchain.com/docs/integrations/vectorstores/zilliz", "SingleStoreDB": "https://python.langchain.com/docs/integrations/vectorstores/singlestoredb", "Annoy": "https://python.langchain.com/docs/integrations/vectorstores/annoy", "Typesense": "https://python.langchain.com/docs/integrations/vectorstores/typesense", "Atlas": "https://python.langchain.com/docs/integrations/vectorstores/atlas", "Activeloop Deep Lake": "https://python.langchain.com/docs/integrations/vectorstores/activeloop_deeplake", "Neo4j Vector Index": "https://python.langchain.com/docs/integrations/vectorstores/neo4jvector", "Tair": "https://python.langchain.com/docs/integrations/vectorstores/tair", "Chroma": "https://python.langchain.com/docs/integrations/vectorstores/chroma", "Alibaba Cloud OpenSearch": "https://python.langchain.com/docs/integrations/vectorstores/alibabacloud_opensearch", "Baidu Cloud VectorSearch": "https://python.langchain.com/docs/integrations/vectorstores/baiducloud_vector_search", "StarRocks": "https://python.langchain.com/docs/integrations/vectorstores/starrocks", "scikit-learn": "https://python.langchain.com/docs/integrations/vectorstores/sklearn", "Tencent 
Cloud VectorDB": "https://python.langchain.com/docs/integrations/vectorstores/tencentvectordb", "DocArray HnswSearch": "https://python.langchain.com/docs/integrations/vectorstores/docarray_hnsw", "MyScale": "https://python.langchain.com/docs/integrations/vectorstores/myscale", "ClickHouse": "https://python.langchain.com/docs/integrations/vectorstores/clickhouse", "Qdrant": "https://python.langchain.com/docs/integrations/vectorstores/qdrant", "Tigris": "https://python.langchain.com/docs/integrations/vectorstores/tigris", "AwaDB": "https://python.langchain.com/docs/integrations/vectorstores/awadb", "Supabase (Postgres)": "https://python.langchain.com/docs/integrations/vectorstores/supabase", "OpenSearch": "https://python.langchain.com/docs/integrations/vectorstores/opensearch", "Pinecone": "https://python.langchain.com/docs/integrations/vectorstores/pinecone", "BagelDB": "https://python.langchain.com/docs/integrations/vectorstores/bageldb", "Azure Cognitive Search": "https://python.langchain.com/docs/integrations/vectorstores/azuresearch", "Cassandra": "https://python.langchain.com/docs/integrations/vectorstores/cassandra", "USearch": "https://python.langchain.com/docs/integrations/vectorstores/usearch", "Milvus": "https://python.langchain.com/docs/integrations/vectorstores/milvus", "Marqo": "https://python.langchain.com/docs/integrations/vectorstores/marqo", "DocArray InMemorySearch": "https://python.langchain.com/docs/integrations/vectorstores/docarray_in_memory", "Postgres Embedding": "https://python.langchain.com/docs/integrations/vectorstores/pgembedding", "Faiss": "https://python.langchain.com/docs/integrations/vectorstores/faiss", "Epsilla": "https://python.langchain.com/docs/integrations/vectorstores/epsilla", "AnalyticDB": "https://python.langchain.com/docs/integrations/vectorstores/analyticdb", "Hologres": "https://python.langchain.com/docs/integrations/vectorstores/hologres", "your local model path": "https://python.langchain.com/docs/integrations/vectorstores/vearch", "MongoDB Atlas": "https://python.langchain.com/docs/integrations/vectorstores/mongodb_atlas", "Meilisearch": "https://python.langchain.com/docs/integrations/vectorstores/meilisearch", "Conversational Retrieval Agent": "https://python.langchain.com/docs/use_cases/question_answering/how_to/conversational_retrieval_agents", "Analysis of Twitter the-algorithm source code with LangChain, GPT4 and Activeloop's Deep Lake": "https://python.langchain.com/docs/use_cases/question_answering/how_to/code/twitter-the-algorithm-analysis-deeplake", "Use LangChain, GPT and Activeloop's Deep Lake to work with code base": "https://python.langchain.com/docs/use_cases/question_answering/how_to/code/code-analysis-deeplake", "Structure answers with OpenAI functions": "https://python.langchain.com/docs/use_cases/question_answering/integrations/openai_functions_retrieval_qa", "QA using Activeloop's DeepLake": "https://python.langchain.com/docs/use_cases/question_answering/integrations/semantic-search-over-chat", "Graph QA": "https://python.langchain.com/docs/use_cases/more/graph/graph_qa", "Caching": "https://python.langchain.com/docs/modules/data_connection/text_embedding/caching_embeddings", "MultiVector Retriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/multi_vector", "Parent Document Retriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/parent_document_retriever", "Combine agents and vector stores": "https://python.langchain.com/docs/modules/agents/how_to/agent_vectorstore", 
"Loading from LangChainHub": "https://python.langchain.com/docs/modules/chains/how_to/from_hub"}, "FAISS": {"Cohere Reranker": "https://python.langchain.com/docs/integrations/retrievers/cohere-reranker", "Facebook Faiss": "https://python.langchain.com/docs/integrations/providers/facebook_faiss", "Document Comparison": "https://python.langchain.com/docs/integrations/toolkits/document_comparison_toolkit", "Faiss": "https://python.langchain.com/docs/integrations/vectorstores/faiss", "Loading documents from a YouTube url": "https://python.langchain.com/docs/integrations/document_loaders/youtube_audio", "Conversational Retrieval Agent": "https://python.langchain.com/docs/use_cases/question_answering/how_to/conversational_retrieval_agents", "Agents": "https://python.langchain.com/docs/use_cases/more/agents/agents", "AutoGPT": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/autogpt", "BabyAGI User Guide": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/baby_agi", "BabyAGI with Tools": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/baby_agi_with_agent", "!pip install bs4": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/marathon_times", "Plug-and-Plai": "https://python.langchain.com/docs/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval_using_plugnplai", "Custom Agent with PlugIn Retrieval": "https://python.langchain.com/docs/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval", "Generative Agents in LangChain": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/characters", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/qa_structured/sql", "SQL": "https://python.langchain.com/docs/use_cases/sql/sql", "Caching": "https://python.langchain.com/docs/modules/data_connection/text_embedding/caching_embeddings", "Ensemble Retriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/ensemble", "Custom agent with tool retrieval": "https://python.langchain.com/docs/modules/agents/how_to/custom_agent_with_tool_retrieval", "Select by maximal marginal relevance (MMR)": "https://python.langchain.com/docs/modules/model_io/prompts/example_selectors/mmr", "First we add a step to load memory": "https://python.langchain.com/docs/expression_language/cookbook/retrieval"}, "OpenAI": {"Cohere Reranker": "https://python.langchain.com/docs/integrations/retrievers/cohere-reranker", "Google Serper": "https://python.langchain.com/docs/integrations/providers/google_serper", "Human as a tool": "https://python.langchain.com/docs/integrations/tools/human_tools", "OpenWeatherMap": "https://python.langchain.com/docs/integrations/tools/openweathermap", "Search Tools": "https://python.langchain.com/docs/integrations/tools/search_tools", "Zapier Natural Language Actions": "https://python.langchain.com/docs/integrations/tools/zapier", "Gradio": "https://python.langchain.com/docs/integrations/tools/gradio_tools", "SceneXplain": "https://python.langchain.com/docs/integrations/tools/sceneXplain", "Dall-E Image Generator": "https://python.langchain.com/docs/integrations/tools/dalle_image_generator", "Entity Memory with SQLite storage": "https://python.langchain.com/docs/integrations/memory/entity_memory_with_sqlite", "Streamlit Chat Message History": "https://python.langchain.com/docs/integrations/memory/streamlit_chat_message_history", "Confident": 
"https://python.langchain.com/docs/integrations/callbacks/confident", "LLMonitor": "https://python.langchain.com/docs/integrations/callbacks/llmonitor", "Label Studio": "https://python.langchain.com/docs/integrations/callbacks/labelstudio", "Argilla": "https://python.langchain.com/docs/integrations/callbacks/argilla", "PromptLayer": "https://python.langchain.com/docs/integrations/callbacks/promptlayer", "Streamlit": "https://python.langchain.com/docs/integrations/callbacks/.ipynb_checkpoints/streamlit-checkpoint", "Infino": "https://python.langchain.com/docs/integrations/callbacks/infino", "Comet": "https://python.langchain.com/docs/integrations/providers/comet_tracking", "Aim": "https://python.langchain.com/docs/integrations/providers/aim_tracking", "Weights & Biases": "https://python.langchain.com/docs/integrations/providers/wandb_tracking", "SageMaker Tracking": "https://python.langchain.com/docs/integrations/providers/sagemaker_tracking", "OpenAI": "https://python.langchain.com/docs/integrations/llms/openai", "Rebuff": "https://python.langchain.com/docs/integrations/providers/rebuff", "MLflow": "https://python.langchain.com/docs/integrations/providers/mlflow_tracking", "Helicone": "https://python.langchain.com/docs/integrations/providers/helicone", "Shale Protocol": "https://python.langchain.com/docs/integrations/providers/shaleprotocol", "WhyLabs": "https://python.langchain.com/docs/integrations/providers/whylabs_profiling", "WandB Tracing": "https://python.langchain.com/docs/integrations/providers/wandb_tracing", "ClearML": "https://python.langchain.com/docs/integrations/providers/clearml_tracking", "Ray Serve": "https://python.langchain.com/docs/integrations/providers/ray_serve", "Log, Trace, and Monitor": "https://python.langchain.com/docs/integrations/providers/portkey/logging_tracing_portkey", "Portkey": "https://python.langchain.com/docs/integrations/providers/portkey/index", "Chat Over Documents with Vectara": "https://python.langchain.com/docs/integrations/providers/vectara/vectara_chat", "Vectara Text Generation": "https://python.langchain.com/docs/integrations/providers/vectara/vectara_text_generation", "CSV": "https://python.langchain.com/docs/integrations/toolkits/csv", "Xorbits": "https://python.langchain.com/docs/integrations/toolkits/xorbits", "Jira": "https://python.langchain.com/docs/integrations/toolkits/jira", "Spark Dataframe": "https://python.langchain.com/docs/integrations/toolkits/spark", "Python": "https://python.langchain.com/docs/integrations/toolkits/python", "SQL Database": "https://python.langchain.com/docs/integrations/toolkits/sql_database", "Natural Language APIs": "https://python.langchain.com/docs/integrations/toolkits/openapi_nla", "JSON": "https://python.langchain.com/docs/integrations/toolkits/json", "Github": "https://python.langchain.com/docs/integrations/toolkits/github", "Pandas Dataframe": "https://python.langchain.com/docs/integrations/toolkits/pandas", "OpenAPI": "https://python.langchain.com/docs/integrations/toolkits/openapi", "Gitlab": "https://python.langchain.com/docs/integrations/toolkits/gitlab", "Vectara": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/vectara_self_query", "Psychic": "https://python.langchain.com/docs/integrations/document_loaders/psychic", "Docugami": "https://python.langchain.com/docs/integrations/document_loaders/docugami", "Amazon Textract ": "https://python.langchain.com/docs/integrations/document_loaders/pdf-amazonTextractPDFLoader", "OpaquePrompts": 
"https://python.langchain.com/docs/integrations/llms/opaqueprompts", "LLM Caching integrations": "https://python.langchain.com/docs/integrations/llms/llm_caching", "Fallbacks": "https://python.langchain.com/docs/guides/fallbacks", "Removing logical fallacies from model output": "https://python.langchain.com/docs/guides/safety/logical_fallacy_chain", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/apis", "Perform context-aware text splitting": "https://python.langchain.com/docs/use_cases/question_answering/how_to/document-context-aware-QA", "Retrieve from vector stores directly": "https://python.langchain.com/docs/use_cases/question_answering/how_to/vector_db_text_generation", "Retrieve as you generate with FLARE": "https://python.langchain.com/docs/use_cases/question_answering/how_to/flare", "Improve document indexing with HyDE": "https://python.langchain.com/docs/use_cases/question_answering/how_to/hyde", "QA using Activeloop's DeepLake": "https://python.langchain.com/docs/use_cases/question_answering/integrations/semantic-search-over-chat", "Graph QA": "https://python.langchain.com/docs/use_cases/more/graph/graph_qa", "Tree of Thought (ToT) example": "https://python.langchain.com/docs/use_cases/more/graph/tot", "HuggingGPT": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/hugginggpt", "SalesGPT - Your Context-Aware AI Sales Assistant With Knowledge Base": "https://python.langchain.com/docs/use_cases/more/agents/agents/sales_agent_with_context", "Agent Debates with Tools": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/two_agent_debate_tools", "Bash chain": "https://python.langchain.com/docs/use_cases/more/code_writing/llm_bash", "LLM Symbolic Math ": "https://python.langchain.com/docs/use_cases/more/code_writing/llm_symbolic_math", "Summarization checker chain": "https://python.langchain.com/docs/use_cases/more/self_check/llm_summarization_checker", "Self-checking chain": "https://python.langchain.com/docs/use_cases/more/self_check/llm_checker", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/qa_structured/sql", "Vector SQL Retriever with MyScale": "https://python.langchain.com/docs/use_cases/qa_structured/integrations/myscale_vector_sql", "SQL": "https://python.langchain.com/docs/use_cases/sql/sql", "Milvus": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/milvus_self_query", "Weaviate": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/weaviate_self_query", "Elasticsearch": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/elasticsearch_self_query", "Chroma": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/chroma_self_query", "Pinecone": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/pinecone", "Supabase": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/supabase_self_query", "Redis": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/redis_self_query", "MyScale": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/myscale_self_query", "Deep Lake": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/activeloop_deeplake_self_query", "Qdrant": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/qdrant_self_query", "Lost in 
the middle: The problem with long contexts": "https://python.langchain.com/docs/modules/data_connection/document_transformers/post_retrieval/long_context_reorder", "Memory in the Multi-Input Chain": "https://python.langchain.com/docs/modules/memory/adding_memory_chain_multiple_inputs", "Memory in LLMChain": "https://python.langchain.com/docs/modules/memory/adding_memory", "Multiple Memory classes": "https://python.langchain.com/docs/modules/memory/multiple_memory", "Customizing Conversational Memory": "https://python.langchain.com/docs/modules/memory/conversational_customization", "Conversation Knowledge Graph": "https://python.langchain.com/docs/modules/memory/types/kg", "Conversation Token Buffer": "https://python.langchain.com/docs/modules/memory/types/token_buffer", "Conversation Summary Buffer": "https://python.langchain.com/docs/modules/memory/types/summary_buffer", "Multiple callback handlers": "https://python.langchain.com/docs/modules/callbacks/multiple_callbacks", "Token counting": "https://python.langchain.com/docs/modules/callbacks/token_counting", "Logging to file": "https://python.langchain.com/docs/modules/callbacks/filecallbackhandler", "Multi-Input Tools": "https://python.langchain.com/docs/modules/agents/tools/multi_input_tool", "Defining Custom Tools": "https://python.langchain.com/docs/modules/agents/tools/custom_tools", "Tool Input Schema": "https://python.langchain.com/docs/modules/agents/tools/tool_input_validation", "Human-in-the-loop Tool Validation": "https://python.langchain.com/docs/modules/agents/tools/human_approval", "Combine agents and vector stores": "https://python.langchain.com/docs/modules/agents/how_to/agent_vectorstore", "Access intermediate steps": "https://python.langchain.com/docs/modules/agents/how_to/intermediate_steps", "Timeouts for agents": "https://python.langchain.com/docs/modules/agents/how_to/max_time_limit", "Streaming final agent output": "https://python.langchain.com/docs/modules/agents/how_to/streaming_stdout_final_only", "Cap the max number of iterations": "https://python.langchain.com/docs/modules/agents/how_to/max_iterations", "Async API": "https://python.langchain.com/docs/modules/chains/how_to/async_chain", "Tracking token usage": "https://python.langchain.com/docs/modules/model_io/models/llms/token_usage_tracking", "Serialization": "https://python.langchain.com/docs/modules/model_io/models/llms/llm_serialization", "Retry parser": "https://python.langchain.com/docs/modules/model_io/output_parsers/retry", "Datetime parser": "https://python.langchain.com/docs/modules/model_io/output_parsers/datetime", "Pydantic (JSON) parser": "https://python.langchain.com/docs/modules/model_io/output_parsers/pydantic", "Router": "https://python.langchain.com/docs/modules/chains/foundational/router", "Transformation": "https://python.langchain.com/docs/modules/chains/foundational/transformation", "Adding moderation": "https://python.langchain.com/docs/expression_language/cookbook/moderation"}, "ContextualCompressionRetriever": {"Cohere Reranker": "https://python.langchain.com/docs/integrations/retrievers/cohere-reranker", "LOTR (Merger Retriever)": "https://python.langchain.com/docs/integrations/retrievers/merger_retriever"}, "CohereRerank": {"Cohere Reranker": "https://python.langchain.com/docs/integrations/retrievers/cohere-reranker", "Cohere": "https://python.langchain.com/docs/integrations/providers/cohere"}, "RetrievalQA": {"Cohere Reranker": "https://python.langchain.com/docs/integrations/retrievers/cohere-reranker", "Ollama": 
"https://python.langchain.com/docs/integrations/llms/ollama", "Confident": "https://python.langchain.com/docs/integrations/callbacks/confident", "Document Comparison": "https://python.langchain.com/docs/integrations/toolkits/document_comparison_toolkit", "ScaNN": "https://python.langchain.com/docs/integrations/vectorstores/scann", "Activeloop Deep Lake": "https://python.langchain.com/docs/integrations/vectorstores/activeloop_deeplake", "StarRocks": "https://python.langchain.com/docs/integrations/vectorstores/starrocks", "your local model path": "https://python.langchain.com/docs/integrations/vectorstores/vearch", "Loading documents from a YouTube url": "https://python.langchain.com/docs/integrations/document_loaders/youtube_audio", "Docugami": "https://python.langchain.com/docs/integrations/document_loaders/docugami", "Question Answering": "https://python.langchain.com/docs/use_cases/question_answering/question_answering", "Perform context-aware text splitting": "https://python.langchain.com/docs/use_cases/question_answering/how_to/document-context-aware-QA", "Use local LLMs": "https://python.langchain.com/docs/use_cases/question_answering/how_to/local_retrieval_qa", "Structure answers with OpenAI functions": "https://python.langchain.com/docs/use_cases/question_answering/integrations/openai_functions_retrieval_qa", "QA using Activeloop's DeepLake": "https://python.langchain.com/docs/use_cases/question_answering/integrations/semantic-search-over-chat", "SalesGPT - Your Context-Aware AI Sales Assistant With Knowledge Base": "https://python.langchain.com/docs/use_cases/more/agents/agents/sales_agent_with_context", "Combine agents and vector stores": "https://python.langchain.com/docs/modules/agents/how_to/agent_vectorstore"}, "KNNRetriever": {"kNN": "https://python.langchain.com/docs/integrations/retrievers/knn"}, "WikipediaRetriever": {"Wikipedia": "https://python.langchain.com/docs/integrations/providers/wikipedia"}, "ConversationalRetrievalChain": {"Wikipedia": "https://python.langchain.com/docs/integrations/retrievers/wikipedia", "Arxiv": "https://python.langchain.com/docs/integrations/retrievers/arxiv", "Chat Over Documents with Vectara": "https://python.langchain.com/docs/integrations/providers/vectara/vectara_chat", "Vectara": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/vectara_self_query", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/chatbots", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/code_understanding", "Analysis of Twitter the-algorithm source code with LangChain, GPT4 and Activeloop's Deep Lake": "https://python.langchain.com/docs/use_cases/question_answering/how_to/code/twitter-the-algorithm-analysis-deeplake", "Use LangChain, GPT and Activeloop's Deep Lake to work with code base": "https://python.langchain.com/docs/use_cases/question_answering/how_to/code/code-analysis-deeplake", "Structure answers with OpenAI functions": "https://python.langchain.com/docs/use_cases/question_answering/integrations/openai_functions_retrieval_qa", "QA using Activeloop's DeepLake": "https://python.langchain.com/docs/use_cases/question_answering/integrations/semantic-search-over-chat"}, "MetalRetriever": {"Metal": "https://python.langchain.com/docs/integrations/providers/metal"}, "CSVLoader": {"ChatGPT Plugin": "https://python.langchain.com/docs/integrations/retrievers/chatgpt-plugin", "CSV": 
"https://python.langchain.com/docs/integrations/document_loaders/csv"}, "Document": {"ChatGPT Plugin": "https://python.langchain.com/docs/integrations/retrievers/chatgpt-plugin", "Weaviate Hybrid Search": "https://python.langchain.com/docs/integrations/retrievers/weaviate-hybrid", "BM25": "https://python.langchain.com/docs/integrations/retrievers/bm25", "TF-IDF": "https://python.langchain.com/docs/integrations/retrievers/tf_idf", "Apify": "https://python.langchain.com/docs/integrations/tools/apify", "Vectara Text Generation": "https://python.langchain.com/docs/integrations/providers/vectara/vectara_text_generation", "PGVector": "https://python.langchain.com/docs/integrations/vectorstores/pgvector", "Annoy": "https://python.langchain.com/docs/integrations/vectorstores/annoy", "Neo4j Vector Index": "https://python.langchain.com/docs/integrations/vectorstores/neo4jvector", "Postgres Embedding": "https://python.langchain.com/docs/integrations/vectorstores/pgembedding", "Faiss": "https://python.langchain.com/docs/integrations/vectorstores/faiss", "Nuclia Understanding API document transformer": "https://python.langchain.com/docs/integrations/document_transformers/nuclia_transformer", "OpenAI Functions Metadata Tagger": "https://python.langchain.com/docs/integrations/document_transformers/openai_metadata_tagger", "Doctran Extract Properties": "https://python.langchain.com/docs/integrations/document_transformers/doctran_extract_properties", "Doctran Interrogate Documents": "https://python.langchain.com/docs/integrations/document_transformers/doctran_interrogate_document", "Doctran Translate Documents": "https://python.langchain.com/docs/integrations/document_transformers/doctran_translate_document", "TensorFlow Datasets": "https://python.langchain.com/docs/integrations/document_loaders/tensorflow_datasets", "Airbyte Salesforce": "https://python.langchain.com/docs/integrations/document_loaders/airbyte_salesforce", "Airbyte CDK": "https://python.langchain.com/docs/integrations/document_loaders/airbyte_cdk", "Airbyte Stripe": "https://python.langchain.com/docs/integrations/document_loaders/airbyte_stripe", "Copy Paste": "https://python.langchain.com/docs/integrations/document_loaders/copypaste", "Airbyte Typeform": "https://python.langchain.com/docs/integrations/document_loaders/airbyte_typeform", "Apify Dataset": "https://python.langchain.com/docs/integrations/document_loaders/apify_dataset", "Docugami": "https://python.langchain.com/docs/integrations/document_loaders/docugami", "Airbyte Hubspot": "https://python.langchain.com/docs/integrations/document_loaders/airbyte_hubspot", "Airbyte Gong": "https://python.langchain.com/docs/integrations/document_loaders/airbyte_gong", "Airbyte Shopify": "https://python.langchain.com/docs/integrations/document_loaders/airbyte_shopify", "Airbyte Zendesk Support": "https://python.langchain.com/docs/integrations/document_loaders/airbyte_zendesk_support", "SageMakerEndpoint": "https://python.langchain.com/docs/integrations/llms/sagemaker", "LLM Caching integrations": "https://python.langchain.com/docs/integrations/llms/llm_caching", "Retrieve from vector stores directly": "https://python.langchain.com/docs/use_cases/question_answering/how_to/vector_db_text_generation", "Multiple Retrieval Sources": "https://python.langchain.com/docs/use_cases/question_answering/how_to/multiple_retrieval", "Retrieve as you generate with FLARE": "https://python.langchain.com/docs/use_cases/question_answering/how_to/flare", "!pip install bs4": 
"https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/marathon_times", "Plug-and-Plai": "https://python.langchain.com/docs/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval_using_plugnplai", "Custom Agent with PlugIn Retrieval": "https://python.langchain.com/docs/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/qa_structured/sql", "SQL": "https://python.langchain.com/docs/use_cases/sql/sql", "Indexing": "https://python.langchain.com/docs/modules/data_connection/indexing", "MultiVector Retriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/multi_vector", "Milvus": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/milvus_self_query", "Weaviate": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/weaviate_self_query", "Vectara": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/vectara_self_query", "DashVector": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/dashvector", "Elasticsearch": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/elasticsearch_self_query", "Chroma": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/chroma_self_query", "Pinecone": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/pinecone", "Supabase": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/supabase_self_query", "Redis": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/redis_self_query", "MyScale": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/myscale_self_query", "Deep Lake": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/activeloop_deeplake_self_query", "Qdrant": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/qdrant_self_query", "Memory in the Multi-Input Chain": "https://python.langchain.com/docs/modules/memory/adding_memory_chain_multiple_inputs", "Custom agent with tool retrieval": "https://python.langchain.com/docs/modules/agents/how_to/custom_agent_with_tool_retrieval"}, "ChatGPTPluginRetriever": {"ChatGPT Plugin": "https://python.langchain.com/docs/integrations/retrievers/chatgpt-plugin", "OpenAI": "https://python.langchain.com/docs/integrations/providers/openai"}, "GoogleVertexAISearchRetriever": {"Google Vertex AI Search": "https://python.langchain.com/docs/integrations/retrievers/google_vertex_ai_search"}, "DocArrayRetriever": {"DocArray Retriever": "https://python.langchain.com/docs/integrations/retrievers/docarray_retriever"}, "SVMRetriever": {"SVM": "https://python.langchain.com/docs/integrations/retrievers/svm", "Question Answering": "https://python.langchain.com/docs/use_cases/question_answering/question_answering"}, "PineconeHybridSearchRetriever": {"Pinecone Hybrid Search": "https://python.langchain.com/docs/integrations/retrievers/pinecone_hybrid_search"}, "PubMedRetriever": {"PubMed": "https://python.langchain.com/docs/integrations/providers/pubmed"}, "WeaviateHybridSearchRetriever": {"Weaviate Hybrid Search": "https://python.langchain.com/docs/integrations/retrievers/weaviate-hybrid"}, "ArxivRetriever": {"Arxiv": "https://python.langchain.com/docs/integrations/providers/arxiv"}, "BM25Retriever": {"BM25": 
"https://python.langchain.com/docs/integrations/retrievers/bm25", "Ensemble Retriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/ensemble"}, "AzureCognitiveSearchRetriever": {"Azure Cognitive Search": "https://python.langchain.com/docs/integrations/providers/azure_cognitive_search_"}, "ChaindeskRetriever": {"Chaindesk": "https://python.langchain.com/docs/integrations/providers/chaindesk"}, "MergerRetriever": {"LOTR (Merger Retriever)": "https://python.langchain.com/docs/integrations/retrievers/merger_retriever"}, "EmbeddingsRedundantFilter": {"LOTR (Merger Retriever)": "https://python.langchain.com/docs/integrations/retrievers/merger_retriever"}, "EmbeddingsClusteringFilter": {"LOTR (Merger Retriever)": "https://python.langchain.com/docs/integrations/retrievers/merger_retriever"}, "DocumentCompressorPipeline": {"LOTR (Merger Retriever)": "https://python.langchain.com/docs/integrations/retrievers/merger_retriever"}, "LongContextReorder": {"LOTR (Merger Retriever)": "https://python.langchain.com/docs/integrations/retrievers/merger_retriever", "Lost in the middle: The problem with long contexts": "https://python.langchain.com/docs/modules/data_connection/document_transformers/post_retrieval/long_context_reorder"}, "TFIDFRetriever": {"TF-IDF": "https://python.langchain.com/docs/integrations/retrievers/tf_idf"}, "load_tools": {"ChatGPT Plugins": "https://python.langchain.com/docs/integrations/tools/chatgpt_plugins", "Human as a tool": "https://python.langchain.com/docs/integrations/tools/human_tools", "AWS Lambda": "https://python.langchain.com/docs/integrations/tools/awslambda", "Google Drive": "https://python.langchain.com/docs/integrations/tools/google_drive", "Requests": "https://python.langchain.com/docs/integrations/tools/requests", "OpenWeatherMap": "https://python.langchain.com/docs/integrations/providers/openweathermap", "Search Tools": "https://python.langchain.com/docs/integrations/tools/search_tools", "Eleven Labs Text2Speech": "https://python.langchain.com/docs/integrations/tools/eleven_labs_tts", "ArXiv": "https://python.langchain.com/docs/integrations/tools/arxiv", "GraphQL": "https://python.langchain.com/docs/integrations/tools/graphql", "SceneXplain": "https://python.langchain.com/docs/integrations/tools/sceneXplain", "Dall-E Image Generator": "https://python.langchain.com/docs/integrations/tools/dalle_image_generator", "LLMonitor": "https://python.langchain.com/docs/integrations/callbacks/llmonitor", "Argilla": "https://python.langchain.com/docs/integrations/callbacks/argilla", "Streamlit": "https://python.langchain.com/docs/integrations/callbacks/.ipynb_checkpoints/streamlit-checkpoint", "SerpAPI": "https://python.langchain.com/docs/integrations/providers/serpapi", "Comet": "https://python.langchain.com/docs/integrations/providers/comet_tracking", "Aim": "https://python.langchain.com/docs/integrations/providers/aim_tracking", "Golden": "https://python.langchain.com/docs/integrations/providers/golden", "Weights & Biases": "https://python.langchain.com/docs/integrations/providers/wandb_tracking", "SageMaker Tracking": "https://python.langchain.com/docs/integrations/providers/sagemaker_tracking", "Wolfram Alpha": "https://python.langchain.com/docs/integrations/providers/wolfram_alpha", "MLflow": "https://python.langchain.com/docs/integrations/providers/mlflow_tracking", "DataForSEO": "https://python.langchain.com/docs/integrations/providers/dataforseo", "SearxNG Search API": "https://python.langchain.com/docs/integrations/providers/searx", "Google 
Serper": "https://python.langchain.com/docs/integrations/providers/google_serper", "Flyte": "https://python.langchain.com/docs/integrations/providers/flyte", "WandB Tracing": "https://python.langchain.com/docs/integrations/providers/wandb_tracing", "ClearML": "https://python.langchain.com/docs/integrations/providers/clearml_tracking", "Google Search": "https://python.langchain.com/docs/integrations/providers/google_search", "Log, Trace, and Monitor": "https://python.langchain.com/docs/integrations/providers/portkey/logging_tracing_portkey", "Portkey": "https://python.langchain.com/docs/integrations/providers/portkey/index", "Google Drive tool": "https://python.langchain.com/docs/integrations/toolkits/google_drive", "Bittensor": "https://python.langchain.com/docs/integrations/llms/bittensor", "Amazon API Gateway": "https://python.langchain.com/docs/integrations/llms/amazon_api_gateway", "Debugging": "https://python.langchain.com/docs/guides/debugging", "LangSmith Walkthrough": "https://python.langchain.com/docs/guides/langsmith/walkthrough", "Agents": "https://python.langchain.com/docs/use_cases/more/agents/agents", "Agent Debates with Tools": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/two_agent_debate_tools", "Multiple callback handlers": "https://python.langchain.com/docs/modules/callbacks/multiple_callbacks", "Defining Custom Tools": "https://python.langchain.com/docs/modules/agents/tools/custom_tools", "Human-in-the-loop Tool Validation": "https://python.langchain.com/docs/modules/agents/tools/human_approval", "Access intermediate steps": "https://python.langchain.com/docs/modules/agents/how_to/intermediate_steps", "Timeouts for agents": "https://python.langchain.com/docs/modules/agents/how_to/max_time_limit", "Streaming final agent output": "https://python.langchain.com/docs/modules/agents/how_to/streaming_stdout_final_only", "Cap the max number of iterations": "https://python.langchain.com/docs/modules/agents/how_to/max_iterations", "Async API": "https://python.langchain.com/docs/modules/agents/how_to/async_agent", "Human input chat model": "https://python.langchain.com/docs/modules/model_io/models/chat/human_input_chat_model", "Fake LLM": "https://python.langchain.com/docs/modules/model_io/models/llms/fake_llm", "Tracking token usage": "https://python.langchain.com/docs/modules/model_io/models/llms/token_usage_tracking", "Human input LLM": "https://python.langchain.com/docs/modules/model_io/models/llms/human_input_llm"}, "initialize_agent": {"ChatGPT Plugins": "https://python.langchain.com/docs/integrations/tools/chatgpt_plugins", "Google Serper": "https://python.langchain.com/docs/integrations/providers/google_serper", "Human as a tool": "https://python.langchain.com/docs/integrations/tools/human_tools", "Yahoo Finance News": "https://python.langchain.com/docs/integrations/tools/yahoo_finance_news", "AWS Lambda": "https://python.langchain.com/docs/integrations/tools/awslambda", "Google Drive": "https://python.langchain.com/docs/integrations/tools/google_drive", "OpenWeatherMap": "https://python.langchain.com/docs/integrations/tools/openweathermap", "Search Tools": "https://python.langchain.com/docs/integrations/tools/search_tools", "Eleven Labs Text2Speech": "https://python.langchain.com/docs/integrations/tools/eleven_labs_tts", "Zapier Natural Language Actions": "https://python.langchain.com/docs/integrations/tools/zapier", "ArXiv": "https://python.langchain.com/docs/integrations/tools/arxiv", "Metaphor Search": 
"https://python.langchain.com/docs/integrations/tools/metaphor_search", "GraphQL": "https://python.langchain.com/docs/integrations/tools/graphql", "Gradio": "https://python.langchain.com/docs/integrations/tools/gradio_tools", "SceneXplain": "https://python.langchain.com/docs/integrations/tools/sceneXplain", "Eden AI": "https://python.langchain.com/docs/integrations/tools/edenai_tools", "Dall-E Image Generator": "https://python.langchain.com/docs/integrations/tools/dalle_image_generator", "Shell (bash)": "https://python.langchain.com/docs/integrations/tools/bash", "Zep Memory": "https://python.langchain.com/docs/integrations/memory/zep_memory", "Xata chat memory": "https://python.langchain.com/docs/integrations/memory/xata_chat_message_history", "Dynamodb Chat Message History": "https://python.langchain.com/docs/integrations/memory/dynamodb_chat_message_history", "LLMonitor": "https://python.langchain.com/docs/integrations/callbacks/llmonitor", "Argilla": "https://python.langchain.com/docs/integrations/callbacks/argilla", "Streamlit": "https://python.langchain.com/docs/integrations/callbacks/.ipynb_checkpoints/streamlit-checkpoint", "Comet": "https://python.langchain.com/docs/integrations/providers/comet_tracking", "Aim": "https://python.langchain.com/docs/integrations/providers/aim_tracking", "Weights & Biases": "https://python.langchain.com/docs/integrations/providers/wandb_tracking", "SageMaker Tracking": "https://python.langchain.com/docs/integrations/providers/sagemaker_tracking", "MLflow": "https://python.langchain.com/docs/integrations/providers/mlflow_tracking", "Flyte": "https://python.langchain.com/docs/integrations/providers/flyte", "WandB Tracing": "https://python.langchain.com/docs/integrations/providers/wandb_tracing", "ClearML": "https://python.langchain.com/docs/integrations/providers/clearml_tracking", "Log, Trace, and Monitor": "https://python.langchain.com/docs/integrations/providers/portkey/logging_tracing_portkey", "Portkey": "https://python.langchain.com/docs/integrations/providers/portkey/index", "Jira": "https://python.langchain.com/docs/integrations/toolkits/jira", "Document Comparison": "https://python.langchain.com/docs/integrations/toolkits/document_comparison_toolkit", "Azure Cognitive Services": "https://python.langchain.com/docs/integrations/toolkits/azure_cognitive_services", "Natural Language APIs": "https://python.langchain.com/docs/integrations/toolkits/openapi_nla", "Gmail": "https://python.langchain.com/docs/integrations/toolkits/gmail", "Github": "https://python.langchain.com/docs/integrations/toolkits/github", "Google Drive tool": "https://python.langchain.com/docs/integrations/toolkits/google_drive", "AINetwork": "https://python.langchain.com/docs/integrations/toolkits/ainetwork", "PlayWright Browser": "https://python.langchain.com/docs/integrations/toolkits/playwright", "Office365": "https://python.langchain.com/docs/integrations/toolkits/office365", "MultiOn": "https://python.langchain.com/docs/integrations/toolkits/multion", "Amadeus": "https://python.langchain.com/docs/integrations/toolkits/amadeus", "Gitlab": "https://python.langchain.com/docs/integrations/toolkits/gitlab", "Bittensor": "https://python.langchain.com/docs/integrations/llms/bittensor", "Amazon API Gateway": "https://python.langchain.com/docs/integrations/llms/amazon_api_gateway", "Debugging": "https://python.langchain.com/docs/guides/debugging", "LangSmith Walkthrough": "https://python.langchain.com/docs/guides/langsmith/walkthrough", "Hugging Face Prompt Injection Identification": 
"https://python.langchain.com/docs/guides/safety/hugging_face_prompt_injection", "Comparing Chain Outputs": "https://python.langchain.com/docs/guides/evaluation/examples/comparisons", "Agent Trajectory": "https://python.langchain.com/docs/guides/evaluation/trajectory/trajectory_eval", "Agents": "https://python.langchain.com/docs/use_cases/more/agents/agents", "Multi-modal outputs: Image & Text": "https://python.langchain.com/docs/use_cases/more/agents/multi_modal/multi_modal_output_agent", "Agent Debates with Tools": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/two_agent_debate_tools", "Multiple callback handlers": "https://python.langchain.com/docs/modules/callbacks/multiple_callbacks", "Multi-Input Tools": "https://python.langchain.com/docs/modules/agents/tools/multi_input_tool", "Defining Custom Tools": "https://python.langchain.com/docs/modules/agents/tools/custom_tools", "Tool Input Schema": "https://python.langchain.com/docs/modules/agents/tools/tool_input_validation", "Human-in-the-loop Tool Validation": "https://python.langchain.com/docs/modules/agents/tools/human_approval", "Self-ask with search": "https://python.langchain.com/docs/modules/agents/agent_types/self_ask_with_search", "ReAct document store": "https://python.langchain.com/docs/modules/agents/agent_types/react_docstore", "OpenAI Multi Functions Agent": "https://python.langchain.com/docs/modules/agents/agent_types/openai_multi_functions_agent", "Combine agents and vector stores": "https://python.langchain.com/docs/modules/agents/how_to/agent_vectorstore", "Access intermediate steps": "https://python.langchain.com/docs/modules/agents/how_to/intermediate_steps", "Handle parsing errors": "https://python.langchain.com/docs/modules/agents/how_to/handle_parsing_errors", "Running Agent as an Iterator": "https://python.langchain.com/docs/modules/agents/how_to/agent_iter", "Timeouts for agents": "https://python.langchain.com/docs/modules/agents/how_to/max_time_limit", "Streaming final agent output": "https://python.langchain.com/docs/modules/agents/how_to/streaming_stdout_final_only", "Add Memory to OpenAI Functions Agent": "https://python.langchain.com/docs/modules/agents/how_to/add_memory_openai_functions", "Cap the max number of iterations": "https://python.langchain.com/docs/modules/agents/how_to/max_iterations", "Custom functions with OpenAI Functions Agent": "https://python.langchain.com/docs/modules/agents/how_to/custom-functions-with-openai-functions-agent", "Async API": "https://python.langchain.com/docs/modules/agents/how_to/async_agent", "Use ToolKits with OpenAI Functions": "https://python.langchain.com/docs/modules/agents/how_to/use_toolkits_with_openai_functions", "Human input chat model": "https://python.langchain.com/docs/modules/model_io/models/chat/human_input_chat_model", "Fake LLM": "https://python.langchain.com/docs/modules/model_io/models/llms/fake_llm", "Tracking token usage": "https://python.langchain.com/docs/modules/model_io/models/llms/token_usage_tracking", "Human input LLM": "https://python.langchain.com/docs/modules/model_io/models/llms/human_input_llm"}, "AgentType": {"ChatGPT Plugins": "https://python.langchain.com/docs/integrations/tools/chatgpt_plugins", "Google Serper": "https://python.langchain.com/docs/integrations/providers/google_serper", "Human as a tool": "https://python.langchain.com/docs/integrations/tools/human_tools", "Yahoo Finance News": "https://python.langchain.com/docs/integrations/tools/yahoo_finance_news", "AWS Lambda": 
"https://python.langchain.com/docs/integrations/tools/awslambda", "Google Drive": "https://python.langchain.com/docs/integrations/tools/google_drive", "OpenWeatherMap": "https://python.langchain.com/docs/integrations/tools/openweathermap", "Search Tools": "https://python.langchain.com/docs/integrations/tools/search_tools", "Eleven Labs Text2Speech": "https://python.langchain.com/docs/integrations/tools/eleven_labs_tts", "Zapier Natural Language Actions": "https://python.langchain.com/docs/integrations/tools/zapier", "ArXiv": "https://python.langchain.com/docs/integrations/tools/arxiv", "Metaphor Search": "https://python.langchain.com/docs/integrations/tools/metaphor_search", "GraphQL": "https://python.langchain.com/docs/integrations/tools/graphql", "Eden AI": "https://python.langchain.com/docs/integrations/tools/edenai_tools", "Shell (bash)": "https://python.langchain.com/docs/integrations/tools/bash", "Zep Memory": "https://python.langchain.com/docs/integrations/memory/zep_memory", "Xata chat memory": "https://python.langchain.com/docs/integrations/memory/xata_chat_message_history", "Dynamodb Chat Message History": "https://python.langchain.com/docs/integrations/memory/dynamodb_chat_message_history", "LLMonitor": "https://python.langchain.com/docs/integrations/callbacks/llmonitor", "Argilla": "https://python.langchain.com/docs/integrations/callbacks/argilla", "Streamlit": "https://python.langchain.com/docs/integrations/callbacks/.ipynb_checkpoints/streamlit-checkpoint", "Aim": "https://python.langchain.com/docs/integrations/providers/aim_tracking", "Weights & Biases": "https://python.langchain.com/docs/integrations/providers/wandb_tracking", "MLflow": "https://python.langchain.com/docs/integrations/providers/mlflow_tracking", "Flyte": "https://python.langchain.com/docs/integrations/providers/flyte", "WandB Tracing": "https://python.langchain.com/docs/integrations/providers/wandb_tracing", "ClearML": "https://python.langchain.com/docs/integrations/providers/clearml_tracking", "Log, Trace, and Monitor": "https://python.langchain.com/docs/integrations/providers/portkey/logging_tracing_portkey", "Portkey": "https://python.langchain.com/docs/integrations/providers/portkey/index", "CSV": "https://python.langchain.com/docs/integrations/toolkits/csv", "Jira": "https://python.langchain.com/docs/integrations/toolkits/jira", "Document Comparison": "https://python.langchain.com/docs/integrations/toolkits/document_comparison_toolkit", "Python": "https://python.langchain.com/docs/integrations/toolkits/python", "Azure Cognitive Services": "https://python.langchain.com/docs/integrations/toolkits/azure_cognitive_services", "SQL Database": "https://python.langchain.com/docs/integrations/toolkits/sql_database", "Natural Language APIs": "https://python.langchain.com/docs/integrations/toolkits/openapi_nla", "Gmail": "https://python.langchain.com/docs/integrations/toolkits/gmail", "Airbyte Question Answering": "https://python.langchain.com/docs/integrations/toolkits/airbyte_structured_qa", "Github": "https://python.langchain.com/docs/integrations/toolkits/github", "Google Drive tool": "https://python.langchain.com/docs/integrations/toolkits/google_drive", "AINetwork": "https://python.langchain.com/docs/integrations/toolkits/ainetwork", "PlayWright Browser": "https://python.langchain.com/docs/integrations/toolkits/playwright", "Office365": "https://python.langchain.com/docs/integrations/toolkits/office365", "Pandas Dataframe": "https://python.langchain.com/docs/integrations/toolkits/pandas", "MultiOn": 
"https://python.langchain.com/docs/integrations/toolkits/multion", "Amadeus": "https://python.langchain.com/docs/integrations/toolkits/amadeus", "Gitlab": "https://python.langchain.com/docs/integrations/toolkits/gitlab", "Bittensor": "https://python.langchain.com/docs/integrations/llms/bittensor", "Amazon API Gateway": "https://python.langchain.com/docs/integrations/llms/amazon_api_gateway", "Debugging": "https://python.langchain.com/docs/guides/debugging", "LangSmith Walkthrough": "https://python.langchain.com/docs/guides/langsmith/walkthrough", "Hugging Face Prompt Injection Identification": "https://python.langchain.com/docs/guides/safety/hugging_face_prompt_injection", "Comparing Chain Outputs": "https://python.langchain.com/docs/guides/evaluation/examples/comparisons", "Agent Trajectory": "https://python.langchain.com/docs/guides/evaluation/trajectory/trajectory_eval", "Agents": "https://python.langchain.com/docs/use_cases/more/agents/agents", "Multi-modal outputs: Image & Text": "https://python.langchain.com/docs/use_cases/more/agents/multi_modal/multi_modal_output_agent", "Agent Debates with Tools": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/two_agent_debate_tools", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/qa_structured/sql", "SQL": "https://python.langchain.com/docs/use_cases/sql/sql", "Multiple callback handlers": "https://python.langchain.com/docs/modules/callbacks/multiple_callbacks", "Multi-Input Tools": "https://python.langchain.com/docs/modules/agents/tools/multi_input_tool", "Defining Custom Tools": "https://python.langchain.com/docs/modules/agents/tools/custom_tools", "Tool Input Schema": "https://python.langchain.com/docs/modules/agents/tools/tool_input_validation", "Human-in-the-loop Tool Validation": "https://python.langchain.com/docs/modules/agents/tools/human_approval", "Self-ask with search": "https://python.langchain.com/docs/modules/agents/agent_types/self_ask_with_search", "ReAct document store": "https://python.langchain.com/docs/modules/agents/agent_types/react_docstore", "OpenAI Multi Functions Agent": "https://python.langchain.com/docs/modules/agents/agent_types/openai_multi_functions_agent", "Combine agents and vector stores": "https://python.langchain.com/docs/modules/agents/how_to/agent_vectorstore", "Access intermediate steps": "https://python.langchain.com/docs/modules/agents/how_to/intermediate_steps", "Handle parsing errors": "https://python.langchain.com/docs/modules/agents/how_to/handle_parsing_errors", "Running Agent as an Iterator": "https://python.langchain.com/docs/modules/agents/how_to/agent_iter", "Timeouts for agents": "https://python.langchain.com/docs/modules/agents/how_to/max_time_limit", "Streaming final agent output": "https://python.langchain.com/docs/modules/agents/how_to/streaming_stdout_final_only", "Add Memory to OpenAI Functions Agent": "https://python.langchain.com/docs/modules/agents/how_to/add_memory_openai_functions", "Cap the max number of iterations": "https://python.langchain.com/docs/modules/agents/how_to/max_iterations", "Custom functions with OpenAI Functions Agent": "https://python.langchain.com/docs/modules/agents/how_to/custom-functions-with-openai-functions-agent", "Async API": "https://python.langchain.com/docs/modules/agents/how_to/async_agent", "Use ToolKits with OpenAI Functions": "https://python.langchain.com/docs/modules/agents/how_to/use_toolkits_with_openai_functions", "Human input chat model": 
"https://python.langchain.com/docs/modules/model_io/models/chat/human_input_chat_model", "Fake LLM": "https://python.langchain.com/docs/modules/model_io/models/llms/fake_llm", "Tracking token usage": "https://python.langchain.com/docs/modules/model_io/models/llms/token_usage_tracking", "Human input LLM": "https://python.langchain.com/docs/modules/model_io/models/llms/human_input_llm"}, "AIPluginTool": {"ChatGPT Plugins": "https://python.langchain.com/docs/integrations/tools/chatgpt_plugins"}, "DataForSeoAPIWrapper": {"DataForSeo": "https://python.langchain.com/docs/integrations/tools/dataforseo", "DataForSEO": "https://python.langchain.com/docs/integrations/providers/dataforseo"}, "Tool": {"DataForSeo": "https://python.langchain.com/docs/integrations/tools/dataforseo", "Google Serper": "https://python.langchain.com/docs/integrations/providers/google_serper", "SerpAPI": "https://python.langchain.com/docs/integrations/tools/serpapi", "Google Search": "https://python.langchain.com/docs/integrations/tools/google_search", "Zep Memory": "https://python.langchain.com/docs/integrations/memory/zep_memory", "Dynamodb Chat Message History": "https://python.langchain.com/docs/integrations/memory/dynamodb_chat_message_history", "SageMaker Tracking": "https://python.langchain.com/docs/integrations/providers/sagemaker_tracking", "Document Comparison": "https://python.langchain.com/docs/integrations/toolkits/document_comparison_toolkit", "Natural Language APIs": "https://python.langchain.com/docs/integrations/toolkits/openapi_nla", "Github": "https://python.langchain.com/docs/integrations/toolkits/github", "Bittensor": "https://python.langchain.com/docs/integrations/llms/bittensor", "Pydantic compatibility": "https://python.langchain.com/docs/guides/pydantic_compatibility", "Comparing Chain Outputs": "https://python.langchain.com/docs/guides/evaluation/examples/comparisons", "Agents": "https://python.langchain.com/docs/use_cases/more/agents/agents", "AutoGPT": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/autogpt", "BabyAGI with Tools": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/baby_agi_with_agent", "Plug-and-Plai": "https://python.langchain.com/docs/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval_using_plugnplai", "Wikibase Agent": "https://python.langchain.com/docs/use_cases/more/agents/agents/wikibase_agent", "SalesGPT - Your Context-Aware AI Sales Assistant With Knowledge Base": "https://python.langchain.com/docs/use_cases/more/agents/agents/sales_agent_with_context", "Custom Agent with PlugIn Retrieval": "https://python.langchain.com/docs/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval", "Agent Debates with Tools": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/two_agent_debate_tools", "Message Memory in Agent backed by a database": "https://python.langchain.com/docs/modules/memory/agent_with_memory_in_db", "Memory in Agent": "https://python.langchain.com/docs/modules/memory/agent_with_memory", "Multi-Input Tools": "https://python.langchain.com/docs/modules/agents/tools/multi_input_tool", "Defining Custom Tools": "https://python.langchain.com/docs/modules/agents/tools/custom_tools", "Self-ask with search": "https://python.langchain.com/docs/modules/agents/agent_types/self_ask_with_search", "ReAct document store": "https://python.langchain.com/docs/modules/agents/agent_types/react_docstore", "OpenAI Multi Functions Agent": 
"https://python.langchain.com/docs/modules/agents/agent_types/openai_multi_functions_agent", "Combine agents and vector stores": "https://python.langchain.com/docs/modules/agents/how_to/agent_vectorstore", "Custom MRKL agent": "https://python.langchain.com/docs/modules/agents/how_to/custom_mrkl_agent", "Handle parsing errors": "https://python.langchain.com/docs/modules/agents/how_to/handle_parsing_errors", "Shared memory across agents and tools": "https://python.langchain.com/docs/modules/agents/how_to/sharedmemory_for_tools", "Custom multi-action agent": "https://python.langchain.com/docs/modules/agents/how_to/custom_multi_action_agent", "Running Agent as an Iterator": "https://python.langchain.com/docs/modules/agents/how_to/agent_iter", "Timeouts for agents": "https://python.langchain.com/docs/modules/agents/how_to/max_time_limit", "Add Memory to OpenAI Functions Agent": "https://python.langchain.com/docs/modules/agents/how_to/add_memory_openai_functions", "Cap the max number of iterations": "https://python.langchain.com/docs/modules/agents/how_to/max_iterations", "Custom agent": "https://python.langchain.com/docs/modules/agents/how_to/custom_agent", "Use ToolKits with OpenAI Functions": "https://python.langchain.com/docs/modules/agents/how_to/use_toolkits_with_openai_functions", "Custom agent with tool retrieval": "https://python.langchain.com/docs/modules/agents/how_to/custom_agent_with_tool_retrieval"}, "SearxSearchWrapper": {"SearxNG Search": "https://python.langchain.com/docs/integrations/tools/searx_search", "SearxNG Search API": "https://python.langchain.com/docs/integrations/providers/searx"}, "GoogleSerperAPIWrapper": {"Google Serper": "https://python.langchain.com/docs/integrations/providers/google_serper", "Retrieve as you generate with FLARE": "https://python.langchain.com/docs/use_cases/question_answering/how_to/flare"}, "GooglePlacesTool": {"Google Places": "https://python.langchain.com/docs/integrations/tools/google_places"}, "HumanInputRun": {"Human as a tool": "https://python.langchain.com/docs/integrations/tools/human_tools", "!pip install bs4": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/marathon_times"}, "NucliaUnderstandingAPI": {"Nuclia Understanding": "https://python.langchain.com/docs/integrations/tools/nuclia", "Nuclia Understanding API document transformer": "https://python.langchain.com/docs/integrations/document_transformers/nuclia_transformer", "Nuclia Understanding API document loader": "https://python.langchain.com/docs/integrations/document_loaders/nuclia"}, "YahooFinanceNewsTool": {"Yahoo Finance News": "https://python.langchain.com/docs/integrations/tools/yahoo_finance_news"}, "TwilioAPIWrapper": {"Twilio": "https://python.langchain.com/docs/integrations/tools/twilio"}, "IFTTTWebhook": {"IFTTT WebHooks": "https://python.langchain.com/docs/integrations/tools/ifttt"}, "WikipediaQueryRun": {"Wikipedia": "https://python.langchain.com/docs/integrations/tools/wikipedia"}, "WikipediaAPIWrapper": {"Wikipedia": "https://python.langchain.com/docs/integrations/tools/wikipedia", "Zep Memory": "https://python.langchain.com/docs/integrations/memory/zep_memory"}, "AlphaVantageAPIWrapper": {"Alpha Vantage": "https://python.langchain.com/docs/integrations/tools/alpha_vantage"}, "TextRequestsWrapper": {"Requests": "https://python.langchain.com/docs/integrations/tools/requests", "JSON": "https://python.langchain.com/docs/integrations/toolkits/json", "OpenAPI": "https://python.langchain.com/docs/integrations/toolkits/openapi", "Tool Input 
Schema": "https://python.langchain.com/docs/modules/agents/tools/tool_input_validation"}, "OpenWeatherMapAPIWrapper": {"OpenWeatherMap": "https://python.langchain.com/docs/integrations/providers/openweathermap"}, "PubmedQueryRun": {"PubMed": "https://python.langchain.com/docs/integrations/tools/pubmed"}, "YouTubeSearchTool": {"YouTube": "https://python.langchain.com/docs/integrations/tools/youtube"}, "ElevenLabsText2SpeechTool": {"Eleven Labs Text2Speech": "https://python.langchain.com/docs/integrations/tools/eleven_labs_tts"}, "VectorstoreIndexCreator": {"Apify": "https://python.langchain.com/docs/integrations/tools/apify", "HuggingFace dataset": "https://python.langchain.com/docs/integrations/document_loaders/hugging_face_dataset", "Spreedly": "https://python.langchain.com/docs/integrations/document_loaders/spreedly", "Image captions": "https://python.langchain.com/docs/integrations/document_loaders/image_captions", "Figma": "https://python.langchain.com/docs/integrations/document_loaders/figma", "Apify Dataset": "https://python.langchain.com/docs/integrations/document_loaders/apify_dataset", "Iugu": "https://python.langchain.com/docs/integrations/document_loaders/iugu", "Stripe": "https://python.langchain.com/docs/integrations/document_loaders/stripe", "Modern Treasury": "https://python.langchain.com/docs/integrations/document_loaders/modern_treasury", "Question Answering": "https://python.langchain.com/docs/use_cases/question_answering/question_answering", "Multiple Retrieval Sources": "https://python.langchain.com/docs/use_cases/question_answering/how_to/multiple_retrieval"}, "ApifyWrapper": {"Apify": "https://python.langchain.com/docs/integrations/providers/apify"}, "ZapierToolkit": {"Zapier Natural Language Actions": "https://python.langchain.com/docs/integrations/tools/zapier"}, "ZapierNLAWrapper": {"Zapier Natural Language Actions": "https://python.langchain.com/docs/integrations/tools/zapier"}, "LLMChain": {"Zapier Natural Language Actions": "https://python.langchain.com/docs/integrations/tools/zapier", "Dall-E Image Generator": "https://python.langchain.com/docs/integrations/tools/dalle_image_generator", "Streamlit Chat Message History": "https://python.langchain.com/docs/integrations/memory/streamlit_chat_message_history", "Argilla": "https://python.langchain.com/docs/integrations/callbacks/argilla", "Comet": "https://python.langchain.com/docs/integrations/providers/comet_tracking", "Aim": "https://python.langchain.com/docs/integrations/providers/aim_tracking", "Weights & Biases": "https://python.langchain.com/docs/integrations/providers/wandb_tracking", "SageMaker Tracking": "https://python.langchain.com/docs/integrations/providers/sagemaker_tracking", "Rebuff": "https://python.langchain.com/docs/integrations/providers/rebuff", "MLflow": "https://python.langchain.com/docs/integrations/providers/mlflow_tracking", "Flyte": "https://python.langchain.com/docs/integrations/providers/flyte", "Chat Over Documents with Vectara": "https://python.langchain.com/docs/integrations/providers/vectara/vectara_chat", "Vectara Text Generation": "https://python.langchain.com/docs/integrations/providers/vectara/vectara_text_generation", "Natural Language APIs": "https://python.langchain.com/docs/integrations/toolkits/openapi_nla", "JSON": "https://python.langchain.com/docs/integrations/toolkits/json", "Figma": "https://python.langchain.com/docs/integrations/document_loaders/figma", "Predibase": "https://python.langchain.com/docs/integrations/llms/predibase", "Eden AI": 
"https://python.langchain.com/docs/integrations/llms/edenai", "Azure ML": "https://python.langchain.com/docs/integrations/llms/azure_ml", "Removing logical fallacies from model output": "https://python.langchain.com/docs/guides/safety/logical_fallacy_chain", "Amazon Comprehend Moderation Chain": "https://python.langchain.com/docs/guides/safety/amazon_comprehend_chain", "Custom Trajectory Evaluator": "https://python.langchain.com/docs/guides/evaluation/trajectory/custom", "Custom Pairwise Evaluator": "https://python.langchain.com/docs/guides/evaluation/comparison/custom", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/apis", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/summarization", "Retrieve from vector stores directly": "https://python.langchain.com/docs/use_cases/question_answering/how_to/vector_db_text_generation", "Improve document indexing with HyDE": "https://python.langchain.com/docs/use_cases/question_answering/how_to/hyde", "Structure answers with OpenAI functions": "https://python.langchain.com/docs/use_cases/question_answering/integrations/openai_functions_retrieval_qa", "Multi-agent authoritarian speaker selection": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/multiagent_authoritarian", "WebResearchRetriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/web_research", "Lost in the middle: The problem with long contexts": "https://python.langchain.com/docs/modules/data_connection/document_transformers/post_retrieval/long_context_reorder", "Memory in LLMChain": "https://python.langchain.com/docs/modules/memory/adding_memory", "Logging to file": "https://python.langchain.com/docs/modules/callbacks/filecallbackhandler", "XML Agent": "https://python.langchain.com/docs/modules/agents/agent_types/xml_agent", "Datetime parser": "https://python.langchain.com/docs/modules/model_io/output_parsers/datetime", "Prompt pipelining": "https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/prompts_pipelining", "Connecting to a Feature Store": "https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/connecting_to_a_feature_store", "Router": "https://python.langchain.com/docs/modules/chains/foundational/router", "Transformation": "https://python.langchain.com/docs/modules/chains/foundational/transformation", "Async API": "https://python.langchain.com/docs/modules/chains/how_to/async_chain"}, "TransformChain": {"Zapier Natural Language Actions": "https://python.langchain.com/docs/integrations/tools/zapier", "Rebuff": "https://python.langchain.com/docs/integrations/providers/rebuff", "Transformation": "https://python.langchain.com/docs/modules/chains/foundational/transformation"}, "SimpleSequentialChain": {"Zapier Natural Language Actions": "https://python.langchain.com/docs/integrations/tools/zapier", "SageMaker Tracking": "https://python.langchain.com/docs/integrations/providers/sagemaker_tracking", "Rebuff": "https://python.langchain.com/docs/integrations/providers/rebuff", "Baseten": "https://python.langchain.com/docs/integrations/llms/baseten", "Predibase": "https://python.langchain.com/docs/integrations/llms/predibase", "Eden AI": "https://python.langchain.com/docs/integrations/llms/edenai", "Replicate": "https://python.langchain.com/docs/integrations/llms/replicate", "Transformation": "https://python.langchain.com/docs/modules/chains/foundational/transformation"}, "ZapierNLARunAction": 
{"Zapier Natural Language Actions": "https://python.langchain.com/docs/integrations/tools/zapier"}, "GoldenQueryAPIWrapper": {"Golden Query": "https://python.langchain.com/docs/integrations/tools/golden_query", "Golden": "https://python.langchain.com/docs/integrations/providers/golden"}, "ArxivAPIWrapper": {"ArXiv": "https://python.langchain.com/docs/integrations/tools/arxiv"}, "tool": {"Metaphor Search": "https://python.langchain.com/docs/integrations/tools/metaphor_search", "LLMonitor": "https://python.langchain.com/docs/integrations/callbacks/llmonitor", "JSONFormer": "https://python.langchain.com/docs/integrations/llms/jsonformer_experimental", "Agent Trajectory": "https://python.langchain.com/docs/guides/evaluation/trajectory/trajectory_eval", "Agents": "https://python.langchain.com/docs/expression_language/cookbook/agent", "!pip install bs4": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/marathon_times", "Defining Custom Tools": "https://python.langchain.com/docs/modules/agents/tools/custom_tools", "XML Agent": "https://python.langchain.com/docs/modules/agents/agent_types/xml_agent"}, "OpenAIFunctionsAgent": {"Metaphor Search": "https://python.langchain.com/docs/integrations/tools/metaphor_search", "LLMonitor": "https://python.langchain.com/docs/integrations/callbacks/llmonitor", "Conversational Retrieval Agent": "https://python.langchain.com/docs/use_cases/question_answering/how_to/conversational_retrieval_agents", "Agents": "https://python.langchain.com/docs/use_cases/more/agents/agents"}, "SystemMessage": {"Metaphor Search": "https://python.langchain.com/docs/integrations/tools/metaphor_search", "SQL Chat Message History": "https://python.langchain.com/docs/integrations/memory/sql_chat_message_history", "Anthropic": "https://python.langchain.com/docs/integrations/chat/anthropic", "\ud83d\ude85 LiteLLM": "https://python.langchain.com/docs/integrations/chat/litellm", "Konko": "https://python.langchain.com/docs/integrations/chat/konko", "OpenAI": "https://python.langchain.com/docs/integrations/chat/openai", "Google Cloud Platform Vertex AI PaLM ": "https://python.langchain.com/docs/integrations/chat/google_vertex_ai_palm", "JinaChat": "https://python.langchain.com/docs/integrations/chat/jinachat", "Anyscale": "https://python.langchain.com/docs/integrations/chat/anyscale", "LLMonitor": "https://python.langchain.com/docs/integrations/callbacks/llmonitor", "Context": "https://python.langchain.com/docs/integrations/callbacks/context", "Label Studio": "https://python.langchain.com/docs/integrations/callbacks/labelstudio", "MLflow AI Gateway": "https://python.langchain.com/docs/integrations/providers/mlflow_ai_gateway", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/chatbots", "Conversational Retrieval Agent": "https://python.langchain.com/docs/use_cases/question_answering/how_to/conversational_retrieval_agents", "Structure answers with OpenAI functions": "https://python.langchain.com/docs/use_cases/question_answering/integrations/openai_functions_retrieval_qa", "Agents": "https://python.langchain.com/docs/use_cases/more/agents/agents", "CAMEL Role-Playing Autonomous Cooperative Agents": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/camel_role_playing", "Multi-Agent Simulated Environment: Petting Zoo": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/petting_zoo", "Multi-agent decentralized speaker selection": 
"https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/multiagent_bidding", "Multi-agent authoritarian speaker selection": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/multiagent_authoritarian", "Two-Player Dungeons & Dragons": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/two_player_dnd", "Multi-Player Dungeons & Dragons": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/multi_player_dnd", "Simulated Environment: Gymnasium": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/gymnasium", "Agent Debates with Tools": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/two_agent_debate_tools", "Memory in LLMChain": "https://python.langchain.com/docs/modules/memory/adding_memory", "Use ToolKits with OpenAI Functions": "https://python.langchain.com/docs/modules/agents/how_to/use_toolkits_with_openai_functions", "Prompt pipelining": "https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/prompts_pipelining", "Using OpenAI functions": "https://python.langchain.com/docs/modules/chains/how_to/openai_functions"}, "AgentExecutor": {"Metaphor Search": "https://python.langchain.com/docs/integrations/tools/metaphor_search", "LLMonitor": "https://python.langchain.com/docs/integrations/callbacks/llmonitor", "Jina": "https://python.langchain.com/docs/integrations/providers/jina", "PowerBI Dataset": "https://python.langchain.com/docs/integrations/toolkits/powerbi", "SQL Database": "https://python.langchain.com/docs/integrations/toolkits/sql_database", "JSON": "https://python.langchain.com/docs/integrations/toolkits/json", "Bittensor": "https://python.langchain.com/docs/integrations/llms/bittensor", "Conversational Retrieval Agent": "https://python.langchain.com/docs/use_cases/question_answering/how_to/conversational_retrieval_agents", "Agents": "https://python.langchain.com/docs/expression_language/cookbook/agent", "BabyAGI with Tools": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/baby_agi_with_agent", "Plug-and-Plai": "https://python.langchain.com/docs/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval_using_plugnplai", "Wikibase Agent": "https://python.langchain.com/docs/use_cases/more/agents/agents/wikibase_agent", "SalesGPT - Your Context-Aware AI Sales Assistant With Knowledge Base": "https://python.langchain.com/docs/use_cases/more/agents/agents/sales_agent_with_context", "Custom Agent with PlugIn Retrieval": "https://python.langchain.com/docs/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/qa_structured/sql", "SQL": "https://python.langchain.com/docs/use_cases/sql/sql", "Message Memory in Agent backed by a database": "https://python.langchain.com/docs/modules/memory/agent_with_memory_in_db", "Memory in Agent": "https://python.langchain.com/docs/modules/memory/agent_with_memory", "XML Agent": "https://python.langchain.com/docs/modules/agents/agent_types/xml_agent", "Custom MRKL agent": "https://python.langchain.com/docs/modules/agents/how_to/custom_mrkl_agent", "Shared memory across agents and tools": "https://python.langchain.com/docs/modules/agents/how_to/sharedmemory_for_tools", "Custom multi-action agent": "https://python.langchain.com/docs/modules/agents/how_to/custom_multi_action_agent", "Running Agent as an Iterator": 
"https://python.langchain.com/docs/modules/agents/how_to/agent_iter", "Custom agent": "https://python.langchain.com/docs/modules/agents/how_to/custom_agent", "Custom agent with tool retrieval": "https://python.langchain.com/docs/modules/agents/how_to/custom_agent_with_tool_retrieval"}, "MetaphorSearchAPIWrapper": {"Metaphor Search": "https://python.langchain.com/docs/integrations/tools/metaphor_search"}, "PlayWrightBrowserToolkit": {"Metaphor Search": "https://python.langchain.com/docs/integrations/tools/metaphor_search", "PlayWright Browser": "https://python.langchain.com/docs/integrations/toolkits/playwright"}, "create_async_playwright_browser": {"Metaphor Search": "https://python.langchain.com/docs/integrations/tools/metaphor_search", "PlayWright Browser": "https://python.langchain.com/docs/integrations/toolkits/playwright"}, "MetaphorSearchResults": {"Metaphor Search": "https://python.langchain.com/docs/integrations/tools/metaphor_search"}, "SerpAPIWrapper": {"SerpAPI": "https://python.langchain.com/docs/integrations/providers/serpapi", "Bittensor": "https://python.langchain.com/docs/integrations/llms/bittensor", "AutoGPT": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/autogpt"}, "GraphQLAPIWrapper": {"GraphQL": "https://python.langchain.com/docs/integrations/tools/graphql"}, "DuckDuckGoSearchRun": {"DuckDuckGo Search": "https://python.langchain.com/docs/integrations/tools/ddg", "Github": "https://python.langchain.com/docs/integrations/toolkits/github", "!pip install bs4": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/marathon_times", "Using tools": "https://python.langchain.com/docs/expression_language/cookbook/tools"}, "DuckDuckGoSearchResults": {"DuckDuckGo Search": "https://python.langchain.com/docs/integrations/tools/ddg"}, "DuckDuckGoSearchAPIWrapper": {"DuckDuckGo Search": "https://python.langchain.com/docs/integrations/tools/ddg"}, "ConversationBufferMemory": {"Gradio": "https://python.langchain.com/docs/integrations/tools/gradio_tools", "SceneXplain": "https://python.langchain.com/docs/integrations/tools/sceneXplain", "Xata chat memory": "https://python.langchain.com/docs/integrations/memory/xata_chat_message_history", "Streamlit Chat Message History": "https://python.langchain.com/docs/integrations/memory/streamlit_chat_message_history", "Dynamodb Chat Message History": "https://python.langchain.com/docs/integrations/memory/dynamodb_chat_message_history", "Chat Over Documents with Vectara": "https://python.langchain.com/docs/integrations/providers/vectara/vectara_chat", "Bittensor": "https://python.langchain.com/docs/integrations/llms/bittensor", "Bedrock": "https://python.langchain.com/docs/integrations/llms/bedrock", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/chatbots", "Structure answers with OpenAI functions": "https://python.langchain.com/docs/use_cases/question_answering/integrations/openai_functions_retrieval_qa", "Agents": "https://python.langchain.com/docs/use_cases/more/agents/agents", "Agent Debates with Tools": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/two_agent_debate_tools", "Message Memory in Agent backed by a database": "https://python.langchain.com/docs/modules/memory/agent_with_memory_in_db", "Memory in the Multi-Input Chain": "https://python.langchain.com/docs/modules/memory/adding_memory_chain_multiple_inputs", "Memory in LLMChain": "https://python.langchain.com/docs/modules/memory/adding_memory", "Multiple 
Memory classes": "https://python.langchain.com/docs/modules/memory/multiple_memory", "Customizing Conversational Memory": "https://python.langchain.com/docs/modules/memory/conversational_customization", "Memory in Agent": "https://python.langchain.com/docs/modules/memory/agent_with_memory", "Shared memory across agents and tools": "https://python.langchain.com/docs/modules/agents/how_to/sharedmemory_for_tools", "Add Memory to OpenAI Functions Agent": "https://python.langchain.com/docs/modules/agents/how_to/add_memory_openai_functions", "First we add a step to load memory": "https://python.langchain.com/docs/expression_language/cookbook/retrieval", "Adding memory": "https://python.langchain.com/docs/expression_language/cookbook/memory"}, "SceneXplainTool": {"SceneXplain": "https://python.langchain.com/docs/integrations/tools/sceneXplain"}, "WolframAlphaAPIWrapper": {"Wolfram Alpha": "https://python.langchain.com/docs/integrations/providers/wolfram_alpha"}, "load_huggingface_tool": {"HuggingFace Hub Tools": "https://python.langchain.com/docs/integrations/tools/huggingface_tools"}, "EdenAiSpeechToTextTool": {"Eden AI": "https://python.langchain.com/docs/integrations/tools/edenai_tools"}, "EdenAiTextToSpeechTool": {"Eden AI": "https://python.langchain.com/docs/integrations/tools/edenai_tools"}, "EdenAiExplicitImageTool": {"Eden AI": "https://python.langchain.com/docs/integrations/tools/edenai_tools"}, "EdenAiObjectDetectionTool": {"Eden AI": "https://python.langchain.com/docs/integrations/tools/edenai_tools"}, "EdenAiParsingIDTool": {"Eden AI": "https://python.langchain.com/docs/integrations/tools/edenai_tools"}, "EdenAiParsingInvoiceTool": {"Eden AI": "https://python.langchain.com/docs/integrations/tools/edenai_tools"}, "EdenAiTextModerationTool": {"Eden AI": "https://python.langchain.com/docs/integrations/tools/edenai_tools"}, "EdenAI": {"Eden AI": "https://python.langchain.com/docs/integrations/llms/edenai"}, "GoogleSearchAPIWrapper": {"Google Search": "https://python.langchain.com/docs/integrations/providers/google_search", "Bittensor": "https://python.langchain.com/docs/integrations/llms/bittensor", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/web_scraping", "Agents": "https://python.langchain.com/docs/use_cases/more/agents/agents", "WebResearchRetriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/web_research", "Message Memory in Agent backed by a database": "https://python.langchain.com/docs/modules/memory/agent_with_memory_in_db", "Memory in Agent": "https://python.langchain.com/docs/modules/memory/agent_with_memory", "Shared memory across agents and tools": "https://python.langchain.com/docs/modules/agents/how_to/sharedmemory_for_tools"}, "BingSearchAPIWrapper": {"Bing Search": "https://python.langchain.com/docs/integrations/tools/bing_search"}, "DallEAPIWrapper": {"Dall-E Image Generator": "https://python.langchain.com/docs/integrations/tools/dalle_image_generator"}, "ShellTool": {"Shell (bash)": "https://python.langchain.com/docs/integrations/tools/bash", "Human-in-the-loop Tool Validation": "https://python.langchain.com/docs/modules/agents/tools/human_approval"}, "ReadFileTool": {"File System": "https://python.langchain.com/docs/integrations/tools/filesystem", "AutoGPT": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/autogpt", "!pip install bs4": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/marathon_times"}, "CopyFileTool": {"File System": 
"https://python.langchain.com/docs/integrations/tools/filesystem"}, "DeleteFileTool": {"File System": "https://python.langchain.com/docs/integrations/tools/filesystem"}, "MoveFileTool": {"File System": "https://python.langchain.com/docs/integrations/tools/filesystem", "Tools as OpenAI Functions": "https://python.langchain.com/docs/modules/agents/tools/tools_as_openai_functions"}, "WriteFileTool": {"File System": "https://python.langchain.com/docs/integrations/tools/filesystem", "AutoGPT": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/autogpt", "!pip install bs4": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/marathon_times"}, "ListDirectoryTool": {"File System": "https://python.langchain.com/docs/integrations/tools/filesystem"}, "FileManagementToolkit": {"File System": "https://python.langchain.com/docs/integrations/tools/filesystem"}, "BraveSearch": {"Brave Search": "https://python.langchain.com/docs/integrations/providers/brave_search"}, "RedisChatMessageHistory": {"Redis Chat Message History": "https://python.langchain.com/docs/integrations/memory/redis_chat_message_history", "Message Memory in Agent backed by a database": "https://python.langchain.com/docs/modules/memory/agent_with_memory_in_db"}, "ConversationChain": {"Entity Memory with SQLite storage": "https://python.langchain.com/docs/integrations/memory/entity_memory_with_sqlite", "Figma": "https://python.langchain.com/docs/integrations/document_loaders/figma", "Bedrock": "https://python.langchain.com/docs/integrations/llms/bedrock", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/chatbots", "Agent Debates with Tools": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/two_agent_debate_tools", "Multiple Memory classes": "https://python.langchain.com/docs/modules/memory/multiple_memory", "Customizing Conversational Memory": "https://python.langchain.com/docs/modules/memory/conversational_customization", "Conversation Knowledge Graph": "https://python.langchain.com/docs/modules/memory/types/kg", "Conversation Token Buffer": "https://python.langchain.com/docs/modules/memory/types/token_buffer", "Conversation Summary Buffer": "https://python.langchain.com/docs/modules/memory/types/summary_buffer", "Router": "https://python.langchain.com/docs/modules/chains/foundational/router"}, "ConversationEntityMemory": {"Entity Memory with SQLite storage": "https://python.langchain.com/docs/integrations/memory/entity_memory_with_sqlite"}, "SQLiteEntityStore": {"Entity Memory with SQLite storage": "https://python.langchain.com/docs/integrations/memory/entity_memory_with_sqlite"}, "ENTITY_MEMORY_CONVERSATION_TEMPLATE": {"Entity Memory with SQLite storage": "https://python.langchain.com/docs/integrations/memory/entity_memory_with_sqlite"}, "PostgresChatMessageHistory": {"Postgres Chat Message History": "https://python.langchain.com/docs/integrations/memory/postgres_chat_message_history"}, "MomentoChatMessageHistory": {"Momento Chat Message History": "https://python.langchain.com/docs/integrations/memory/momento_chat_message_history"}, "MongoDBChatMessageHistory": {"Mongodb Chat Message History": "https://python.langchain.com/docs/integrations/memory/mongodb_chat_message_history"}, "XataChatMessageHistory": {"Xata chat memory": "https://python.langchain.com/docs/integrations/memory/xata_chat_message_history"}, "XataVectorStore": {"Xata chat memory": 
"https://python.langchain.com/docs/integrations/memory/xata_chat_message_history", "Xata": "https://python.langchain.com/docs/integrations/vectorstores/xata"}, "create_retriever_tool": {"Xata chat memory": "https://python.langchain.com/docs/integrations/memory/xata_chat_message_history", "Conversational Retrieval Agent": "https://python.langchain.com/docs/use_cases/question_answering/how_to/conversational_retrieval_agents", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/qa_structured/sql", "SQL": "https://python.langchain.com/docs/use_cases/sql/sql"}, "CassandraChatMessageHistory": {"Cassandra Chat Message History": "https://python.langchain.com/docs/integrations/memory/cassandra_chat_message_history", "Cassandra": "https://python.langchain.com/docs/integrations/providers/cassandra"}, "SQLChatMessageHistory": {"SQL Chat Message History": "https://python.langchain.com/docs/integrations/memory/sql_chat_message_history"}, "BaseMessage": {"SQL Chat Message History": "https://python.langchain.com/docs/integrations/memory/sql_chat_message_history", "CAMEL Role-Playing Autonomous Cooperative Agents": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/camel_role_playing", "Multi-agent decentralized speaker selection": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/multiagent_bidding", "Multi-agent authoritarian speaker selection": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/multiagent_authoritarian", "Multi-Player Dungeons & Dragons": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/multi_player_dnd", "Simulated Environment: Gymnasium": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/gymnasium", "Agent Debates with Tools": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/two_agent_debate_tools"}, "BaseMessageConverter": {"SQL Chat Message History": "https://python.langchain.com/docs/integrations/memory/sql_chat_message_history"}, "MotorheadMemory": {"Mot\u00f6rhead Memory": "https://python.langchain.com/docs/integrations/memory/motorhead_memory", "Mot\u00f6rhead Memory (Managed)": "https://python.langchain.com/docs/integrations/memory/motorhead_memory_managed"}, "StreamlitChatMessageHistory": {"Streamlit Chat Message History": "https://python.langchain.com/docs/integrations/memory/streamlit_chat_message_history"}, "DynamoDBChatMessageHistory": {"Dynamodb Chat Message History": "https://python.langchain.com/docs/integrations/memory/dynamodb_chat_message_history"}, "PythonREPL": {"Dynamodb Chat Message History": "https://python.langchain.com/docs/integrations/memory/dynamodb_chat_message_history", "Python": "https://python.langchain.com/docs/integrations/toolkits/python", "Code writing": "https://python.langchain.com/docs/expression_language/cookbook/code_writing"}, "RocksetChatMessageHistory": {"Rockset Chat Message History": "https://python.langchain.com/docs/integrations/memory/rockset_chat_message_history"}, "AzureMLChatOnlineEndpoint": {"AzureML Chat Online Endpoint": "https://python.langchain.com/docs/integrations/chat/azureml_chat_endpoint"}, "LlamaContentFormatter": {"AzureML Chat Online Endpoint": "https://python.langchain.com/docs/integrations/chat/azureml_chat_endpoint"}, "ChatAnthropic": {"Anthropic": "https://python.langchain.com/docs/integrations/chat/anthropic", "Log10": "https://python.langchain.com/docs/integrations/providers/log10", "PlayWright Browser": 
"https://python.langchain.com/docs/integrations/toolkits/playwright", "Fallbacks": "https://python.langchain.com/docs/guides/fallbacks", "Agent Trajectory": "https://python.langchain.com/docs/guides/evaluation/trajectory/trajectory_eval", "Custom Pairwise Evaluator": "https://python.langchain.com/docs/guides/evaluation/comparison/custom", "Pairwise String Comparison": "https://python.langchain.com/docs/guides/evaluation/comparison/pairwise_string", "Criteria Evaluation": "https://python.langchain.com/docs/guides/evaluation/string/criteria_eval_chain", "XML Agent": "https://python.langchain.com/docs/modules/agents/agent_types/xml_agent", "Few-shot examples for chat models": "https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/few_shot_examples_chat", "Agents": "https://python.langchain.com/docs/expression_language/cookbook/agent"}, "SystemMessagePromptTemplate": {"Anthropic": "https://python.langchain.com/docs/integrations/chat/anthropic", "\ud83d\ude85 LiteLLM": "https://python.langchain.com/docs/integrations/chat/litellm", "Konko": "https://python.langchain.com/docs/integrations/chat/konko", "OpenAI": "https://python.langchain.com/docs/integrations/chat/openai", "Google Cloud Platform Vertex AI PaLM ": "https://python.langchain.com/docs/integrations/chat/google_vertex_ai_palm", "JinaChat": "https://python.langchain.com/docs/integrations/chat/jinachat", "Figma": "https://python.langchain.com/docs/integrations/document_loaders/figma", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/chatbots", "CAMEL Role-Playing Autonomous Cooperative Agents": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/camel_role_playing", "Code writing": "https://python.langchain.com/docs/expression_language/cookbook/code_writing"}, "AIMessagePromptTemplate": {"Anthropic": "https://python.langchain.com/docs/integrations/chat/anthropic", "\ud83d\ude85 LiteLLM": "https://python.langchain.com/docs/integrations/chat/litellm", "Konko": "https://python.langchain.com/docs/integrations/chat/konko", "OpenAI": "https://python.langchain.com/docs/integrations/chat/openai", "JinaChat": "https://python.langchain.com/docs/integrations/chat/jinachat", "Figma": "https://python.langchain.com/docs/integrations/document_loaders/figma"}, "HumanMessagePromptTemplate": {"Anthropic": "https://python.langchain.com/docs/integrations/chat/anthropic", "\ud83d\ude85 LiteLLM": "https://python.langchain.com/docs/integrations/chat/litellm", "Konko": "https://python.langchain.com/docs/integrations/chat/konko", "OpenAI": "https://python.langchain.com/docs/integrations/chat/openai", "Google Cloud Platform Vertex AI PaLM ": "https://python.langchain.com/docs/integrations/chat/google_vertex_ai_palm", "JinaChat": "https://python.langchain.com/docs/integrations/chat/jinachat", "Context": "https://python.langchain.com/docs/integrations/callbacks/context", "Figma": "https://python.langchain.com/docs/integrations/document_loaders/figma", "Fireworks": "https://python.langchain.com/docs/integrations/llms/fireworks", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/extraction", "Structure answers with OpenAI functions": "https://python.langchain.com/docs/use_cases/question_answering/integrations/openai_functions_retrieval_qa", "CAMEL Role-Playing Autonomous Cooperative Agents": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/camel_role_playing", "Multi-agent authoritarian speaker 
selection": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/multiagent_authoritarian", "Memory in LLMChain": "https://python.langchain.com/docs/modules/memory/adding_memory", "Retry parser": "https://python.langchain.com/docs/modules/model_io/output_parsers/retry", "Pydantic (JSON) parser": "https://python.langchain.com/docs/modules/model_io/output_parsers/pydantic", "Prompt pipelining": "https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/prompts_pipelining", "Using OpenAI functions": "https://python.langchain.com/docs/modules/chains/how_to/openai_functions", "Code writing": "https://python.langchain.com/docs/expression_language/cookbook/code_writing"}, "CallbackManager": {"Anthropic": "https://python.langchain.com/docs/integrations/chat/anthropic", "\ud83d\ude85 LiteLLM": "https://python.langchain.com/docs/integrations/chat/litellm", "Ollama": "https://python.langchain.com/docs/integrations/llms/ollama", "Llama.cpp": "https://python.langchain.com/docs/integrations/llms/llamacpp", "Titan Takeoff": "https://python.langchain.com/docs/integrations/llms/titan_takeoff", "Run LLMs locally": "https://python.langchain.com/docs/guides/local_llms", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/code_understanding", "Use local LLMs": "https://python.langchain.com/docs/use_cases/question_answering/how_to/local_retrieval_qa", "WebResearchRetriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/web_research"}, "StreamingStdOutCallbackHandler": {"Anthropic": "https://python.langchain.com/docs/integrations/chat/anthropic", "\ud83d\ude85 LiteLLM": "https://python.langchain.com/docs/integrations/chat/litellm", "Ollama": "https://python.langchain.com/docs/integrations/llms/ollama", "GPT4All": "https://python.langchain.com/docs/integrations/llms/gpt4all", "Arthur": "https://python.langchain.com/docs/integrations/providers/arthur_tracking", "Chat Over Documents with Vectara": "https://python.langchain.com/docs/integrations/providers/vectara/vectara_chat", "TextGen": "https://python.langchain.com/docs/integrations/llms/textgen", "Llama.cpp": "https://python.langchain.com/docs/integrations/llms/llamacpp", "Titan Takeoff": "https://python.langchain.com/docs/integrations/llms/titan_takeoff", "Eden AI": "https://python.langchain.com/docs/integrations/llms/edenai", "C Transformers": "https://python.langchain.com/docs/integrations/llms/ctransformers", "Huggingface TextGen Inference": "https://python.langchain.com/docs/integrations/llms/huggingface_textgen_inference", "Replicate": "https://python.langchain.com/docs/integrations/llms/replicate", "Run LLMs locally": "https://python.langchain.com/docs/guides/local_llms", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/code_understanding", "Use local LLMs": "https://python.langchain.com/docs/use_cases/question_answering/how_to/local_retrieval_qa", "WebResearchRetriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/web_research"}, "ChatLiteLLM": {"\ud83d\ude85 LiteLLM": "https://python.langchain.com/docs/integrations/chat/litellm"}, "create_tagging_chain": {"Llama API": "https://python.langchain.com/docs/integrations/chat/llama_api", "Anthropic Functions": "https://python.langchain.com/docs/integrations/chat/anthropic_functions", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/tagging"}, "ChatKonko": {"Konko": 
"https://python.langchain.com/docs/integrations/chat/konko"}, "ChatVertexAI": {"Google Cloud Platform Vertex AI PaLM ": "https://python.langchain.com/docs/integrations/chat/google_vertex_ai_palm"}, "BedrockChat": {"Bedrock Chat": "https://python.langchain.com/docs/integrations/chat/bedrock"}, "JinaChat": {"JinaChat": "https://python.langchain.com/docs/integrations/chat/jinachat"}, "ChatOllama": {"Ollama": "https://python.langchain.com/docs/integrations/chat/ollama"}, "LLMResult": {"Ollama": "https://python.langchain.com/docs/integrations/llms/ollama", "Async callbacks": "https://python.langchain.com/docs/modules/callbacks/async_callbacks"}, "BaseCallbackHandler": {"Ollama": "https://python.langchain.com/docs/integrations/llms/ollama", "Custom callback handlers": "https://python.langchain.com/docs/modules/callbacks/custom_callbacks", "Multiple callback handlers": "https://python.langchain.com/docs/modules/callbacks/multiple_callbacks", "Async callbacks": "https://python.langchain.com/docs/modules/callbacks/async_callbacks", "Streaming final agent output": "https://python.langchain.com/docs/modules/agents/how_to/streaming_stdout_final_only"}, "AzureChatOpenAI": {"Azure": "https://python.langchain.com/docs/integrations/chat/azure_chat_openai", "Azure OpenAI": "https://python.langchain.com/docs/integrations/providers/azure_openai"}, "get_openai_callback": {"Azure": "https://python.langchain.com/docs/integrations/chat/azure_chat_openai", "Token counting": "https://python.langchain.com/docs/modules/callbacks/token_counting", "Tracking token usage": "https://python.langchain.com/docs/modules/model_io/models/llms/token_usage_tracking", "Run arbitrary functions": "https://python.langchain.com/docs/expression_language/how_to/functions"}, "QianfanChatEndpoint": {"Baidu Qianfan": "https://python.langchain.com/docs/integrations/chat/baidu_qianfan_endpoint"}, "ErnieBotChat": {"ERNIE-Bot Chat": "https://python.langchain.com/docs/integrations/chat/ernie"}, "PromptLayerChatOpenAI": {"PromptLayer ChatOpenAI": "https://python.langchain.com/docs/integrations/chat/promptlayer_chatopenai"}, "ChatAnyscale": {"Anyscale": "https://python.langchain.com/docs/integrations/chat/anyscale"}, "create_extraction_chain": {"Anthropic Functions": "https://python.langchain.com/docs/integrations/chat/anthropic_functions", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/extraction"}, "DeepEvalCallbackHandler": {"Confident": "https://python.langchain.com/docs/integrations/callbacks/confident"}, "CharacterTextSplitter": {"Confident": "https://python.langchain.com/docs/integrations/callbacks/confident", "Hugging Face": "https://python.langchain.com/docs/integrations/providers/huggingface", "OpenAI": "https://python.langchain.com/docs/integrations/providers/openai", "Elasticsearch": "https://python.langchain.com/docs/integrations/vectorstores/elasticsearch", "Vectara Text Generation": "https://python.langchain.com/docs/integrations/providers/vectara/vectara_text_generation", "Document Comparison": "https://python.langchain.com/docs/integrations/toolkits/document_comparison_toolkit", "Vectorstore": "https://python.langchain.com/docs/integrations/toolkits/vectorstore", "LanceDB": "https://python.langchain.com/docs/integrations/vectorstores/lancedb", "sqlite-vss": "https://python.langchain.com/docs/integrations/vectorstores/sqlitevss", "Weaviate": "https://python.langchain.com/docs/integrations/vectorstores/weaviate", "DashVector": 
"https://python.langchain.com/docs/integrations/vectorstores/dashvector", "ScaNN": "https://python.langchain.com/docs/integrations/vectorstores/scann", "Xata": "https://python.langchain.com/docs/integrations/vectorstores/xata", "Vectara": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/vectara_self_query", "PGVector": "https://python.langchain.com/docs/integrations/vectorstores/pgvector", "Rockset": "https://python.langchain.com/docs/integrations/vectorstores/rockset", "DingoDB": "https://python.langchain.com/docs/integrations/vectorstores/dingo", "Zilliz": "https://python.langchain.com/docs/integrations/vectorstores/zilliz", "SingleStoreDB": "https://python.langchain.com/docs/integrations/vectorstores/singlestoredb", "Annoy": "https://python.langchain.com/docs/integrations/vectorstores/annoy", "Typesense": "https://python.langchain.com/docs/integrations/vectorstores/typesense", "Activeloop Deep Lake": "https://python.langchain.com/docs/integrations/vectorstores/activeloop_deeplake", "Neo4j Vector Index": "https://python.langchain.com/docs/integrations/vectorstores/neo4jvector", "Tair": "https://python.langchain.com/docs/integrations/vectorstores/tair", "Chroma": "https://python.langchain.com/docs/integrations/vectorstores/chroma", "Alibaba Cloud OpenSearch": "https://python.langchain.com/docs/integrations/vectorstores/alibabacloud_opensearch", "Baidu Cloud VectorSearch": "https://python.langchain.com/docs/integrations/vectorstores/baiducloud_vector_search", "StarRocks": "https://python.langchain.com/docs/integrations/vectorstores/starrocks", "scikit-learn": "https://python.langchain.com/docs/integrations/vectorstores/sklearn", "Tencent Cloud VectorDB": "https://python.langchain.com/docs/integrations/vectorstores/tencentvectordb", "DocArray HnswSearch": "https://python.langchain.com/docs/integrations/vectorstores/docarray_hnsw", "MyScale": "https://python.langchain.com/docs/integrations/vectorstores/myscale", "ClickHouse": "https://python.langchain.com/docs/integrations/vectorstores/clickhouse", "Qdrant": "https://python.langchain.com/docs/integrations/vectorstores/qdrant", "Tigris": "https://python.langchain.com/docs/integrations/vectorstores/tigris", "AwaDB": "https://python.langchain.com/docs/integrations/vectorstores/awadb", "Supabase (Postgres)": "https://python.langchain.com/docs/integrations/vectorstores/supabase", "OpenSearch": "https://python.langchain.com/docs/integrations/vectorstores/opensearch", "Pinecone": "https://python.langchain.com/docs/integrations/vectorstores/pinecone", "BagelDB": "https://python.langchain.com/docs/integrations/vectorstores/bageldb", "Azure Cognitive Search": "https://python.langchain.com/docs/integrations/vectorstores/azuresearch", "Cassandra": "https://python.langchain.com/docs/integrations/vectorstores/cassandra", "USearch": "https://python.langchain.com/docs/integrations/vectorstores/usearch", "Milvus": "https://python.langchain.com/docs/integrations/vectorstores/milvus", "Marqo": "https://python.langchain.com/docs/integrations/vectorstores/marqo", "DocArray InMemorySearch": "https://python.langchain.com/docs/integrations/vectorstores/docarray_in_memory", "Postgres Embedding": "https://python.langchain.com/docs/integrations/vectorstores/pgembedding", "Faiss": "https://python.langchain.com/docs/integrations/vectorstores/faiss", "Epsilla": "https://python.langchain.com/docs/integrations/vectorstores/epsilla", "AnalyticDB": "https://python.langchain.com/docs/integrations/vectorstores/analyticdb", "Hologres": 
"https://python.langchain.com/docs/integrations/vectorstores/hologres", "MongoDB Atlas": "https://python.langchain.com/docs/integrations/vectorstores/mongodb_atlas", "Meilisearch": "https://python.langchain.com/docs/integrations/vectorstores/meilisearch", "Figma": "https://python.langchain.com/docs/integrations/document_loaders/figma", "Psychic": "https://python.langchain.com/docs/integrations/document_loaders/psychic", "Manifest": "https://python.langchain.com/docs/integrations/llms/manifest", "LLM Caching integrations": "https://python.langchain.com/docs/integrations/llms/llm_caching", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/summarization", "Conversational Retrieval Agent": "https://python.langchain.com/docs/use_cases/question_answering/how_to/conversational_retrieval_agents", "Retrieve from vector stores directly": "https://python.langchain.com/docs/use_cases/question_answering/how_to/vector_db_text_generation", "Improve document indexing with HyDE": "https://python.langchain.com/docs/use_cases/question_answering/how_to/hyde", "Analysis of Twitter the-algorithm source code with LangChain, GPT4 and Activeloop's Deep Lake": "https://python.langchain.com/docs/use_cases/question_answering/how_to/code/twitter-the-algorithm-analysis-deeplake", "Use LangChain, GPT and Activeloop's Deep Lake to work with code base": "https://python.langchain.com/docs/use_cases/question_answering/how_to/code/code-analysis-deeplake", "Structure answers with OpenAI functions": "https://python.langchain.com/docs/use_cases/question_answering/integrations/openai_functions_retrieval_qa", "QA using Activeloop's DeepLake": "https://python.langchain.com/docs/use_cases/question_answering/integrations/semantic-search-over-chat", "SalesGPT - Your Context-Aware AI Sales Assistant With Knowledge Base": "https://python.langchain.com/docs/use_cases/more/agents/agents/sales_agent_with_context", "Indexing": "https://python.langchain.com/docs/modules/data_connection/indexing", "Caching": "https://python.langchain.com/docs/modules/data_connection/text_embedding/caching_embeddings", "Split by tokens ": "https://python.langchain.com/docs/modules/data_connection/document_transformers/text_splitters/split_by_token", "Memory in the Multi-Input Chain": "https://python.langchain.com/docs/modules/memory/adding_memory_chain_multiple_inputs", "Combine agents and vector stores": "https://python.langchain.com/docs/modules/agents/how_to/agent_vectorstore", "Loading from LangChainHub": "https://python.langchain.com/docs/modules/chains/how_to/from_hub"}, "LLMonitorCallbackHandler": {"LLMonitor": "https://python.langchain.com/docs/integrations/callbacks/llmonitor"}, "ContextCallbackHandler": {"Context": "https://python.langchain.com/docs/integrations/callbacks/context"}, "LabelStudioCallbackHandler": {"Label Studio": "https://python.langchain.com/docs/integrations/callbacks/labelstudio"}, "ArgillaCallbackHandler": {"Argilla": "https://python.langchain.com/docs/integrations/providers/argilla"}, "StdOutCallbackHandler": {"Argilla": "https://python.langchain.com/docs/integrations/callbacks/argilla", "Comet": "https://python.langchain.com/docs/integrations/providers/comet_tracking", "Aim": "https://python.langchain.com/docs/integrations/providers/aim_tracking", "Weights & Biases": "https://python.langchain.com/docs/integrations/providers/wandb_tracking", "ClearML": "https://python.langchain.com/docs/integrations/providers/clearml_tracking", "OpaquePrompts": 
"https://python.langchain.com/docs/integrations/llms/opaqueprompts", "Vector SQL Retriever with MyScale": "https://python.langchain.com/docs/use_cases/qa_structured/integrations/myscale_vector_sql", "Async API": "https://python.langchain.com/docs/modules/agents/how_to/async_agent", "Custom chain": "https://python.langchain.com/docs/modules/chains/how_to/custom_chain"}, "PromptLayerCallbackHandler": {"PromptLayer": "https://python.langchain.com/docs/integrations/callbacks/promptlayer"}, "GPT4All": {"PromptLayer": "https://python.langchain.com/docs/integrations/callbacks/promptlayer", "GPT4All": "https://python.langchain.com/docs/integrations/llms/gpt4all", "Run LLMs locally": "https://python.langchain.com/docs/guides/local_llms", "Use local LLMs": "https://python.langchain.com/docs/use_cases/question_answering/how_to/local_retrieval_qa"}, "StreamlitCallbackHandler": {"Streamlit": "https://python.langchain.com/docs/integrations/callbacks/.ipynb_checkpoints/streamlit-checkpoint", "GPT4All": "https://python.langchain.com/docs/integrations/providers/gpt4all"}, "InfinoCallbackHandler": {"Infino": "https://python.langchain.com/docs/integrations/providers/infino"}, "FigmaFileLoader": {"Figma": "https://python.langchain.com/docs/integrations/document_loaders/figma"}, "AzureOpenAI": {"Azure OpenAI": "https://python.langchain.com/docs/integrations/llms/azure_openai", "OpenAI": "https://python.langchain.com/docs/integrations/providers/openai"}, "MyScale": {"MyScale": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/myscale_self_query"}, "Baseten": {"Baseten": "https://python.langchain.com/docs/integrations/llms/baseten"}, "WeatherDataLoader": {"Weather": "https://python.langchain.com/docs/integrations/document_loaders/weather"}, "Tair": {"Tair": "https://python.langchain.com/docs/integrations/vectorstores/tair"}, "UnstructuredWordDocumentLoader": {"Microsoft Word": "https://python.langchain.com/docs/integrations/document_loaders/microsoft_word"}, "CollegeConfidentialLoader": {"College Confidential": "https://python.langchain.com/docs/integrations/document_loaders/college_confidential"}, "RWKV": {"RWKV-4": "https://python.langchain.com/docs/integrations/providers/rwkv"}, "GoogleDriveLoader": {"Google Drive": "https://python.langchain.com/docs/integrations/document_loaders/google_drive"}, "Fireworks": {"Fireworks": "https://python.langchain.com/docs/integrations/llms/fireworks"}, "DeepLake": {"Activeloop Deep Lake": "https://python.langchain.com/docs/integrations/vectorstores/activeloop_deeplake", "Analysis of Twitter the-algorithm source code with LangChain, GPT4 and Activeloop's Deep Lake": "https://python.langchain.com/docs/use_cases/question_answering/how_to/code/twitter-the-algorithm-analysis-deeplake", "Use LangChain, GPT and Activeloop's Deep Lake to work with code base": "https://python.langchain.com/docs/use_cases/question_answering/how_to/code/code-analysis-deeplake", "QA using Activeloop's DeepLake": "https://python.langchain.com/docs/use_cases/question_answering/integrations/semantic-search-over-chat", "Deep Lake": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/activeloop_deeplake_self_query"}, "AmazonAPIGateway": {"Amazon API Gateway": "https://python.langchain.com/docs/integrations/llms/amazon_api_gateway"}, "UnstructuredPowerPointLoader": {"Microsoft PowerPoint": "https://python.langchain.com/docs/integrations/document_loaders/microsoft_powerpoint"}, "CometCallbackHandler": {"Comet": 
"https://python.langchain.com/docs/integrations/providers/comet_tracking"}, "CTransformers": {"C Transformers": "https://python.langchain.com/docs/integrations/llms/ctransformers"}, "BiliBiliLoader": {"BiliBili": "https://python.langchain.com/docs/integrations/document_loaders/bilibili"}, "MongoDBAtlasVectorSearch": {"MongoDB Atlas": "https://python.langchain.com/docs/integrations/vectorstores/mongodb_atlas"}, "SupabaseVectorStore": {"Supabase (Postgres)": "https://python.langchain.com/docs/integrations/vectorstores/supabase", "Supabase": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/supabase_self_query"}, "DiffbotLoader": {"Diffbot": "https://python.langchain.com/docs/integrations/document_loaders/diffbot"}, "DeepSparse": {"DeepSparse": "https://python.langchain.com/docs/integrations/llms/deepsparse"}, "AimCallbackHandler": {"Aim": "https://python.langchain.com/docs/integrations/providers/aim_tracking"}, "ModernTreasuryLoader": {"Modern Treasury": "https://python.langchain.com/docs/integrations/document_loaders/modern_treasury"}, "FacebookChatLoader": {"Facebook Chat": "https://python.langchain.com/docs/integrations/document_loaders/facebook_chat"}, "Banana": {"Banana": "https://python.langchain.com/docs/integrations/llms/banana"}, "HuggingFacePipeline": {"Hugging Face": "https://python.langchain.com/docs/integrations/providers/huggingface", "Hugging Face Local Pipelines": "https://python.langchain.com/docs/integrations/llms/huggingface_pipelines", "RELLM": "https://python.langchain.com/docs/integrations/llms/rellm_experimental", "JSONFormer": "https://python.langchain.com/docs/integrations/llms/jsonformer_experimental"}, "HuggingFaceHub": {"Hugging Face": "https://python.langchain.com/docs/integrations/providers/huggingface"}, "HuggingFaceHubEmbeddings": {"Hugging Face": "https://python.langchain.com/docs/integrations/providers/huggingface"}, "DocugamiLoader": {"Docugami": "https://python.langchain.com/docs/integrations/document_loaders/docugami"}, "GutenbergLoader": {"Gutenberg": "https://python.langchain.com/docs/integrations/document_loaders/gutenberg"}, "AzureBlobStorageContainerLoader": {"Azure Blob Storage": "https://python.langchain.com/docs/integrations/providers/azure_blob_storage", "Azure Blob Storage Container": "https://python.langchain.com/docs/integrations/document_loaders/azure_blob_storage_container"}, "AzureBlobStorageFileLoader": {"Azure Blob Storage": "https://python.langchain.com/docs/integrations/providers/azure_blob_storage", "Azure Blob Storage File": "https://python.langchain.com/docs/integrations/document_loaders/azure_blob_storage_file"}, "WikipediaLoader": {"Wikipedia": "https://python.langchain.com/docs/integrations/document_loaders/wikipedia", "Diffbot Graph Transformer": "https://python.langchain.com/docs/use_cases/more/graph/diffbot_graphtransformer"}, "ConfluenceLoader": {"Confluence": "https://python.langchain.com/docs/integrations/document_loaders/confluence"}, "Predibase": {"Predibase": "https://python.langchain.com/docs/integrations/llms/predibase"}, "Beam": {"Beam": "https://python.langchain.com/docs/integrations/llms/beam"}, "GrobidParser": {"Grobid": "https://python.langchain.com/docs/integrations/document_loaders/grobid"}, "GenericLoader": {"Grobid": "https://python.langchain.com/docs/integrations/document_loaders/grobid", "Loading documents from a YouTube url": "https://python.langchain.com/docs/integrations/document_loaders/youtube_audio", "Source Code": 
"https://python.langchain.com/docs/integrations/document_loaders/source_code", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/code_understanding"}, "Typesense": {"Typesense": "https://python.langchain.com/docs/integrations/vectorstores/typesense"}, "Hologres": {"Hologres": "https://python.langchain.com/docs/integrations/vectorstores/hologres"}, "AI21": {"AI21 Labs": "https://python.langchain.com/docs/integrations/providers/ai21", "AI21": "https://python.langchain.com/docs/integrations/llms/ai21"}, "WandbCallbackHandler": {"Weights & Biases": "https://python.langchain.com/docs/integrations/providers/wandb_tracking"}, "ObsidianLoader": {"Obsidian": "https://python.langchain.com/docs/integrations/document_loaders/obsidian"}, "create_sql_agent": {"CnosDB": "https://python.langchain.com/docs/integrations/providers/cnosdb", "SQL Database": "https://python.langchain.com/docs/integrations/toolkits/sql_database", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/qa_structured/sql", "SQL": "https://python.langchain.com/docs/use_cases/sql/sql"}, "SQLDatabaseToolkit": {"CnosDB": "https://python.langchain.com/docs/integrations/providers/cnosdb", "SQL Database": "https://python.langchain.com/docs/integrations/toolkits/sql_database", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/qa_structured/sql", "SQL": "https://python.langchain.com/docs/use_cases/sql/sql", "Use ToolKits with OpenAI Functions": "https://python.langchain.com/docs/modules/agents/how_to/use_toolkits_with_openai_functions"}, "SageMakerCallbackHandler": {"SageMaker Tracking": "https://python.langchain.com/docs/integrations/providers/sagemaker_tracking"}, "OpenAIModerationChain": {"OpenAI": "https://python.langchain.com/docs/integrations/providers/openai", "Adding moderation": "https://python.langchain.com/docs/expression_language/cookbook/moderation"}, "ChatGPTLoader": {"OpenAI": "https://python.langchain.com/docs/integrations/providers/openai", "ChatGPT Data": "https://python.langchain.com/docs/integrations/document_loaders/chatgpt_loader"}, "Nebula": {"Nebula": "https://python.langchain.com/docs/integrations/providers/symblai_nebula", "Nebula (Symbl.ai)": "https://python.langchain.com/docs/integrations/llms/symblai_nebula"}, "AZLyricsLoader": {"AZLyrics": "https://python.langchain.com/docs/integrations/document_loaders/azlyrics"}, "ToMarkdownLoader": {"2Markdown": "https://python.langchain.com/docs/integrations/document_loaders/tomarkdown"}, "DingoDB": {"DingoDB": "https://python.langchain.com/docs/integrations/vectorstores/dingo"}, "GitLoader": {"Git": "https://python.langchain.com/docs/integrations/document_loaders/git"}, "MlflowAIGateway": {"MLflow AI Gateway": "https://python.langchain.com/docs/integrations/providers/mlflow_ai_gateway"}, "MlflowAIGatewayEmbeddings": {"MLflow AI Gateway": "https://python.langchain.com/docs/integrations/providers/mlflow_ai_gateway"}, "ChatMLflowAIGateway": {"MLflow AI Gateway": "https://python.langchain.com/docs/integrations/providers/mlflow_ai_gateway"}, "SingleStoreDB": {"SingleStoreDB": "https://python.langchain.com/docs/integrations/vectorstores/singlestoredb"}, "Tigris": {"Tigris": "https://python.langchain.com/docs/integrations/vectorstores/tigris"}, "Bedrock": {"Bedrock": "https://python.langchain.com/docs/integrations/llms/bedrock"}, "Meilisearch": {"Meilisearch": "https://python.langchain.com/docs/integrations/vectorstores/meilisearch"}, 
"S3DirectoryLoader": {"AWS S3 Directory": "https://python.langchain.com/docs/integrations/document_loaders/aws_s3_directory"}, "S3FileLoader": {"AWS S3 Directory": "https://python.langchain.com/docs/integrations/providers/aws_s3", "AWS S3 File": "https://python.langchain.com/docs/integrations/document_loaders/aws_s3_file"}, "SQLDatabase": {"Rebuff": "https://python.langchain.com/docs/integrations/providers/rebuff", "SQL Database": "https://python.langchain.com/docs/integrations/toolkits/sql_database", "Multiple Retrieval Sources": "https://python.langchain.com/docs/use_cases/question_answering/how_to/multiple_retrieval", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/qa_structured/sql", "Vector SQL Retriever with MyScale": "https://python.langchain.com/docs/use_cases/qa_structured/integrations/myscale_vector_sql", "SQL": "https://python.langchain.com/docs/use_cases/sql/sql", "sql_db.md": "https://python.langchain.com/docs/expression_language/cookbook/sql_db"}, "Weaviate": {"Weaviate": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/weaviate_self_query"}, "Clickhouse": {"ClickHouse": "https://python.langchain.com/docs/integrations/vectorstores/clickhouse"}, "ClickhouseSettings": {"ClickHouse": "https://python.langchain.com/docs/integrations/vectorstores/clickhouse"}, "AirbyteJSONLoader": {"Airbyte": "https://python.langchain.com/docs/integrations/providers/airbyte", "Airbyte JSON": "https://python.langchain.com/docs/integrations/document_loaders/airbyte_json"}, "TelegramChatFileLoader": {"Telegram": "https://python.langchain.com/docs/integrations/document_loaders/telegram"}, "TelegramChatApiLoader": {"Telegram": "https://python.langchain.com/docs/integrations/document_loaders/telegram"}, "PredictionGuard": {"Prediction Guard": "https://python.langchain.com/docs/integrations/llms/predictionguard"}, "ScaNN": {"ScaNN": "https://python.langchain.com/docs/integrations/vectorstores/scann"}, "NotionDirectoryLoader": {"Notion DB": "https://python.langchain.com/docs/integrations/providers/notion", "Notion DB 1/2": "https://python.langchain.com/docs/integrations/document_loaders/notion", "Perform context-aware text splitting": "https://python.langchain.com/docs/use_cases/question_answering/how_to/document-context-aware-QA"}, "NotionDBLoader": {"Notion DB": "https://python.langchain.com/docs/integrations/providers/notion", "Notion DB 2/2": "https://python.langchain.com/docs/integrations/document_loaders/notiondb"}, "MWDumpLoader": {"MediaWikiDump": "https://python.langchain.com/docs/integrations/document_loaders/mediawikidump"}, "BraveSearchLoader": {"Brave Search": "https://python.langchain.com/docs/integrations/document_loaders/brave_search"}, "StarRocks": {"StarRocks": "https://python.langchain.com/docs/integrations/vectorstores/starrocks"}, "ElasticsearchStore": {"Elasticsearch": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/elasticsearch_self_query", "Indexing": "https://python.langchain.com/docs/modules/data_connection/indexing"}, "DatadogLogsLoader": {"Datadog Logs": "https://python.langchain.com/docs/integrations/document_loaders/datadog_logs"}, "ApifyDatasetLoader": {"Apify": "https://python.langchain.com/docs/integrations/providers/apify", "Apify Dataset": "https://python.langchain.com/docs/integrations/document_loaders/apify_dataset"}, "NLPCloud": {"NLPCloud": "https://python.langchain.com/docs/integrations/providers/nlpcloud", "NLP Cloud": 
"https://python.langchain.com/docs/integrations/llms/nlpcloud"}, "Milvus": {"Milvus": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/milvus_self_query", "Zilliz": "https://python.langchain.com/docs/integrations/vectorstores/zilliz"}, "Qdrant": {"Qdrant": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/qdrant_self_query"}, "GitbookLoader": {"GitBook": "https://python.langchain.com/docs/integrations/document_loaders/gitbook"}, "OpenSearchVectorSearch": {"OpenSearch": "https://python.langchain.com/docs/integrations/vectorstores/opensearch"}, "Pinecone": {"Pinecone": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/pinecone"}, "Rockset": {"Rockset": "https://python.langchain.com/docs/integrations/vectorstores/rockset"}, "RocksetLoader": {"Rockset": "https://python.langchain.com/docs/integrations/document_loaders/rockset"}, "Minimax": {"Minimax": "https://python.langchain.com/docs/integrations/llms/minimax"}, "UnstructuredFileLoader": {"Unstructured": "https://python.langchain.com/docs/integrations/providers/unstructured", "Unstructured File": "https://python.langchain.com/docs/integrations/document_loaders/unstructured_file"}, "SelfHostedPipeline": {"Runhouse": "https://python.langchain.com/docs/integrations/llms/runhouse"}, "SelfHostedHuggingFaceLLM": {"Runhouse": "https://python.langchain.com/docs/integrations/llms/runhouse"}, "MlflowCallbackHandler": {"MLflow": "https://python.langchain.com/docs/integrations/providers/mlflow_tracking"}, "SpreedlyLoader": {"Spreedly": "https://python.langchain.com/docs/integrations/document_loaders/spreedly"}, "OpenLLM": {"OpenLLM": "https://python.langchain.com/docs/integrations/llms/openllm"}, "PubMedLoader": {"PubMed": "https://python.langchain.com/docs/integrations/document_loaders/pubmed"}, "SearxSearchResults": {"SearxNG Search API": "https://python.langchain.com/docs/integrations/providers/searx"}, "SpacyTextSplitter": {"spaCy": "https://python.langchain.com/docs/integrations/providers/spacy", "Atlas": "https://python.langchain.com/docs/integrations/vectorstores/atlas", "Split by tokens ": "https://python.langchain.com/docs/modules/data_connection/document_transformers/text_splitters/split_by_token"}, "Modal": {"Modal": "https://python.langchain.com/docs/integrations/llms/modal"}, "PGEmbedding": {"Postgres Embedding": "https://python.langchain.com/docs/integrations/vectorstores/pgembedding"}, "Xinference": {"Xorbits Inference (Xinference)": "https://python.langchain.com/docs/integrations/llms/xinference"}, "IFixitLoader": {"iFixit": "https://python.langchain.com/docs/integrations/document_loaders/ifixit"}, "AlephAlpha": {"Aleph Alpha": "https://python.langchain.com/docs/integrations/llms/aleph_alpha"}, "PipelineAI": {"PipelineAI": "https://python.langchain.com/docs/integrations/llms/pipelineai"}, "Epsilla": {"Epsilla": "https://python.langchain.com/docs/integrations/vectorstores/epsilla"}, "LlamaCpp": {"Llama.cpp": "https://python.langchain.com/docs/integrations/llms/llamacpp", "Run LLMs locally": "https://python.langchain.com/docs/guides/local_llms", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/code_understanding", "Use local LLMs": "https://python.langchain.com/docs/use_cases/question_answering/how_to/local_retrieval_qa", "WebResearchRetriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/web_research"}, "AwaDB": {"AwaDB": 
"https://python.langchain.com/docs/integrations/vectorstores/awadb"}, "ArxivLoader": {"Arxiv": "https://python.langchain.com/docs/integrations/document_loaders/arxiv"}, "Anyscale": {"Anyscale": "https://python.langchain.com/docs/integrations/llms/anyscale"}, "AINetworkToolkit": {"AINetwork": "https://python.langchain.com/docs/integrations/toolkits/ainetwork"}, "StripeLoader": {"Stripe": "https://python.langchain.com/docs/integrations/document_loaders/stripe"}, "Bagel": {"BagelDB": "https://python.langchain.com/docs/integrations/vectorstores/bageldb"}, "BlackboardLoader": {"Blackboard": "https://python.langchain.com/docs/integrations/document_loaders/blackboard"}, "LanceDB": {"LanceDB": "https://python.langchain.com/docs/integrations/vectorstores/lancedb"}, "OneDriveLoader": {"Microsoft OneDrive": "https://python.langchain.com/docs/integrations/document_loaders/microsoft_onedrive"}, "AnalyticDB": {"AnalyticDB": "https://python.langchain.com/docs/integrations/vectorstores/analyticdb"}, "YoutubeLoader": {"YouTube": "https://python.langchain.com/docs/integrations/providers/youtube", "YouTube transcripts": "https://python.langchain.com/docs/integrations/document_loaders/youtube_transcript"}, "GoogleApiYoutubeLoader": {"YouTube": "https://python.langchain.com/docs/integrations/providers/youtube", "YouTube transcripts": "https://python.langchain.com/docs/integrations/document_loaders/youtube_transcript"}, "PromptLayerOpenAI": {"PromptLayer": "https://python.langchain.com/docs/integrations/providers/promptlayer", "PromptLayer OpenAI": "https://python.langchain.com/docs/integrations/llms/promptlayer_openai"}, "USearch": {"USearch": "https://python.langchain.com/docs/integrations/vectorstores/usearch"}, "WhyLabsCallbackHandler": {"WhyLabs": "https://python.langchain.com/docs/integrations/providers/whylabs_profiling"}, "FlyteCallbackHandler": {"Flyte": "https://python.langchain.com/docs/integrations/providers/flyte"}, "wandb_tracing_enabled": {"WandB Tracing": "https://python.langchain.com/docs/integrations/providers/wandb_tracing"}, "ManifestWrapper": {"Hazy Research": "https://python.langchain.com/docs/integrations/providers/hazy_research", "Manifest": "https://python.langchain.com/docs/integrations/llms/manifest"}, "Marqo": {"Marqo": "https://python.langchain.com/docs/integrations/vectorstores/marqo"}, "IMSDbLoader": {"IMSDb": "https://python.langchain.com/docs/integrations/document_loaders/imsdb"}, "PGVector": {"PGVector": "https://python.langchain.com/docs/integrations/vectorstores/pgvector"}, "DeepInfra": {"DeepInfra": "https://python.langchain.com/docs/integrations/llms/deepinfra"}, "ZeroShotAgent": {"Jina": "https://python.langchain.com/docs/integrations/providers/jina", "Bittensor": "https://python.langchain.com/docs/integrations/llms/bittensor", "BabyAGI with Tools": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/baby_agi_with_agent", "Message Memory in Agent backed by a database": "https://python.langchain.com/docs/modules/memory/agent_with_memory_in_db", "Memory in Agent": "https://python.langchain.com/docs/modules/memory/agent_with_memory", "Custom MRKL agent": "https://python.langchain.com/docs/modules/agents/how_to/custom_mrkl_agent", "Shared memory across agents and tools": "https://python.langchain.com/docs/modules/agents/how_to/sharedmemory_for_tools"}, "RedditPostsLoader": {"Reddit": "https://python.langchain.com/docs/integrations/document_loaders/reddit"}, "TrelloLoader": {"Trello": 
"https://python.langchain.com/docs/integrations/document_loaders/trello"}, "SKLearnVectorStore": {"scikit-learn": "https://python.langchain.com/docs/integrations/vectorstores/sklearn"}, "EverNoteLoader": {"EverNote": "https://python.langchain.com/docs/integrations/document_loaders/evernote"}, "TwitterTweetLoader": {"Twitter": "https://python.langchain.com/docs/integrations/document_loaders/twitter"}, "DiscordChatLoader": {"Discord": "https://python.langchain.com/docs/integrations/document_loaders/discord"}, "RedisCache": {"Redis": "https://python.langchain.com/docs/integrations/providers/redis", "LLM Caching integrations": "https://python.langchain.com/docs/integrations/llms/llm_caching"}, "RedisSemanticCache": {"Redis": "https://python.langchain.com/docs/integrations/providers/redis", "LLM Caching integrations": "https://python.langchain.com/docs/integrations/llms/llm_caching"}, "Redis": {"Redis": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/redis_self_query"}, "SelfQueryRetriever": {"Chroma": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/chroma_self_query", "Vectara": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/vectara_self_query", "Docugami": "https://python.langchain.com/docs/integrations/document_loaders/docugami", "Perform context-aware text splitting": "https://python.langchain.com/docs/use_cases/question_answering/how_to/document-context-aware-QA", "Milvus": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/milvus_self_query", "Weaviate": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/weaviate_self_query", "DashVector": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/dashvector", "Elasticsearch": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/elasticsearch_self_query", "Pinecone": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/pinecone", "Supabase": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/supabase_self_query", "Redis": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/redis_self_query", "MyScale": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/myscale_self_query", "Deep Lake": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/activeloop_deeplake_self_query", "Qdrant": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/qdrant_self_query"}, "MatchingEngine": {"Google Vertex AI MatchingEngine": "https://python.langchain.com/docs/integrations/vectorstores/matchingengine"}, "ClearMLCallbackHandler": {"ClearML": "https://python.langchain.com/docs/integrations/providers/clearml_tracking"}, "Cohere": {"Cohere": "https://python.langchain.com/docs/integrations/llms/cohere"}, "SlackDirectoryLoader": {"Slack": "https://python.langchain.com/docs/integrations/document_loaders/slack"}, "LLMContentHandler": {"SageMaker Endpoint": "https://python.langchain.com/docs/integrations/providers/sagemaker_endpoint", "SageMakerEndpoint": "https://python.langchain.com/docs/integrations/llms/sagemaker", "Amazon Comprehend Moderation Chain": "https://python.langchain.com/docs/guides/safety/amazon_comprehend_chain"}, "ContentHandlerBase": {"SageMaker Endpoint": "https://python.langchain.com/docs/integrations/providers/sagemaker_endpoint"}, "HNLoader": {"Hacker News": 
"https://python.langchain.com/docs/integrations/document_loaders/hacker_news"}, "Annoy": {"Annoy": "https://python.langchain.com/docs/integrations/vectorstores/annoy"}, "DashVector": {"DashVector": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/dashvector"}, "Cassandra": {"Cassandra": "https://python.langchain.com/docs/integrations/vectorstores/cassandra"}, "TencentVectorDB": {"TencentVectorDB": "https://python.langchain.com/docs/integrations/providers/tencentvectordb", "Tencent Cloud VectorDB": "https://python.langchain.com/docs/integrations/vectorstores/tencentvectordb"}, "Vearch": {"Vearch": "https://python.langchain.com/docs/integrations/providers/vearch"}, "GCSDirectoryLoader": {"Google Cloud Storage": "https://python.langchain.com/docs/integrations/providers/google_cloud_storage", "Google Cloud Storage Directory": "https://python.langchain.com/docs/integrations/document_loaders/google_cloud_storage_directory"}, "GCSFileLoader": {"Google Cloud Storage": "https://python.langchain.com/docs/integrations/providers/google_cloud_storage", "Google Cloud Storage File": "https://python.langchain.com/docs/integrations/document_loaders/google_cloud_storage_file"}, "ArthurCallbackHandler": {"Arthur": "https://python.langchain.com/docs/integrations/providers/arthur_tracking"}, "DuckDBLoader": {"DuckDB": "https://python.langchain.com/docs/integrations/document_loaders/duckdb"}, "Petals": {"Petals": "https://python.langchain.com/docs/integrations/llms/petals"}, "MomentoCache": {"Momento": "https://python.langchain.com/docs/integrations/providers/momento", "LLM Caching integrations": "https://python.langchain.com/docs/integrations/llms/llm_caching"}, "NIBittensorLLM": {"NIBittensor": "https://python.langchain.com/docs/integrations/providers/bittensor", "Bittensor": "https://python.langchain.com/docs/integrations/llms/bittensor"}, "Neo4jVector": {"Neo4j": "https://python.langchain.com/docs/integrations/providers/neo4j", "Neo4j Vector Index": "https://python.langchain.com/docs/integrations/vectorstores/neo4jvector"}, "Neo4jGraph": {"Neo4j": "https://python.langchain.com/docs/integrations/providers/neo4j", "Diffbot Graph Transformer": "https://python.langchain.com/docs/use_cases/more/graph/diffbot_graphtransformer", "Neo4j DB QA chain": "https://python.langchain.com/docs/use_cases/more/graph/graph_cypher_qa"}, "GraphCypherQAChain": {"Neo4j": "https://python.langchain.com/docs/integrations/providers/neo4j", "Memgraph QA chain": "https://python.langchain.com/docs/use_cases/more/graph/graph_memgraph_qa", "Diffbot Graph Transformer": "https://python.langchain.com/docs/use_cases/more/graph/diffbot_graphtransformer", "Neo4j DB QA chain": "https://python.langchain.com/docs/use_cases/more/graph/graph_cypher_qa"}, "AirtableLoader": {"Airtable": "https://python.langchain.com/docs/integrations/document_loaders/airtable"}, "TensorflowDatasetLoader": {"TensorFlow Datasets": "https://python.langchain.com/docs/integrations/document_loaders/tensorflow_datasets"}, "Clarifai": {"Clarifai": "https://python.langchain.com/docs/integrations/llms/clarifai"}, "BigQueryLoader": {"Google BigQuery": "https://python.langchain.com/docs/integrations/document_loaders/google_bigquery"}, "RoamLoader": {"Roam": "https://python.langchain.com/docs/integrations/document_loaders/roam"}, "Portkey": {"Log, Trace, and Monitor": "https://python.langchain.com/docs/integrations/providers/portkey/logging_tracing_portkey", "Portkey": "https://python.langchain.com/docs/integrations/providers/portkey/index"}, 
"Vectara": {"Vectara": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/vectara_self_query", "Chat Over Documents with Vectara": "https://python.langchain.com/docs/integrations/providers/vectara/vectara_chat", "Vectara Text Generation": "https://python.langchain.com/docs/integrations/providers/vectara/vectara_text_generation"}, "VectaraRetriever": {"Chat Over Documents with Vectara": "https://python.langchain.com/docs/integrations/providers/vectara/vectara_chat"}, "load_qa_chain": {"Chat Over Documents with Vectara": "https://python.langchain.com/docs/integrations/providers/vectara/vectara_chat", "Amazon Textract ": "https://python.langchain.com/docs/integrations/document_loaders/pdf-amazonTextractPDFLoader", "SageMakerEndpoint": "https://python.langchain.com/docs/integrations/llms/sagemaker", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/code_understanding", "Question Answering": "https://python.langchain.com/docs/use_cases/question_answering/question_answering", "Use local LLMs": "https://python.langchain.com/docs/use_cases/question_answering/how_to/local_retrieval_qa", "WebResearchRetriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/web_research", "Memory in the Multi-Input Chain": "https://python.langchain.com/docs/modules/memory/adding_memory_chain_multiple_inputs"}, "CONDENSE_QUESTION_PROMPT": {"Chat Over Documents with Vectara": "https://python.langchain.com/docs/integrations/providers/vectara/vectara_chat"}, "load_qa_with_sources_chain": {"Chat Over Documents with Vectara": "https://python.langchain.com/docs/integrations/providers/vectara/vectara_chat", "!pip install bs4": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/marathon_times"}, "QA_PROMPT": {"Chat Over Documents with Vectara": "https://python.langchain.com/docs/integrations/providers/vectara/vectara_chat"}, "create_csv_agent": {"CSV": "https://python.langchain.com/docs/integrations/toolkits/csv"}, "create_xorbits_agent": {"Xorbits": "https://python.langchain.com/docs/integrations/toolkits/xorbits"}, "JiraToolkit": {"Jira": "https://python.langchain.com/docs/integrations/toolkits/jira"}, "JiraAPIWrapper": {"Jira": "https://python.langchain.com/docs/integrations/toolkits/jira"}, "create_spark_dataframe_agent": {"Spark Dataframe": "https://python.langchain.com/docs/integrations/toolkits/spark"}, "PyPDFLoader": {"Document Comparison": "https://python.langchain.com/docs/integrations/toolkits/document_comparison_toolkit", "Google Cloud Storage File": "https://python.langchain.com/docs/integrations/document_loaders/google_cloud_storage_file", "MergeDocLoader": "https://python.langchain.com/docs/integrations/document_loaders/merge_doc_loader", "QA using Activeloop's DeepLake": "https://python.langchain.com/docs/use_cases/question_answering/integrations/semantic-search-over-chat"}, "create_python_agent": {"Python": "https://python.langchain.com/docs/integrations/toolkits/python"}, "PythonREPLTool": {"Python": "https://python.langchain.com/docs/integrations/toolkits/python"}, "create_pbi_agent": {"PowerBI Dataset": "https://python.langchain.com/docs/integrations/toolkits/powerbi"}, "PowerBIToolkit": {"PowerBI Dataset": "https://python.langchain.com/docs/integrations/toolkits/powerbi"}, "PowerBIDataset": {"PowerBI Dataset": "https://python.langchain.com/docs/integrations/toolkits/powerbi"}, "AzureCognitiveServicesToolkit": {"Azure Cognitive Services": 
"https://python.langchain.com/docs/integrations/toolkits/azure_cognitive_services"}, "Requests": {"Natural Language APIs": "https://python.langchain.com/docs/integrations/toolkits/openapi_nla"}, "APIOperation": {"Natural Language APIs": "https://python.langchain.com/docs/integrations/toolkits/openapi_nla"}, "OpenAPISpec": {"Natural Language APIs": "https://python.langchain.com/docs/integrations/toolkits/openapi_nla"}, "NLAToolkit": {"Natural Language APIs": "https://python.langchain.com/docs/integrations/toolkits/openapi_nla", "Plug-and-Plai": "https://python.langchain.com/docs/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval_using_plugnplai", "Custom Agent with PlugIn Retrieval": "https://python.langchain.com/docs/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval"}, "GmailToolkit": {"Gmail": "https://python.langchain.com/docs/integrations/toolkits/gmail"}, "build_resource_service": {"Gmail": "https://python.langchain.com/docs/integrations/toolkits/gmail"}, "get_gmail_credentials": {"Gmail": "https://python.langchain.com/docs/integrations/toolkits/gmail"}, "create_json_agent": {"JSON": "https://python.langchain.com/docs/integrations/toolkits/json"}, "JsonToolkit": {"JSON": "https://python.langchain.com/docs/integrations/toolkits/json"}, "JsonSpec": {"JSON": "https://python.langchain.com/docs/integrations/toolkits/json", "OpenAPI": "https://python.langchain.com/docs/integrations/toolkits/openapi"}, "AirbyteStripeLoader": {"Airbyte Question Answering": "https://python.langchain.com/docs/integrations/toolkits/airbyte_structured_qa", "Airbyte Stripe": "https://python.langchain.com/docs/integrations/document_loaders/airbyte_stripe"}, "create_pandas_dataframe_agent": {"Airbyte Question Answering": "https://python.langchain.com/docs/integrations/toolkits/airbyte_structured_qa", "Pandas Dataframe": "https://python.langchain.com/docs/integrations/toolkits/pandas", "!pip install bs4": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/marathon_times"}, "GitHubToolkit": {"Github": "https://python.langchain.com/docs/integrations/toolkits/github"}, "GitHubAPIWrapper": {"Github": "https://python.langchain.com/docs/integrations/toolkits/github"}, "GitHubAction": {"Github": "https://python.langchain.com/docs/integrations/toolkits/github"}, "create_spark_sql_agent": {"Spark SQL": "https://python.langchain.com/docs/integrations/toolkits/spark_sql"}, "SparkSQLToolkit": {"Spark SQL": "https://python.langchain.com/docs/integrations/toolkits/spark_sql"}, "SparkSQL": {"Spark SQL": "https://python.langchain.com/docs/integrations/toolkits/spark_sql"}, "create_sync_playwright_browser": {"PlayWright Browser": "https://python.langchain.com/docs/integrations/toolkits/playwright"}, "O365Toolkit": {"Office365": "https://python.langchain.com/docs/integrations/toolkits/office365"}, "MultionToolkit": {"MultiOn": "https://python.langchain.com/docs/integrations/toolkits/multion"}, "AmadeusToolkit": {"Amadeus": "https://python.langchain.com/docs/integrations/toolkits/amadeus"}, "create_vectorstore_agent": {"Vectorstore": "https://python.langchain.com/docs/integrations/toolkits/vectorstore"}, "VectorStoreToolkit": {"Vectorstore": "https://python.langchain.com/docs/integrations/toolkits/vectorstore"}, "VectorStoreInfo": {"Vectorstore": "https://python.langchain.com/docs/integrations/toolkits/vectorstore"}, "create_vectorstore_router_agent": {"Vectorstore": "https://python.langchain.com/docs/integrations/toolkits/vectorstore"}, "VectorStoreRouterToolkit": {"Vectorstore": 
"https://python.langchain.com/docs/integrations/toolkits/vectorstore"}, "reduce_openapi_spec": {"OpenAPI": "https://python.langchain.com/docs/integrations/toolkits/openapi"}, "RequestsWrapper": {"OpenAPI": "https://python.langchain.com/docs/integrations/toolkits/openapi"}, "create_openapi_agent": {"OpenAPI": "https://python.langchain.com/docs/integrations/toolkits/openapi"}, "OpenAPIToolkit": {"OpenAPI": "https://python.langchain.com/docs/integrations/toolkits/openapi"}, "GitLabToolkit": {"Gitlab": "https://python.langchain.com/docs/integrations/toolkits/gitlab"}, "GitLabAPIWrapper": {"Gitlab": "https://python.langchain.com/docs/integrations/toolkits/gitlab"}, "SQLiteVSS": {"sqlite-vss": "https://python.langchain.com/docs/integrations/vectorstores/sqlitevss"}, "RetrievalQAWithSourcesChain": {"Weaviate": "https://python.langchain.com/docs/integrations/vectorstores/weaviate", "Neo4j Vector Index": "https://python.langchain.com/docs/integrations/vectorstores/neo4jvector", "Marqo": "https://python.langchain.com/docs/integrations/vectorstores/marqo", "Psychic": "https://python.langchain.com/docs/integrations/document_loaders/psychic", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/web_scraping", "Question Answering": "https://python.langchain.com/docs/use_cases/question_answering/question_answering", "Vector SQL Retriever with MyScale": "https://python.langchain.com/docs/use_cases/qa_structured/integrations/myscale_vector_sql", "WebResearchRetriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/web_research"}, "google_palm": {"ScaNN": "https://python.langchain.com/docs/integrations/vectorstores/scann"}, "NucliaDB": {"NucliaDB": "https://python.langchain.com/docs/integrations/vectorstores/nucliadb"}, "AttributeInfo": {"Vectara": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/vectara_self_query", "Docugami": "https://python.langchain.com/docs/integrations/document_loaders/docugami", "Perform context-aware text splitting": "https://python.langchain.com/docs/use_cases/question_answering/how_to/document-context-aware-QA", "Milvus": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/milvus_self_query", "Weaviate": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/weaviate_self_query", "DashVector": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/dashvector", "Elasticsearch": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/elasticsearch_self_query", "Chroma": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/chroma_self_query", "Pinecone": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/pinecone", "Supabase": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/supabase_self_query", "Redis": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/redis_self_query", "MyScale": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/myscale_self_query", "Deep Lake": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/activeloop_deeplake_self_query", "Qdrant": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/qdrant_self_query"}, "RedisText": {"Redis": "https://python.langchain.com/docs/integrations/vectorstores/redis"}, "RedisNum": {"Redis": 
"https://python.langchain.com/docs/integrations/vectorstores/redis"}, "RedisTag": {"Redis": "https://python.langchain.com/docs/integrations/vectorstores/redis"}, "RedisFilter": {"Redis": "https://python.langchain.com/docs/integrations/vectorstores/redis"}, "InMemoryDocstore": {"Annoy": "https://python.langchain.com/docs/integrations/vectorstores/annoy", "Agents": "https://python.langchain.com/docs/use_cases/more/agents/agents", "AutoGPT": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/autogpt", "BabyAGI User Guide": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/baby_agi", "BabyAGI with Tools": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/baby_agi_with_agent", "!pip install bs4": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/marathon_times", "Generative Agents in LangChain": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/characters"}, "AtlasDB": {"Atlas": "https://python.langchain.com/docs/integrations/vectorstores/atlas"}, "OpenAIChat": {"Activeloop Deep Lake": "https://python.langchain.com/docs/integrations/vectorstores/activeloop_deeplake"}, "AlibabaCloudOpenSearch": {"Alibaba Cloud OpenSearch": "https://python.langchain.com/docs/integrations/vectorstores/alibabacloud_opensearch"}, "AlibabaCloudOpenSearchSettings": {"Alibaba Cloud OpenSearch": "https://python.langchain.com/docs/integrations/vectorstores/alibabacloud_opensearch"}, "BESVectorStore":{"Baidu Cloud VectorSearch": "https://python.langchain.com/docs/integrations/vectorstores/baiducloud_vector_search"}, "StarRocksSettings": {"StarRocks": "https://python.langchain.com/docs/integrations/vectorstores/starrocks"}, "TokenTextSplitter": {"StarRocks": "https://python.langchain.com/docs/integrations/vectorstores/starrocks", "Split by tokens ": "https://python.langchain.com/docs/modules/data_connection/document_transformers/text_splitters/split_by_token"}, "DirectoryLoader": {"StarRocks": "https://python.langchain.com/docs/integrations/vectorstores/starrocks"}, "UnstructuredMarkdownLoader": {"StarRocks": "https://python.langchain.com/docs/integrations/vectorstores/starrocks"}, "ConnectionParams": {"Tencent Cloud VectorDB": "https://python.langchain.com/docs/integrations/vectorstores/tencentvectordb"}, "DocArrayHnswSearch": {"DocArray HnswSearch": "https://python.langchain.com/docs/integrations/vectorstores/docarray_hnsw"}, "MyScaleSettings": {"MyScale": "https://python.langchain.com/docs/integrations/vectorstores/myscale"}, "AzureSearch": {"Azure Cognitive Search": "https://python.langchain.com/docs/integrations/vectorstores/azuresearch"}, "ElasticVectorSearch": {"Elasticsearch": "https://python.langchain.com/docs/integrations/vectorstores/elasticsearch", "Memory in the Multi-Input Chain": "https://python.langchain.com/docs/modules/memory/adding_memory_chain_multiple_inputs"}, "DocArrayInMemorySearch": {"DocArray InMemorySearch": "https://python.langchain.com/docs/integrations/vectorstores/docarray_in_memory"}, "ZepVectorStore": {"Zep": "https://python.langchain.com/docs/integrations/vectorstores/zep"}, "CollectionConfig": {"Zep": "https://python.langchain.com/docs/integrations/vectorstores/zep"}, "AsyncChromiumLoader": {"Beautiful Soup": "https://python.langchain.com/docs/integrations/document_transformers/beautiful_soup", "Async Chromium": "https://python.langchain.com/docs/integrations/document_loaders/async_chromium", "Set env var OPENAI_API_KEY or load from a .env file:": 
"https://python.langchain.com/docs/use_cases/web_scraping"}, "BeautifulSoupTransformer": {"Beautiful Soup": "https://python.langchain.com/docs/integrations/document_transformers/beautiful_soup", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/web_scraping"}, "NucliaTextTransformer": {"Nuclia Understanding API document transformer": "https://python.langchain.com/docs/integrations/document_transformers/nuclia_transformer"}, "create_metadata_tagger": {"OpenAI Functions Metadata Tagger": "https://python.langchain.com/docs/integrations/document_transformers/openai_metadata_tagger"}, "AsyncHtmlLoader": {"html2text": "https://python.langchain.com/docs/integrations/document_transformers/html2text", "AsyncHtmlLoader": "https://python.langchain.com/docs/integrations/document_loaders/async_html", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/web_scraping"}, "Html2TextTransformer": {"html2text": "https://python.langchain.com/docs/integrations/document_transformers/html2text", "Async Chromium": "https://python.langchain.com/docs/integrations/document_loaders/async_chromium", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/web_scraping"}, "DoctranPropertyExtractor": {"Doctran Extract Properties": "https://python.langchain.com/docs/integrations/document_transformers/doctran_extract_properties"}, "DoctranQATransformer": {"Doctran Interrogate Documents": "https://python.langchain.com/docs/integrations/document_transformers/doctran_interrogate_document"}, "Blob": {"docai.md": "https://python.langchain.com/docs/integrations/document_transformers/docai", "Embaas": "https://python.langchain.com/docs/integrations/document_loaders/embaas"}, "DocAIParser": {"docai.md": "https://python.langchain.com/docs/integrations/document_transformers/docai"}, "DoctranTextTranslator": {"Doctran Translate Documents": "https://python.langchain.com/docs/integrations/document_transformers/doctran_translate_document"}, "SnowflakeLoader": {"Snowflake": "https://python.langchain.com/docs/integrations/document_loaders/snowflake"}, "AcreomLoader": {"acreom": "https://python.langchain.com/docs/integrations/document_loaders/acreom"}, "ArcGISLoader": {"ArcGIS": "https://python.langchain.com/docs/integrations/document_loaders/arcgis"}, "UnstructuredCSVLoader": {"CSV": "https://python.langchain.com/docs/integrations/document_loaders/csv"}, "XorbitsLoader": {"Xorbits Pandas DataFrame": "https://python.langchain.com/docs/integrations/document_loaders/xorbits"}, "UnstructuredEmailLoader": {"Email": "https://python.langchain.com/docs/integrations/document_loaders/email"}, "OutlookMessageLoader": {"Email": "https://python.langchain.com/docs/integrations/document_loaders/email"}, "AssemblyAIAudioTranscriptLoader": {"AssemblyAI Audio Transcripts": "https://python.langchain.com/docs/integrations/document_loaders/assemblyai"}, "TranscriptFormat": {"AssemblyAI Audio Transcripts": "https://python.langchain.com/docs/integrations/document_loaders/assemblyai"}, "BlockchainDocumentLoader": {"Blockchain": "https://python.langchain.com/docs/integrations/document_loaders/blockchain"}, "BlockchainType": {"Blockchain": "https://python.langchain.com/docs/integrations/document_loaders/blockchain"}, "RecursiveUrlLoader": {"Recursive URL Loader": "https://python.langchain.com/docs/integrations/document_loaders/recursive_url_loader"}, "JoplinLoader": {"Joplin": 
"https://python.langchain.com/docs/integrations/document_loaders/joplin"}, "AirbyteSalesforceLoader": {"Airbyte Salesforce": "https://python.langchain.com/docs/integrations/document_loaders/airbyte_salesforce"}, "EtherscanLoader": {"Etherscan Loader": "https://python.langchain.com/docs/integrations/document_loaders/Etherscan"}, "AirbyteCDKLoader": {"Airbyte CDK": "https://python.langchain.com/docs/integrations/document_loaders/airbyte_cdk"}, "Docx2txtLoader": {"Microsoft Word": "https://python.langchain.com/docs/integrations/document_loaders/microsoft_word"}, "OpenAIWhisperParser": {"Loading documents from a YouTube url": "https://python.langchain.com/docs/integrations/document_loaders/youtube_audio"}, "YoutubeAudioLoader": {"Loading documents from a YouTube url": "https://python.langchain.com/docs/integrations/document_loaders/youtube_audio"}, "UnstructuredURLLoader": {"URL": "https://python.langchain.com/docs/integrations/document_loaders/url"}, "SeleniumURLLoader": {"URL": "https://python.langchain.com/docs/integrations/document_loaders/url"}, "PlaywrightURLLoader": {"URL": "https://python.langchain.com/docs/integrations/document_loaders/url"}, "OpenCityDataLoader": {"Geopandas": "https://python.langchain.com/docs/integrations/document_loaders/geopandas", "Open City Data": "https://python.langchain.com/docs/integrations/document_loaders/open_city_data"}, "GeoDataFrameLoader": {"Geopandas": "https://python.langchain.com/docs/integrations/document_loaders/geopandas"}, "OBSFileLoader": {"Huawei OBS File": "https://python.langchain.com/docs/integrations/document_loaders/huawei_obs_file"}, "HuggingFaceDatasetLoader": {"HuggingFace dataset": "https://python.langchain.com/docs/integrations/document_loaders/hugging_face_dataset"}, "DropboxLoader": {"Dropbox": "https://python.langchain.com/docs/integrations/document_loaders/dropbox"}, "AirbyteTypeformLoader": {"Airbyte Typeform": "https://python.langchain.com/docs/integrations/document_loaders/airbyte_typeform"}, "MHTMLLoader": {"mhtml": "https://python.langchain.com/docs/integrations/document_loaders/mhtml"}, "NewsURLLoader": {"News URL": "https://python.langchain.com/docs/integrations/document_loaders/news"}, "ImageCaptionLoader": {"Image captions": "https://python.langchain.com/docs/integrations/document_loaders/image_captions"}, "UnstructuredRSTLoader": {"RST": "https://python.langchain.com/docs/integrations/document_loaders/rst"}, "ConversationBufferWindowMemory": {"Figma": "https://python.langchain.com/docs/integrations/document_loaders/figma", "OpaquePrompts": "https://python.langchain.com/docs/integrations/llms/opaqueprompts", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/chatbots", "Meta-Prompt": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/meta_prompt", "Create ChatGPT clone": "https://python.langchain.com/docs/modules/agents/how_to/chatgpt_clone"}, "UnstructuredImageLoader": {"Images": "https://python.langchain.com/docs/integrations/document_loaders/image"}, "NucliaLoader": {"Nuclia Understanding API document loader": "https://python.langchain.com/docs/integrations/document_loaders/nuclia"}, "TencentCOSFileLoader": {"Tencent COS File": "https://python.langchain.com/docs/integrations/document_loaders/tencent_cos_file"}, "TomlLoader": {"TOML": "https://python.langchain.com/docs/integrations/document_loaders/toml"}, "UnstructuredAPIFileLoader": {"Unstructured File": "https://python.langchain.com/docs/integrations/document_loaders/unstructured_file"}, 
"PsychicLoader": {"Psychic": "https://python.langchain.com/docs/integrations/document_loaders/psychic"}, "TencentCOSDirectoryLoader": {"Tencent COS Directory": "https://python.langchain.com/docs/integrations/document_loaders/tencent_cos_directory"}, "GitHubIssuesLoader": {"GitHub": "https://python.langchain.com/docs/integrations/document_loaders/github"}, "UnstructuredOrgModeLoader": {"Org-mode": "https://python.langchain.com/docs/integrations/document_loaders/org_mode"}, "LarkSuiteDocLoader": {"LarkSuite (FeiShu)": "https://python.langchain.com/docs/integrations/document_loaders/larksuite"}, "load_summarize_chain": {"LarkSuite (FeiShu)": "https://python.langchain.com/docs/integrations/document_loaders/larksuite", "LLM Caching integrations": "https://python.langchain.com/docs/integrations/llms/llm_caching", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/summarization"}, "IuguLoader": {"Iugu": "https://python.langchain.com/docs/integrations/document_loaders/iugu"}, "SharePointLoader": {"Microsoft SharePoint": "https://python.langchain.com/docs/integrations/document_loaders/microsoft_sharepoint"}, "UnstructuredEPubLoader": {"EPub ": "https://python.langchain.com/docs/integrations/document_loaders/epub"}, "UnstructuredFileIOLoader": {"Google Drive": "https://python.langchain.com/docs/integrations/document_loaders/google_drive"}, "BrowserlessLoader": {"Browserless": "https://python.langchain.com/docs/integrations/document_loaders/browserless"}, "BibtexLoader": {"BibTeX": "https://python.langchain.com/docs/integrations/document_loaders/bibtex"}, "AirbyteHubspotLoader": {"Airbyte Hubspot": "https://python.langchain.com/docs/integrations/document_loaders/airbyte_hubspot"}, "AirbyteGongLoader": {"Airbyte Gong": "https://python.langchain.com/docs/integrations/document_loaders/airbyte_gong"}, "ReadTheDocsLoader": {"ReadTheDocs Documentation": "https://python.langchain.com/docs/integrations/document_loaders/readthedocs_documentation"}, "PolarsDataFrameLoader": {"Polars DataFrame": "https://python.langchain.com/docs/integrations/document_loaders/polars_dataframe"}, "DataFrameLoader": {"Pandas DataFrame": "https://python.langchain.com/docs/integrations/document_loaders/pandas_dataframe"}, "GoogleApiClient": {"YouTube transcripts": "https://python.langchain.com/docs/integrations/document_loaders/youtube_transcript"}, "ConcurrentLoader": {"Concurrent Loader": "https://python.langchain.com/docs/integrations/document_loaders/concurrent"}, "RSSFeedLoader": {"RSS Feeds": "https://python.langchain.com/docs/integrations/document_loaders/rss"}, "NotebookLoader": {"Jupyter Notebook": "https://python.langchain.com/docs/integrations/document_loaders/jupyter_notebook", "Notebook": "https://python.langchain.com/docs/integrations/document_loaders/example_data/notebook"}, "UnstructuredTSVLoader": {"TSV": "https://python.langchain.com/docs/integrations/document_loaders/tsv"}, "UnstructuredODTLoader": {"Open Document Format (ODT)": "https://python.langchain.com/docs/integrations/document_loaders/odt"}, "EmbaasBlobLoader": {"Embaas": "https://python.langchain.com/docs/integrations/document_loaders/embaas"}, "EmbaasLoader": {"Embaas": "https://python.langchain.com/docs/integrations/document_loaders/embaas"}, "UnstructuredXMLLoader": {"XML": "https://python.langchain.com/docs/integrations/document_loaders/xml"}, "MaxComputeLoader": {"Alibaba Cloud MaxCompute": "https://python.langchain.com/docs/integrations/document_loaders/alibaba_cloud_maxcompute"}, 
"CubeSemanticLoader": {"Cube Semantic Layer": "https://python.langchain.com/docs/integrations/document_loaders/cube_semantic"}, "UnstructuredExcelLoader": {"Microsoft Excel": "https://python.langchain.com/docs/integrations/document_loaders/excel"}, "AmazonTextractPDFLoader": {"Amazon Textract ": "https://python.langchain.com/docs/integrations/document_loaders/pdf-amazonTextractPDFLoader"}, "Language": {"Source Code": "https://python.langchain.com/docs/integrations/document_loaders/source_code", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/code_understanding"}, "LanguageParser": {"Source Code": "https://python.langchain.com/docs/integrations/document_loaders/source_code", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/code_understanding"}, "SRTLoader": {"Subtitle": "https://python.langchain.com/docs/integrations/document_loaders/subtitle"}, "MastodonTootsLoader": {"Mastodon": "https://python.langchain.com/docs/integrations/document_loaders/mastodon"}, "AirbyteShopifyLoader": {"Airbyte Shopify": "https://python.langchain.com/docs/integrations/document_loaders/airbyte_shopify"}, "MergedDataLoader": {"MergeDocLoader": "https://python.langchain.com/docs/integrations/document_loaders/merge_doc_loader"}, "PySparkDataFrameLoader": {"PySpark DataFrame Loader": "https://python.langchain.com/docs/integrations/document_loaders/pyspark_dataframe"}, "AirbyteZendeskSupportLoader": {"Airbyte Zendesk Support": "https://python.langchain.com/docs/integrations/document_loaders/airbyte_zendesk_support"}, "CoNLLULoader": {"CoNLL-U": "https://python.langchain.com/docs/integrations/document_loaders/conll-u"}, "OBSDirectoryLoader": {"Huawei OBS Directory": "https://python.langchain.com/docs/integrations/document_loaders/huawei_obs_directory"}, "FaunaLoader": {"Fauna": "https://python.langchain.com/docs/integrations/document_loaders/fauna"}, "SitemapLoader": {"Sitemap": "https://python.langchain.com/docs/integrations/document_loaders/sitemap"}, "DocumentIntelligenceLoader": {"Azure Document Intelligence": "https://python.langchain.com/docs/integrations/document_loaders/azure_document_intelligence"}, "StochasticAI": {"StochasticAI": "https://python.langchain.com/docs/integrations/llms/stochasticai"}, "FireworksChat": {"Fireworks": "https://python.langchain.com/docs/integrations/llms/fireworks"}, "OctoAIEndpoint": {"OctoAI": "https://python.langchain.com/docs/integrations/llms/octoai"}, "Writer": {"Writer": "https://python.langchain.com/docs/integrations/llms/writer"}, "TextGen": {"TextGen": "https://python.langchain.com/docs/integrations/llms/textgen"}, "ForefrontAI": {"ForefrontAI": "https://python.langchain.com/docs/integrations/llms/forefrontai"}, "MosaicML": {"MosaicML": "https://python.langchain.com/docs/integrations/llms/mosaicml"}, "KoboldApiLLM": {"KoboldAI API": "https://python.langchain.com/docs/integrations/llms/koboldai"}, "CerebriumAI": {"CerebriumAI": "https://python.langchain.com/docs/integrations/llms/cerebriumai"}, "VertexAI": {"Google Vertex AI PaLM ": "https://python.langchain.com/docs/integrations/llms/google_vertex_ai_palm"}, "VertexAIModelGarden": {"Google Vertex AI PaLM ": "https://python.langchain.com/docs/integrations/llms/google_vertex_ai_palm"}, "Ollama": {"Ollama": "https://python.langchain.com/docs/integrations/llms/ollama", "Run LLMs locally": "https://python.langchain.com/docs/guides/local_llms"}, "OpaquePrompts": {"OpaquePrompts": 
"https://python.langchain.com/docs/integrations/llms/opaqueprompts"}, "RunnableMap": {"OpaquePrompts": "https://python.langchain.com/docs/integrations/llms/opaqueprompts", "interface.md": "https://python.langchain.com/docs/expression_language/interface", "First we add a step to load memory": "https://python.langchain.com/docs/expression_language/cookbook/retrieval", "sql_db.md": "https://python.langchain.com/docs/expression_language/cookbook/sql_db", "prompt_llm_parser.md": "https://python.langchain.com/docs/expression_language/cookbook/prompt_llm_parser", "Adding memory": "https://python.langchain.com/docs/expression_language/cookbook/memory", "multiple_chains.md": "https://python.langchain.com/docs/expression_language/cookbook/multiple_chains"}, "TitanTakeoff": {"Titan Takeoff": "https://python.langchain.com/docs/integrations/llms/titan_takeoff"}, "Databricks": {"Databricks": "https://python.langchain.com/docs/integrations/llms/databricks"}, "QianfanLLMEndpoint": {"Baidu Qianfan": "https://python.langchain.com/docs/integrations/llms/baidu_qianfan_endpoint"}, "VLLM": {"vLLM": "https://python.langchain.com/docs/integrations/llms/vllm"}, "VLLMOpenAI": {"vLLM": "https://python.langchain.com/docs/integrations/llms/vllm"}, "AzureMLOnlineEndpoint": {"Azure ML": "https://python.langchain.com/docs/integrations/llms/azure_ml"}, "ContentFormatterBase": {"Azure ML": "https://python.langchain.com/docs/integrations/llms/azure_ml"}, "DollyContentFormatter": {"Azure ML": "https://python.langchain.com/docs/integrations/llms/azure_ml"}, "load_llm": {"Azure ML": "https://python.langchain.com/docs/integrations/llms/azure_ml", "Serialization": "https://python.langchain.com/docs/modules/model_io/models/llms/llm_serialization"}, "AzureMLEndpointClient": {"Azure ML": "https://python.langchain.com/docs/integrations/llms/azure_ml"}, "MapReduceChain": {"Manifest": "https://python.langchain.com/docs/integrations/llms/manifest", "LLM Caching integrations": "https://python.langchain.com/docs/integrations/llms/llm_caching", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/summarization"}, "ModelLaboratory": {"Manifest": "https://python.langchain.com/docs/integrations/llms/manifest", "Model comparison": "https://python.langchain.com/docs/guides/model_laboratory"}, "Tongyi": {"Tongyi Qwen": "https://python.langchain.com/docs/integrations/llms/tongyi", "DashVector": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/dashvector"}, "InMemoryCache": {"LLM Caching integrations": "https://python.langchain.com/docs/integrations/llms/llm_caching"}, "SQLiteCache": {"LLM Caching integrations": "https://python.langchain.com/docs/integrations/llms/llm_caching"}, "GPTCache": {"LLM Caching integrations": "https://python.langchain.com/docs/integrations/llms/llm_caching"}, "SQLAlchemyCache": {"LLM Caching integrations": "https://python.langchain.com/docs/integrations/llms/llm_caching"}, "GooseAI": {"GooseAI": "https://python.langchain.com/docs/integrations/llms/gooseai"}, "OpenLM": {"OpenLM": "https://python.langchain.com/docs/integrations/llms/openlm"}, "CTranslate2": {"CTranslate2": "https://python.langchain.com/docs/integrations/llms/ctranslate2"}, "HuggingFaceTextGenInference": {"Huggingface TextGen Inference": "https://python.langchain.com/docs/integrations/llms/huggingface_textgen_inference"}, "ChatGLM": {"ChatGLM": "https://python.langchain.com/docs/integrations/llms/chatglm"}, "Replicate": {"Replicate": 
"https://python.langchain.com/docs/integrations/llms/replicate"}, "DatetimeOutputParser": {"Fallbacks": "https://python.langchain.com/docs/guides/fallbacks", "Datetime parser": "https://python.langchain.com/docs/modules/model_io/output_parsers/datetime"}, "ConditionalPromptSelector": {"Run LLMs locally": "https://python.langchain.com/docs/guides/local_llms"}, "tracing_v2_enabled": {"LangSmith Walkthrough": "https://python.langchain.com/docs/guides/langsmith/walkthrough"}, "wait_for_all_tracers": {"LangSmith Walkthrough": "https://python.langchain.com/docs/guides/langsmith/walkthrough"}, "EvaluatorType": {"LangSmith Walkthrough": "https://python.langchain.com/docs/guides/langsmith/walkthrough", "Criteria Evaluation": "https://python.langchain.com/docs/guides/evaluation/string/criteria_eval_chain"}, "RunEvalConfig": {"LangSmith Walkthrough": "https://python.langchain.com/docs/guides/langsmith/walkthrough"}, "arun_on_dataset": {"LangSmith Walkthrough": "https://python.langchain.com/docs/guides/langsmith/walkthrough"}, "run_on_dataset": {"LangSmith Walkthrough": "https://python.langchain.com/docs/guides/langsmith/walkthrough"}, "load_chain": {"Hugging Face Prompt Injection Identification": "https://python.langchain.com/docs/guides/safety/hugging_face_prompt_injection", "Serialization": "https://python.langchain.com/docs/modules/chains/how_to/serialization", "Loading from LangChainHub": "https://python.langchain.com/docs/modules/chains/how_to/from_hub"}, "FakeListLLM": {"Amazon Comprehend Moderation Chain": "https://python.langchain.com/docs/guides/safety/amazon_comprehend_chain", "Fake LLM": "https://python.langchain.com/docs/modules/model_io/models/llms/fake_llm"}, "load_prompt": {"Amazon Comprehend Moderation Chain": "https://python.langchain.com/docs/guides/safety/amazon_comprehend_chain", "Serialization": "https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/prompt_serialization"}, "openai": {"OpenAI Adapter": "https://python.langchain.com/docs/guides/adapters/openai"}, "load_evaluator": {"Comparing Chain Outputs": "https://python.langchain.com/docs/guides/evaluation/examples/comparisons", "Agent Trajectory": "https://python.langchain.com/docs/guides/evaluation/trajectory/trajectory_eval", "Pairwise Embedding Distance ": "https://python.langchain.com/docs/guides/evaluation/comparison/pairwise_embedding_distance", "Pairwise String Comparison": "https://python.langchain.com/docs/guides/evaluation/comparison/pairwise_string", "Criteria Evaluation": "https://python.langchain.com/docs/guides/evaluation/string/criteria_eval_chain", "String Distance": "https://python.langchain.com/docs/guides/evaluation/string/string_distance", "Embedding Distance": "https://python.langchain.com/docs/guides/evaluation/string/embedding_distance"}, "load_dataset": {"Comparing Chain Outputs": "https://python.langchain.com/docs/guides/evaluation/examples/comparisons"}, "AgentAction": {"Custom Trajectory Evaluator": "https://python.langchain.com/docs/guides/evaluation/trajectory/custom", "Agents": "https://python.langchain.com/docs/use_cases/more/agents/agents", "Plug-and-Plai": "https://python.langchain.com/docs/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval_using_plugnplai", "Wikibase Agent": "https://python.langchain.com/docs/use_cases/more/agents/agents/wikibase_agent", "SalesGPT - Your Context-Aware AI Sales Assistant With Knowledge Base": "https://python.langchain.com/docs/use_cases/more/agents/agents/sales_agent_with_context", "Custom Agent with PlugIn Retrieval": 
"https://python.langchain.com/docs/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval", "Multiple callback handlers": "https://python.langchain.com/docs/modules/callbacks/multiple_callbacks", "Custom multi-action agent": "https://python.langchain.com/docs/modules/agents/how_to/custom_multi_action_agent", "Custom agent": "https://python.langchain.com/docs/modules/agents/how_to/custom_agent", "Custom agent with tool retrieval": "https://python.langchain.com/docs/modules/agents/how_to/custom_agent_with_tool_retrieval"}, "AgentTrajectoryEvaluator": {"Custom Trajectory Evaluator": "https://python.langchain.com/docs/guides/evaluation/trajectory/custom"}, "EmbeddingDistance": {"Pairwise Embedding Distance ": "https://python.langchain.com/docs/guides/evaluation/comparison/pairwise_embedding_distance", "Embedding Distance": "https://python.langchain.com/docs/guides/evaluation/string/embedding_distance"}, "PairwiseStringEvaluator": {"Custom Pairwise Evaluator": "https://python.langchain.com/docs/guides/evaluation/comparison/custom"}, "Criteria": {"Criteria Evaluation": "https://python.langchain.com/docs/guides/evaluation/string/criteria_eval_chain"}, "StringEvaluator": {"Custom String Evaluator": "https://python.langchain.com/docs/guides/evaluation/string/custom"}, "StringDistance": {"String Distance": "https://python.langchain.com/docs/guides/evaluation/string/string_distance"}, "WebResearchRetriever": {"Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/web_scraping", "WebResearchRetriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/web_research"}, "ConversationSummaryMemory": {"Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/chatbots", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/code_understanding", "Multiple Memory classes": "https://python.langchain.com/docs/modules/memory/multiple_memory"}, "ConversationSummaryBufferMemory": {"Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/chatbots", "Conversation Summary Buffer": "https://python.langchain.com/docs/modules/memory/types/summary_buffer"}, "MessagesPlaceholder": {"Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/chatbots", "Conversational Retrieval Agent": "https://python.langchain.com/docs/use_cases/question_answering/how_to/conversational_retrieval_agents", "Agents": "https://python.langchain.com/docs/use_cases/more/agents/agents", "Memory in LLMChain": "https://python.langchain.com/docs/modules/memory/adding_memory", "Add Memory to OpenAI Functions Agent": "https://python.langchain.com/docs/modules/agents/how_to/add_memory_openai_functions", "Types of `MessagePromptTemplate`": "https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/msg_prompt_templates", "Adding memory": "https://python.langchain.com/docs/expression_language/cookbook/memory"}, "StuffDocumentsChain": {"Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/summarization", "Structure answers with OpenAI functions": "https://python.langchain.com/docs/use_cases/question_answering/integrations/openai_functions_retrieval_qa", "Lost in the middle: The problem with long contexts": "https://python.langchain.com/docs/modules/data_connection/document_transformers/post_retrieval/long_context_reorder"}, "ReduceDocumentsChain": 
{"Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/summarization"}, "MapReduceDocumentsChain": {"Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/summarization"}, "create_extraction_chain_pydantic": {"Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/extraction"}, "PydanticOutputParser": {"Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/extraction", "MultiQueryRetriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/MultiQueryRetriever", "WebResearchRetriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/web_research", "Retry parser": "https://python.langchain.com/docs/modules/model_io/output_parsers/retry", "Pydantic (JSON) parser": "https://python.langchain.com/docs/modules/model_io/output_parsers/pydantic"}, "get_openapi_chain": {"Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/apis"}, "APIChain": {"Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/apis"}, "open_meteo_docs": {"Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/apis"}, "tmdb_docs": {"Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/apis"}, "podcast_docs": {"Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/apis"}, "LLMRequestsChain": {"Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/apis"}, "create_tagging_chain_pydantic": {"Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/tagging"}, "MultiQueryRetriever": {"Question Answering": "https://python.langchain.com/docs/use_cases/question_answering/question_answering", "MultiQueryRetriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/MultiQueryRetriever"}, "MarkdownHeaderTextSplitter": {"Perform context-aware text splitting": "https://python.langchain.com/docs/use_cases/question_answering/how_to/document-context-aware-QA", "MarkdownHeaderTextSplitter": "https://python.langchain.com/docs/modules/data_connection/document_transformers/text_splitters/markdown_header_metadata"}, "create_conversational_retrieval_agent": {"Conversational Retrieval Agent": "https://python.langchain.com/docs/use_cases/question_answering/how_to/conversational_retrieval_agents"}, "AgentTokenBufferMemory": {"Conversational Retrieval Agent": "https://python.langchain.com/docs/use_cases/question_answering/how_to/conversational_retrieval_agents"}, "create_sql_query_chain": {"Multiple Retrieval Sources": "https://python.langchain.com/docs/use_cases/question_answering/how_to/multiple_retrieval", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/qa_structured/sql", "SQL": "https://python.langchain.com/docs/use_cases/sql/sql"}, "create_citation_fuzzy_match_chain": {"Cite sources": "https://python.langchain.com/docs/use_cases/question_answering/how_to/qa_citations"}, "BaseRetriever": {"Retrieve as you generate with FLARE": "https://python.langchain.com/docs/use_cases/question_answering/how_to/flare"}, "AsyncCallbackManagerForRetrieverRun": {"Retrieve as you generate with FLARE": 
"https://python.langchain.com/docs/use_cases/question_answering/how_to/flare"}, "CallbackManagerForRetrieverRun": {"Retrieve as you generate with FLARE": "https://python.langchain.com/docs/use_cases/question_answering/how_to/flare"}, "FlareChain": {"Retrieve as you generate with FLARE": "https://python.langchain.com/docs/use_cases/question_answering/how_to/flare"}, "HypotheticalDocumentEmbedder": {"Improve document indexing with HyDE": "https://python.langchain.com/docs/use_cases/question_answering/how_to/hyde"}, "create_qa_with_sources_chain": {"Structure answers with OpenAI functions": "https://python.langchain.com/docs/use_cases/question_answering/integrations/openai_functions_retrieval_qa"}, "create_qa_with_structure_chain": {"Structure answers with OpenAI functions": "https://python.langchain.com/docs/use_cases/question_answering/integrations/openai_functions_retrieval_qa"}, "NeptuneGraph": {"Neptune Open Cypher QA Chain": "https://python.langchain.com/docs/use_cases/more/graph/neptune_cypher_qa"}, "NeptuneOpenCypherQAChain": {"Neptune Open Cypher QA Chain": "https://python.langchain.com/docs/use_cases/more/graph/neptune_cypher_qa"}, "NebulaGraphQAChain": {"NebulaGraphQAChain": "https://python.langchain.com/docs/use_cases/more/graph/graph_nebula_qa"}, "NebulaGraph": {"NebulaGraphQAChain": "https://python.langchain.com/docs/use_cases/more/graph/graph_nebula_qa"}, "MemgraphGraph": {"Memgraph QA chain": "https://python.langchain.com/docs/use_cases/more/graph/graph_memgraph_qa"}, "KuzuGraph": {"KuzuQAChain": "https://python.langchain.com/docs/use_cases/more/graph/graph_kuzu_qa"}, "KuzuQAChain": {"KuzuQAChain": "https://python.langchain.com/docs/use_cases/more/graph/graph_kuzu_qa"}, "HugeGraphQAChain": {"HugeGraph QA Chain": "https://python.langchain.com/docs/use_cases/more/graph/graph_hugegraph_qa"}, "HugeGraph": {"HugeGraph QA Chain": "https://python.langchain.com/docs/use_cases/more/graph/graph_hugegraph_qa"}, "GraphSparqlQAChain": {"GraphSparqlQAChain": "https://python.langchain.com/docs/use_cases/more/graph/graph_sparql_qa"}, "RdfGraph": {"GraphSparqlQAChain": "https://python.langchain.com/docs/use_cases/more/graph/graph_sparql_qa"}, "ArangoGraph": {"ArangoDB QA chain": "https://python.langchain.com/docs/use_cases/more/graph/graph_arangodb_qa"}, "ArangoGraphQAChain": {"ArangoDB QA chain": "https://python.langchain.com/docs/use_cases/more/graph/graph_arangodb_qa"}, "GraphIndexCreator": {"Graph QA": "https://python.langchain.com/docs/use_cases/more/graph/graph_qa"}, "GraphQAChain": {"Graph QA": "https://python.langchain.com/docs/use_cases/more/graph/graph_qa"}, "NetworkxEntityGraph": {"Graph QA": "https://python.langchain.com/docs/use_cases/more/graph/graph_qa"}, "FalkorDBGraph": {"FalkorDBQAChain": "https://python.langchain.com/docs/use_cases/more/graph/graph_falkordb_qa"}, "FalkorDBQAChain": {"FalkorDBQAChain": "https://python.langchain.com/docs/use_cases/more/graph/graph_falkordb_qa"}, "AgentFinish": {"Agents": "https://python.langchain.com/docs/use_cases/more/agents/agents", "Plug-and-Plai": "https://python.langchain.com/docs/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval_using_plugnplai", "Wikibase Agent": "https://python.langchain.com/docs/use_cases/more/agents/agents/wikibase_agent", "SalesGPT - Your Context-Aware AI Sales Assistant With Knowledge Base": "https://python.langchain.com/docs/use_cases/more/agents/agents/sales_agent_with_context", "Custom Agent with PlugIn Retrieval": 
"https://python.langchain.com/docs/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval", "Custom multi-action agent": "https://python.langchain.com/docs/modules/agents/how_to/custom_multi_action_agent", "Running Agent as an Iterator": "https://python.langchain.com/docs/modules/agents/how_to/agent_iter", "Custom agent": "https://python.langchain.com/docs/modules/agents/how_to/custom_agent", "Custom agent with tool retrieval": "https://python.langchain.com/docs/modules/agents/how_to/custom_agent_with_tool_retrieval"}, "BaseSingleActionAgent": {"Agents": "https://python.langchain.com/docs/use_cases/more/agents/agents", "Custom agent": "https://python.langchain.com/docs/modules/agents/how_to/custom_agent"}, "FileChatMessageHistory": {"AutoGPT": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/autogpt"}, "BaseLLM": {"BabyAGI User Guide": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/baby_agi", "BabyAGI with Tools": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/baby_agi_with_agent", "SalesGPT - Your Context-Aware AI Sales Assistant With Knowledge Base": "https://python.langchain.com/docs/use_cases/more/agents/agents/sales_agent_with_context"}, "VectorStore": {"BabyAGI User Guide": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/baby_agi", "BabyAGI with Tools": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/baby_agi_with_agent"}, "Chain": {"BabyAGI User Guide": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/baby_agi", "BabyAGI with Tools": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/baby_agi_with_agent", "SalesGPT - Your Context-Aware AI Sales Assistant With Knowledge Base": "https://python.langchain.com/docs/use_cases/more/agents/agents/sales_agent_with_context", "Custom chain": "https://python.langchain.com/docs/modules/chains/how_to/custom_chain"}, "BaseTool": {"!pip install bs4": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/marathon_times", "Defining Custom Tools": "https://python.langchain.com/docs/modules/agents/tools/custom_tools", "Combine agents and vector stores": "https://python.langchain.com/docs/modules/agents/how_to/agent_vectorstore", "Custom functions with OpenAI Functions Agent": "https://python.langchain.com/docs/modules/agents/how_to/custom-functions-with-openai-functions-agent"}, "BaseCombineDocumentsChain": {"!pip install bs4": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/marathon_times"}, "LLMSingleActionAgent": {"Plug-and-Plai": "https://python.langchain.com/docs/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval_using_plugnplai", "Wikibase Agent": "https://python.langchain.com/docs/use_cases/more/agents/agents/wikibase_agent", "SalesGPT - Your Context-Aware AI Sales Assistant With Knowledge Base": "https://python.langchain.com/docs/use_cases/more/agents/agents/sales_agent_with_context", "Custom Agent with PlugIn Retrieval": "https://python.langchain.com/docs/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval", "Custom agent with tool retrieval": "https://python.langchain.com/docs/modules/agents/how_to/custom_agent_with_tool_retrieval"}, "AgentOutputParser": {"Plug-and-Plai": "https://python.langchain.com/docs/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval_using_plugnplai", "Wikibase Agent": 
"https://python.langchain.com/docs/use_cases/more/agents/agents/wikibase_agent", "SalesGPT - Your Context-Aware AI Sales Assistant With Knowledge Base": "https://python.langchain.com/docs/use_cases/more/agents/agents/sales_agent_with_context", "Custom Agent with PlugIn Retrieval": "https://python.langchain.com/docs/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval", "Custom agent with tool retrieval": "https://python.langchain.com/docs/modules/agents/how_to/custom_agent_with_tool_retrieval"}, "StringPromptTemplate": {"Plug-and-Plai": "https://python.langchain.com/docs/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval_using_plugnplai", "Wikibase Agent": "https://python.langchain.com/docs/use_cases/more/agents/agents/wikibase_agent", "SalesGPT - Your Context-Aware AI Sales Assistant With Knowledge Base": "https://python.langchain.com/docs/use_cases/more/agents/agents/sales_agent_with_context", "Custom Agent with PlugIn Retrieval": "https://python.langchain.com/docs/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval", "Custom agent with tool retrieval": "https://python.langchain.com/docs/modules/agents/how_to/custom_agent_with_tool_retrieval", "Custom prompt template": "https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/custom_prompt_template", "Connecting to a Feature Store": "https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/connecting_to_a_feature_store"}, "AIPlugin": {"Plug-and-Plai": "https://python.langchain.com/docs/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval_using_plugnplai", "Custom Agent with PlugIn Retrieval": "https://python.langchain.com/docs/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval"}, "SteamshipImageGenerationTool": {"Multi-modal outputs: Image & Text": "https://python.langchain.com/docs/use_cases/more/agents/multi_modal/multi_modal_output_agent"}, "RegexParser": {"Multi-Agent Simulated Environment: Petting Zoo": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/petting_zoo", "Multi-agent decentralized speaker selection": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/multiagent_bidding", "Multi-agent authoritarian speaker selection": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/multiagent_authoritarian", "Simulated Environment: Gymnasium": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/gymnasium"}, "TimeWeightedVectorStoreRetriever": {"Generative Agents in LangChain": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/characters"}, "LLMBashChain": {"Bash chain": "https://python.langchain.com/docs/use_cases/more/code_writing/llm_bash"}, "BashOutputParser": {"Bash chain": "https://python.langchain.com/docs/use_cases/more/code_writing/llm_bash"}, "BashProcess": {"Bash chain": "https://python.langchain.com/docs/use_cases/more/code_writing/llm_bash"}, "LLMSymbolicMathChain": {"LLM Symbolic Math ": "https://python.langchain.com/docs/use_cases/more/code_writing/llm_symbolic_math"}, "LLMSummarizationCheckerChain": {"Summarization checker chain": "https://python.langchain.com/docs/use_cases/more/self_check/llm_summarization_checker"}, "LLMCheckerChain": {"Self-checking chain": "https://python.langchain.com/docs/use_cases/more/self_check/llm_checker"}, "ElasticsearchDatabaseChain": {"Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/qa_structured/sql", "Elasticsearch": 
"https://python.langchain.com/docs/use_cases/qa_structured/integrations/elasticsearch", "SQL": "https://python.langchain.com/docs/use_cases/sql/sql"}, "SQLRecordManager": {"Indexing": "https://python.langchain.com/docs/modules/data_connection/indexing"}, "index": {"Indexing": "https://python.langchain.com/docs/modules/data_connection/indexing"}, "BaseLoader": {"Indexing": "https://python.langchain.com/docs/modules/data_connection/indexing"}, "InMemoryStore": {"Caching": "https://python.langchain.com/docs/modules/data_connection/text_embedding/caching_embeddings", "MultiVector Retriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/multi_vector", "Parent Document Retriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/parent_document_retriever"}, "LocalFileStore": {"Caching": "https://python.langchain.com/docs/modules/data_connection/text_embedding/caching_embeddings"}, "RedisStore": {"Caching": "https://python.langchain.com/docs/modules/data_connection/text_embedding/caching_embeddings"}, "CacheBackedEmbeddings": {"Caching": "https://python.langchain.com/docs/modules/data_connection/text_embedding/caching_embeddings"}, "EnsembleRetriever": {"Ensemble Retriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/ensemble"}, "MultiVectorRetriever": {"MultiVector Retriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/multi_vector"}, "JsonKeyOutputFunctionsParser": {"MultiVector Retriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/multi_vector", "prompt_llm_parser.md": "https://python.langchain.com/docs/expression_language/cookbook/prompt_llm_parser"}, "ParentDocumentRetriever": {"Parent Document Retriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/parent_document_retriever"}, "SentenceTransformersTokenTextSplitter": {"Split by tokens ": "https://python.langchain.com/docs/modules/data_connection/document_transformers/text_splitters/split_by_token"}, "NLTKTextSplitter": {"Split by tokens ": "https://python.langchain.com/docs/modules/data_connection/document_transformers/text_splitters/split_by_token"}, "ChatMessageHistory": {"Message Memory in Agent backed by a database": "https://python.langchain.com/docs/modules/memory/agent_with_memory_in_db"}, "BaseMemory": {"Custom Memory": "https://python.langchain.com/docs/modules/memory/custom_memory"}, "ConversationKGMemory": {"Conversation Knowledge Graph": "https://python.langchain.com/docs/modules/memory/types/kg"}, "ConversationTokenBufferMemory": {"Conversation Token Buffer": "https://python.langchain.com/docs/modules/memory/types/token_buffer"}, "tracing_enabled": {"Multiple callback handlers": "https://python.langchain.com/docs/modules/callbacks/multiple_callbacks"}, "FileCallbackHandler": {"Logging to file": "https://python.langchain.com/docs/modules/callbacks/filecallbackhandler"}, "AsyncCallbackHandler": {"Async callbacks": "https://python.langchain.com/docs/modules/callbacks/async_callbacks"}, "StructuredTool": {"Multi-Input Tools": "https://python.langchain.com/docs/modules/agents/tools/multi_input_tool", "Defining Custom Tools": "https://python.langchain.com/docs/modules/agents/tools/custom_tools"}, "AsyncCallbackManagerForToolRun": {"Defining Custom Tools": "https://python.langchain.com/docs/modules/agents/tools/custom_tools"}, "CallbackManagerForToolRun": {"Defining Custom Tools": "https://python.langchain.com/docs/modules/agents/tools/custom_tools"}, "ToolException": {"Defining 
Custom Tools": "https://python.langchain.com/docs/modules/agents/tools/custom_tools"}, "format_tool_to_openai_function": {"Tools as OpenAI Functions": "https://python.langchain.com/docs/modules/agents/tools/tools_as_openai_functions"}, "RequestsGetTool": {"Tool Input Schema": "https://python.langchain.com/docs/modules/agents/tools/tool_input_validation"}, "HumanApprovalCallbackHandler": {"Human-in-the-loop Tool Validation": "https://python.langchain.com/docs/modules/agents/tools/human_approval"}, "XMLAgent": {"XML Agent": "https://python.langchain.com/docs/modules/agents/agent_types/xml_agent", "Agents": "https://python.langchain.com/docs/expression_language/cookbook/agent"}, "DocstoreExplorer": {"ReAct document store": "https://python.langchain.com/docs/modules/agents/agent_types/react_docstore"}, "ReadOnlySharedMemory": {"Shared memory across agents and tools": "https://python.langchain.com/docs/modules/agents/how_to/sharedmemory_for_tools"}, "BaseMultiActionAgent": {"Custom multi-action agent": "https://python.langchain.com/docs/modules/agents/how_to/custom_multi_action_agent"}, "FinalStreamingStdOutCallbackHandler": {"Streaming final agent output": "https://python.langchain.com/docs/modules/agents/how_to/streaming_stdout_final_only"}, "LangChainTracer": {"Async API": "https://python.langchain.com/docs/modules/agents/how_to/async_agent"}, "HumanInputChatModel": {"Human input chat model": "https://python.langchain.com/docs/modules/model_io/models/chat/human_input_chat_model"}, "CallbackManagerForLLMRun": {"Custom LLM": "https://python.langchain.com/docs/modules/model_io/models/llms/custom_llm"}, "LLM": {"Custom LLM": "https://python.langchain.com/docs/modules/model_io/models/llms/custom_llm"}, "HumanInputLLM": {"Human input LLM": "https://python.langchain.com/docs/modules/model_io/models/llms/human_input_llm"}, "OutputFixingParser": {"Retry parser": "https://python.langchain.com/docs/modules/model_io/output_parsers/retry"}, "RetryOutputParser": {"Retry parser": "https://python.langchain.com/docs/modules/model_io/output_parsers/retry"}, "RetryWithErrorOutputParser": {"Retry parser": "https://python.langchain.com/docs/modules/model_io/output_parsers/retry"}, "EnumOutputParser": {"Enum parser": "https://python.langchain.com/docs/modules/model_io/output_parsers/enum"}, "MaxMarginalRelevanceExampleSelector": {"Select by maximal marginal relevance (MMR)": "https://python.langchain.com/docs/modules/model_io/prompts/example_selectors/mmr"}, "SemanticSimilarityExampleSelector": {"Select by maximal marginal relevance (MMR)": "https://python.langchain.com/docs/modules/model_io/prompts/example_selectors/mmr", "Few-shot examples for chat models": "https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/few_shot_examples_chat"}, "FewShotPromptTemplate": {"Select by maximal marginal relevance (MMR)": "https://python.langchain.com/docs/modules/model_io/prompts/example_selectors/mmr", "Select by n-gram overlap": "https://python.langchain.com/docs/modules/model_io/prompts/example_selectors/ngram_overlap"}, "BaseExampleSelector": {"Custom example selector": "https://python.langchain.com/docs/modules/model_io/prompts/example_selectors/custom_example_selector"}, "NGramOverlapExampleSelector": {"Select by n-gram overlap": "https://python.langchain.com/docs/modules/model_io/prompts/example_selectors/ngram_overlap"}, "FewShotChatMessagePromptTemplate": {"Few-shot examples for chat models": "https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/few_shot_examples_chat"}, 
"ChatMessagePromptTemplate": {"Types of `MessagePromptTemplate`": "https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/msg_prompt_templates"}, "MultiPromptChain": {"Router": "https://python.langchain.com/docs/modules/chains/foundational/router"}, "LLMRouterChain": {"Router": "https://python.langchain.com/docs/modules/chains/foundational/router"}, "RouterOutputParser": {"Router": "https://python.langchain.com/docs/modules/chains/foundational/router"}, "EmbeddingRouterChain": {"Router": "https://python.langchain.com/docs/modules/chains/foundational/router"}, "BaseLanguageModel": {"Custom chain": "https://python.langchain.com/docs/modules/chains/how_to/custom_chain"}, "AsyncCallbackManagerForChainRun": {"Custom chain": "https://python.langchain.com/docs/modules/chains/how_to/custom_chain"}, "CallbackManagerForChainRun": {"Custom chain": "https://python.langchain.com/docs/modules/chains/how_to/custom_chain"}, "BasePromptTemplate": {"Custom chain": "https://python.langchain.com/docs/modules/chains/how_to/custom_chain"}, "create_openai_fn_chain": {"Using OpenAI functions": "https://python.langchain.com/docs/modules/chains/how_to/openai_functions"}, "create_structured_output_chain": {"Using OpenAI functions": "https://python.langchain.com/docs/modules/chains/how_to/openai_functions"}, "RunnablePassthrough": {"First we add a step to load memory": "https://python.langchain.com/docs/expression_language/cookbook/retrieval", "prompt_llm_parser.md": "https://python.langchain.com/docs/expression_language/cookbook/prompt_llm_parser", "multiple_chains.md": "https://python.langchain.com/docs/expression_language/cookbook/multiple_chains"}, "format_document": {"First we add a step to load memory": "https://python.langchain.com/docs/expression_language/cookbook/retrieval"}, "RunnableLambda": {"sql_db.md": "https://python.langchain.com/docs/expression_language/cookbook/sql_db", "Run arbitrary functions": "https://python.langchain.com/docs/expression_language/how_to/functions"}, "JsonOutputFunctionsParser": {"prompt_llm_parser.md": "https://python.langchain.com/docs/expression_language/cookbook/prompt_llm_parser"}, "RunnableConfig": {"Run arbitrary functions": "https://python.langchain.com/docs/expression_language/how_to/functions"}, "GoogleSpeechToTextLoader": {"Google Cloud Speech-to-Text": "https://python.langchain.com/docs/integrations/document_loaders/google_speech_to_text"}, "GoogleTranslateTransformer": {"Google Cloud Translation": "https://python.langchain.com/docs/integrations/document_loaders/google_translate"}} +{"SingleFileFacebookMessengerChatLoader": {"Facebook Messenger": "https://python.langchain.com/docs/integrations/chat_loaders/facebook"}, "FolderFacebookMessengerChatLoader": {"Facebook Messenger": "https://python.langchain.com/docs/integrations/chat_loaders/facebook", "Chat loaders": "https://python.langchain.com/docs/integrations/chat_loaders/index"}, "merge_chat_runs": {"Facebook Messenger": "https://python.langchain.com/docs/integrations/chat_loaders/facebook", "Slack": "https://python.langchain.com/docs/integrations/chat_loaders/slack", "WhatsApp": "https://python.langchain.com/docs/integrations/chat_loaders/whatsapp", "iMessage": "https://python.langchain.com/docs/integrations/chat_loaders/imessage", "Telegram": "https://python.langchain.com/docs/integrations/chat_loaders/telegram", "Discord": "https://python.langchain.com/docs/integrations/chat_loaders/discord"}, "map_ai_messages": {"Facebook Messenger": 
"https://python.langchain.com/docs/integrations/chat_loaders/facebook", "GMail": "https://python.langchain.com/docs/integrations/chat_loaders/gmail", "Slack": "https://python.langchain.com/docs/integrations/chat_loaders/slack", "WhatsApp": "https://python.langchain.com/docs/integrations/chat_loaders/whatsapp", "iMessage": "https://python.langchain.com/docs/integrations/chat_loaders/imessage", "Telegram": "https://python.langchain.com/docs/integrations/chat_loaders/telegram", "Discord": "https://python.langchain.com/docs/integrations/chat_loaders/discord"}, "convert_messages_for_finetuning": {"Facebook Messenger": "https://python.langchain.com/docs/integrations/chat_loaders/facebook", "Chat loaders": "https://python.langchain.com/docs/integrations/chat_loaders/index", "iMessage": "https://python.langchain.com/docs/integrations/chat_loaders/imessage"}, "ChatOpenAI": {"Facebook Messenger": "https://python.langchain.com/docs/integrations/chat_loaders/facebook", "Slack": "https://python.langchain.com/docs/integrations/chat_loaders/slack", "WhatsApp": "https://python.langchain.com/docs/integrations/chat_loaders/whatsapp", "iMessage": "https://python.langchain.com/docs/integrations/chat_loaders/imessage", "Telegram": "https://python.langchain.com/docs/integrations/chat_loaders/telegram", "Discord": "https://python.langchain.com/docs/integrations/chat_loaders/discord", "RePhraseQueryRetriever": "https://python.langchain.com/docs/integrations/retrievers/re_phrase", "Wikipedia": "https://python.langchain.com/docs/integrations/retrievers/wikipedia", "Arxiv": "https://python.langchain.com/docs/integrations/retrievers/arxiv", "ChatGPT Plugins": "https://python.langchain.com/docs/integrations/tools/chatgpt_plugins", "Human as a tool": "https://python.langchain.com/docs/integrations/tools/human_tools", "Yahoo Finance News": "https://python.langchain.com/docs/integrations/tools/yahoo_finance_news", "ArXiv": "https://python.langchain.com/docs/integrations/tools/arxiv", "Metaphor Search": "https://python.langchain.com/docs/integrations/tools/metaphor_search", "Shell (bash)": "https://python.langchain.com/docs/integrations/tools/bash", "Xata chat memory": "https://python.langchain.com/docs/integrations/memory/xata_chat_message_history", "Dynamodb Chat Message History": "https://python.langchain.com/docs/integrations/memory/dynamodb_chat_message_history", "OpenAI": "https://python.langchain.com/docs/integrations/chat/openai", "LLMonitor": "https://python.langchain.com/docs/integrations/callbacks/llmonitor", "Context": "https://python.langchain.com/docs/integrations/callbacks/context", "Label Studio": "https://python.langchain.com/docs/integrations/callbacks/labelstudio", "PromptLayer": "https://python.langchain.com/docs/integrations/callbacks/promptlayer", "CnosDB": "https://python.langchain.com/docs/integrations/providers/cnosdb", "Log10": "https://python.langchain.com/docs/integrations/providers/log10", "Flyte": "https://python.langchain.com/docs/integrations/providers/flyte", "Arthur": "https://python.langchain.com/docs/integrations/providers/arthur_tracking", "CSV": "https://python.langchain.com/docs/integrations/toolkits/csv", "Document Comparison": "https://python.langchain.com/docs/integrations/toolkits/document_comparison_toolkit", "Python": "https://python.langchain.com/docs/integrations/toolkits/python", "PowerBI Dataset": "https://python.langchain.com/docs/integrations/toolkits/powerbi", "SQL Database": "https://python.langchain.com/docs/integrations/toolkits/sql_database", "Airbyte Question 
Answering": "https://python.langchain.com/docs/integrations/toolkits/airbyte_structured_qa", "Github": "https://python.langchain.com/docs/integrations/toolkits/github", "Spark SQL": "https://python.langchain.com/docs/integrations/toolkits/spark_sql", "AINetwork": "https://python.langchain.com/docs/integrations/toolkits/ainetwork", "Pandas Dataframe": "https://python.langchain.com/docs/integrations/toolkits/pandas", "Neo4j Vector Index": "https://python.langchain.com/docs/integrations/vectorstores/neo4jvector", "OpenAI Functions Metadata Tagger": "https://python.langchain.com/docs/integrations/document_transformers/openai_metadata_tagger", "Loading documents from a YouTube url": "https://python.langchain.com/docs/integrations/document_loaders/youtube_audio", "Figma": "https://python.langchain.com/docs/integrations/document_loaders/figma", "Fallbacks": "https://python.langchain.com/docs/guides/fallbacks", "Debugging": "https://python.langchain.com/docs/guides/debugging", "LangSmith Walkthrough": "https://python.langchain.com/docs/guides/langsmith/walkthrough", "Reversible data anonymization with Microsoft Presidio": "https://python.langchain.com/docs/guides/privacy/presidio_data_anonymization/reversible", "Data anonymization with Microsoft Presidio": "https://python.langchain.com/docs/guides/privacy/presidio_data_anonymization/index", "Comparing Chain Outputs": "https://python.langchain.com/docs/guides/evaluation/examples/comparisons", "Agent Trajectory": "https://python.langchain.com/docs/guides/evaluation/trajectory/trajectory_eval", "Custom Trajectory Evaluator": "https://python.langchain.com/docs/guides/evaluation/trajectory/custom", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/tagging", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/qa_structured/sql", "Question Answering": "https://python.langchain.com/docs/use_cases/question_answering/question_answering", "Perform context-aware text splitting": "https://python.langchain.com/docs/use_cases/question_answering/how_to/document-context-aware-QA", "Conversational Retrieval Agent": "https://python.langchain.com/docs/use_cases/question_answering/how_to/conversational_retrieval_agents", "Multiple Retrieval Sources": "https://python.langchain.com/docs/use_cases/question_answering/how_to/multiple_retrieval", "Cite sources": "https://python.langchain.com/docs/use_cases/question_answering/how_to/qa_citations", "Retrieve as you generate with FLARE": "https://python.langchain.com/docs/use_cases/question_answering/how_to/flare", "Analysis of Twitter the-algorithm source code with LangChain, GPT4 and Activeloop's Deep Lake": "https://python.langchain.com/docs/use_cases/question_answering/how_to/code/twitter-the-algorithm-analysis-deeplake", "Use LangChain, GPT and Activeloop's Deep Lake to work with code base": "https://python.langchain.com/docs/use_cases/question_answering/how_to/code/code-analysis-deeplake", "Structure answers with OpenAI functions": "https://python.langchain.com/docs/use_cases/question_answering/integrations/openai_functions_retrieval_qa", "QA using Activeloop's DeepLake": "https://python.langchain.com/docs/use_cases/question_answering/integrations/semantic-search-over-chat", "Neptune Open Cypher QA Chain": "https://python.langchain.com/docs/use_cases/more/graph/neptune_cypher_qa", "NebulaGraphQAChain": "https://python.langchain.com/docs/use_cases/more/graph/graph_nebula_qa", "Memgraph QA chain": 
"https://python.langchain.com/docs/use_cases/more/graph/graph_memgraph_qa", "KuzuQAChain": "https://python.langchain.com/docs/use_cases/more/graph/graph_kuzu_qa", "HugeGraph QA Chain": "https://python.langchain.com/docs/use_cases/more/graph/graph_hugegraph_qa", "GraphSparqlQAChain": "https://python.langchain.com/docs/use_cases/more/graph/graph_sparql_qa", "Ontotext GraphDB QA Chain": "https://python.langchain.com/docs/use_cases/more/graph/graph_ontotext_graphdb_qa", "Diffbot Graph Transformer": "https://python.langchain.com/docs/use_cases/more/graph/diffbot_graphtransformer", "ArangoDB QA chain": "https://python.langchain.com/docs/use_cases/more/graph/graph_arangodb_qa", "Neo4j DB QA chain": "https://python.langchain.com/docs/use_cases/more/graph/graph_cypher_qa", "FalkorDBQAChain": "https://python.langchain.com/docs/use_cases/more/graph/graph_falkordb_qa", "Agents": "https://python.langchain.com/docs/use_cases/more/agents/agents", "AutoGPT": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/autogpt", "!pip install bs4": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/marathon_times", "Wikibase Agent": "https://python.langchain.com/docs/use_cases/more/agents/agents/wikibase_agent", "SalesGPT - Your Context-Aware AI Sales Assistant With Knowledge Base": "https://python.langchain.com/docs/use_cases/more/agents/agents/sales_agent_with_context", "CAMEL Role-Playing Autonomous Cooperative Agents": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/camel_role_playing", "Multi-Agent Simulated Environment: Petting Zoo": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/petting_zoo", "Multi-agent decentralized speaker selection": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/multiagent_bidding", "Multi-agent authoritarian speaker selection": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/multiagent_authoritarian", "Generative Agents in LangChain": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/characters", "Two-Player Dungeons & Dragons": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/two_player_dnd", "Multi-Player Dungeons & Dragons": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/multi_player_dnd", "Simulated Environment: Gymnasium": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/gymnasium", "Agent Debates with Tools": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/two_agent_debate_tools", "How to use a SmartLLMChain": "https://python.langchain.com/docs/use_cases/more/self_check/smart_llm", "Vector SQL Retriever with MyScale": "https://python.langchain.com/docs/use_cases/qa_structured/integrations/myscale_vector_sql", "Elasticsearch": "https://python.langchain.com/docs/use_cases/qa_structured/integrations/elasticsearch", "SQL": "https://python.langchain.com/docs/use_cases/sql/sql", "MultiVector Retriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/multi_vector", "MultiQueryRetriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/MultiQueryRetriever", "WebResearchRetriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/web_research", "Memory in LLMChain": "https://python.langchain.com/docs/modules/memory/adding_memory", "Custom callback handlers": "https://python.langchain.com/docs/modules/callbacks/custom_callbacks", "Async 
callbacks": "https://python.langchain.com/docs/modules/callbacks/async_callbacks", "Defining Custom Tools": "https://python.langchain.com/docs/modules/agents/tools/custom_tools", "Tools as OpenAI Functions": "https://python.langchain.com/docs/modules/agents/tools/tools_as_openai_functions", "OpenAI Multi Functions Agent": "https://python.langchain.com/docs/modules/agents/agent_types/openai_multi_functions_agent", "Handle parsing errors": "https://python.langchain.com/docs/modules/agents/how_to/handle_parsing_errors", "Running Agent as an Iterator": "https://python.langchain.com/docs/modules/agents/how_to/agent_iter", "Add Memory to OpenAI Functions Agent": "https://python.langchain.com/docs/modules/agents/how_to/add_memory_openai_functions", "Custom functions with OpenAI Functions Agent": "https://python.langchain.com/docs/modules/agents/how_to/custom-functions-with-openai-functions-agent", "Use ToolKits with OpenAI Functions": "https://python.langchain.com/docs/modules/agents/how_to/use_toolkits_with_openai_functions", "Retry parser": "https://python.langchain.com/docs/modules/model_io/output_parsers/retry", "Pydantic (JSON) parser": "https://python.langchain.com/docs/modules/model_io/output_parsers/pydantic", "Prompt pipelining": "https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/prompts_pipelining", "Connecting to a Feature Store": "https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/connecting_to_a_feature_store", "Custom chain": "https://python.langchain.com/docs/modules/chains/how_to/custom_chain", "Using OpenAI functions": "https://python.langchain.com/docs/modules/chains/how_to/openai_functions", "interface.md": "https://python.langchain.com/docs/expression_language/interface", "First we add a step to load memory": "https://python.langchain.com/docs/expression_language/cookbook/retrieval", "sql_db.md": "https://python.langchain.com/docs/expression_language/cookbook/sql_db", "prompt_llm_parser.md": "https://python.langchain.com/docs/expression_language/cookbook/prompt_llm_parser", "Adding memory": "https://python.langchain.com/docs/expression_language/cookbook/memory", "multiple_chains.md": "https://python.langchain.com/docs/expression_language/cookbook/multiple_chains", "Code writing": "https://python.langchain.com/docs/expression_language/cookbook/code_writing", "Using tools": "https://python.langchain.com/docs/expression_language/cookbook/tools", "Configure Runnable traces": "https://python.langchain.com/docs/expression_language/how_to/trace_config"}, "ChatPromptTemplate": {"Facebook Messenger": "https://python.langchain.com/docs/integrations/chat_loaders/facebook", "Chat loaders": "https://python.langchain.com/docs/integrations/chat_loaders/index", "iMessage": "https://python.langchain.com/docs/integrations/chat_loaders/imessage", "Anthropic": "https://python.langchain.com/docs/integrations/chat/anthropic", "\ud83d\ude85 LiteLLM": "https://python.langchain.com/docs/integrations/chat/litellm", "Konko": "https://python.langchain.com/docs/integrations/chat/konko", "OpenAI": "https://python.langchain.com/docs/integrations/chat/openai", "Google Cloud Platform Vertex AI PaLM ": "https://python.langchain.com/docs/integrations/chat/google_vertex_ai_palm", "JinaChat": "https://python.langchain.com/docs/integrations/chat/jinachat", "Context": "https://python.langchain.com/docs/integrations/callbacks/context", "OpenAI Functions Metadata Tagger": "https://python.langchain.com/docs/integrations/document_transformers/openai_metadata_tagger", 
"Figma": "https://python.langchain.com/docs/integrations/document_loaders/figma", "Fireworks": "https://python.langchain.com/docs/integrations/llms/fireworks", "Fallbacks": "https://python.langchain.com/docs/guides/fallbacks", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/tagging", "Multiple Retrieval Sources": "https://python.langchain.com/docs/use_cases/question_answering/how_to/multiple_retrieval", "Structure answers with OpenAI functions": "https://python.langchain.com/docs/use_cases/question_answering/integrations/openai_functions_retrieval_qa", "Multi-agent authoritarian speaker selection": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/multiagent_authoritarian", "MultiVector Retriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/multi_vector", "Memory in LLMChain": "https://python.langchain.com/docs/modules/memory/adding_memory", "Retry parser": "https://python.langchain.com/docs/modules/model_io/output_parsers/retry", "Pydantic (JSON) parser": "https://python.langchain.com/docs/modules/model_io/output_parsers/pydantic", "Few-shot examples for chat models": "https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/few_shot_examples_chat", "Prompt pipelining": "https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/prompts_pipelining", "Using OpenAI functions": "https://python.langchain.com/docs/modules/chains/how_to/openai_functions", "interface.md": "https://python.langchain.com/docs/expression_language/interface", "First we add a step to load memory": "https://python.langchain.com/docs/expression_language/cookbook/retrieval", "sql_db.md": "https://python.langchain.com/docs/expression_language/cookbook/sql_db", "prompt_llm_parser.md": "https://python.langchain.com/docs/expression_language/cookbook/prompt_llm_parser", "Adding memory": "https://python.langchain.com/docs/expression_language/cookbook/memory", "multiple_chains.md": "https://python.langchain.com/docs/expression_language/cookbook/multiple_chains", "Code writing": "https://python.langchain.com/docs/expression_language/cookbook/code_writing", "Using tools": "https://python.langchain.com/docs/expression_language/cookbook/tools", "Adding moderation": "https://python.langchain.com/docs/expression_language/cookbook/moderation"}, "StrOutputParser": {"Facebook Messenger": "https://python.langchain.com/docs/integrations/chat_loaders/facebook", "Chat loaders": "https://python.langchain.com/docs/integrations/chat_loaders/index", "iMessage": "https://python.langchain.com/docs/integrations/chat_loaders/imessage", "OpaquePrompts": "https://python.langchain.com/docs/integrations/llms/opaqueprompts", "Fallbacks": "https://python.langchain.com/docs/guides/fallbacks", "MultiVector Retriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/multi_vector", "First we add a step to load memory": "https://python.langchain.com/docs/expression_language/cookbook/retrieval", "sql_db.md": "https://python.langchain.com/docs/expression_language/cookbook/sql_db", "prompt_llm_parser.md": "https://python.langchain.com/docs/expression_language/cookbook/prompt_llm_parser", "multiple_chains.md": "https://python.langchain.com/docs/expression_language/cookbook/multiple_chains", "Code writing": "https://python.langchain.com/docs/expression_language/cookbook/code_writing", "Using tools": "https://python.langchain.com/docs/expression_language/cookbook/tools", "Configure Runnable traces": 
"https://python.langchain.com/docs/expression_language/how_to/trace_config"}, "AIMessage": {"Twitter (via Apify)": "https://python.langchain.com/docs/integrations/chat_loaders/twitter", "Zep": "https://python.langchain.com/docs/integrations/retrievers/zep_memorystore", "Zep Memory": "https://python.langchain.com/docs/integrations/memory/zep_memory", "SQL Chat Message History": "https://python.langchain.com/docs/integrations/memory/sql_chat_message_history", "Anthropic": "https://python.langchain.com/docs/integrations/chat/anthropic", "\ud83d\ude85 LiteLLM": "https://python.langchain.com/docs/integrations/chat/litellm", "Konko": "https://python.langchain.com/docs/integrations/chat/konko", "OpenAI": "https://python.langchain.com/docs/integrations/chat/openai", "JinaChat": "https://python.langchain.com/docs/integrations/chat/jinachat", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/chatbots", "CAMEL Role-Playing Autonomous Cooperative Agents": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/camel_role_playing", "Multi-agent decentralized speaker selection": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/multiagent_bidding", "Multi-agent authoritarian speaker selection": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/multiagent_authoritarian", "Multi-Player Dungeons & Dragons": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/multi_player_dnd", "Simulated Environment: Gymnasium": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/gymnasium", "Agent Debates with Tools": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/two_agent_debate_tools", "Prompt pipelining": "https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/prompts_pipelining"}, "convert_message_to_dict": {"Twitter (via Apify)": "https://python.langchain.com/docs/integrations/chat_loaders/twitter"}, "GMailLoader": {"GMail": "https://python.langchain.com/docs/integrations/chat_loaders/gmail"}, "SlackChatLoader": {"Slack": "https://python.langchain.com/docs/integrations/chat_loaders/slack"}, "ChatSession": {"Slack": "https://python.langchain.com/docs/integrations/chat_loaders/slack", "WhatsApp": "https://python.langchain.com/docs/integrations/chat_loaders/whatsapp", "iMessage": "https://python.langchain.com/docs/integrations/chat_loaders/imessage", "Telegram": "https://python.langchain.com/docs/integrations/chat_loaders/telegram", "Discord": "https://python.langchain.com/docs/integrations/chat_loaders/discord"}, "WhatsAppChatLoader": {"WhatsApp": "https://python.langchain.com/docs/integrations/providers/whatsapp", "WhatsApp Chat": "https://python.langchain.com/docs/integrations/document_loaders/whatsapp_chat"}, "IMessageChatLoader": {"iMessage": "https://python.langchain.com/docs/integrations/chat_loaders/imessage"}, "TelegramChatLoader": {"Telegram": "https://python.langchain.com/docs/integrations/chat_loaders/telegram"}, "base": {"Discord": "https://python.langchain.com/docs/integrations/chat_loaders/discord"}, "HuggingFaceBgeEmbeddings": {"BGE on Hugging Face": "https://python.langchain.com/docs/integrations/text_embedding/bge_huggingface"}, "XinferenceEmbeddings": {"Xorbits inference (Xinference)": "https://python.langchain.com/docs/integrations/text_embedding/xinference"}, "DeepInfraEmbeddings": {"DeepInfra": "https://python.langchain.com/docs/integrations/text_embedding/deepinfra"}, 
"HuggingFaceEmbeddings": {"Hugging Face": "https://python.langchain.com/docs/integrations/providers/huggingface", "Sentence Transformers": "https://python.langchain.com/docs/integrations/text_embedding/sentence_transformers", "LOTR (Merger Retriever)": "https://python.langchain.com/docs/integrations/retrievers/merger_retriever", "ScaNN": "https://python.langchain.com/docs/integrations/vectorstores/scann", "Annoy": "https://python.langchain.com/docs/integrations/vectorstores/annoy", "your local model path": "https://python.langchain.com/docs/integrations/vectorstores/vearch", "Pairwise Embedding Distance ": "https://python.langchain.com/docs/guides/evaluation/comparison/pairwise_embedding_distance", "Embedding Distance": "https://python.langchain.com/docs/guides/evaluation/string/embedding_distance", "Lost in the middle: The problem with long contexts": "https://python.langchain.com/docs/modules/data_connection/document_transformers/post_retrieval/long_context_reorder"}, "HuggingFaceInferenceAPIEmbeddings": {"Hugging Face": "https://python.langchain.com/docs/integrations/text_embedding/huggingfacehub"}, "GPT4AllEmbeddings": {"GPT4All": "https://python.langchain.com/docs/integrations/text_embedding/gpt4all", "Ollama": "https://python.langchain.com/docs/integrations/llms/ollama", "Use local LLMs": "https://python.langchain.com/docs/use_cases/question_answering/how_to/local_retrieval_qa", "WebResearchRetriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/web_research"}, "MosaicMLInstructorEmbeddings": {"MosaicML": "https://python.langchain.com/docs/integrations/text_embedding/mosaicml"}, "OpenAIEmbeddings": {"OpenAI": "https://python.langchain.com/docs/integrations/providers/openai", "AzureOpenAI": "https://python.langchain.com/docs/integrations/text_embedding/azureopenai", "RePhraseQueryRetriever": "https://python.langchain.com/docs/integrations/retrievers/re_phrase", "Cohere Reranker": "https://python.langchain.com/docs/integrations/retrievers/cohere-reranker", "kNN": "https://python.langchain.com/docs/integrations/retrievers/knn", "DocArray Retriever": "https://python.langchain.com/docs/integrations/retrievers/docarray_retriever", "SVM": "https://python.langchain.com/docs/integrations/retrievers/svm", "Pinecone Hybrid Search": "https://python.langchain.com/docs/integrations/retrievers/pinecone_hybrid_search", "LOTR (Merger Retriever)": "https://python.langchain.com/docs/integrations/retrievers/merger_retriever", "Xata chat memory": "https://python.langchain.com/docs/integrations/memory/xata_chat_message_history", "Confident": "https://python.langchain.com/docs/integrations/callbacks/confident", "Azure OpenAI": "https://python.langchain.com/docs/integrations/providers/azure_openai", "Document Comparison": "https://python.langchain.com/docs/integrations/toolkits/document_comparison_toolkit", "Vectorstore": "https://python.langchain.com/docs/integrations/toolkits/vectorstore", "LanceDB": "https://python.langchain.com/docs/integrations/vectorstores/lancedb", "Weaviate": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/weaviate_self_query", "Xata": "https://python.langchain.com/docs/integrations/vectorstores/xata", "Redis": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/redis_self_query", "PGVector": "https://python.langchain.com/docs/integrations/vectorstores/pgvector", "Rockset": "https://python.langchain.com/docs/integrations/vectorstores/rockset", "DingoDB": 
"https://python.langchain.com/docs/integrations/vectorstores/dingo", "Zilliz": "https://python.langchain.com/docs/integrations/vectorstores/zilliz", "SingleStoreDB": "https://python.langchain.com/docs/integrations/vectorstores/singlestoredb", "Typesense": "https://python.langchain.com/docs/integrations/vectorstores/typesense", "Atlas": "https://python.langchain.com/docs/integrations/vectorstores/atlas", "Activeloop Deep Lake": "https://python.langchain.com/docs/integrations/vectorstores/activeloop_deeplake", "Neo4j Vector Index": "https://python.langchain.com/docs/integrations/vectorstores/neo4jvector", "Chroma": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/chroma_self_query", "Alibaba Cloud OpenSearch": "https://python.langchain.com/docs/integrations/vectorstores/alibabacloud_opensearch", "Baidu Cloud VectorSearch": "https://python.langchain.com/docs/integrations/vectorstores/baiducloud_vector_search", "StarRocks": "https://python.langchain.com/docs/integrations/vectorstores/starrocks", "scikit-learn": "https://python.langchain.com/docs/integrations/vectorstores/sklearn", "DocArray HnswSearch": "https://python.langchain.com/docs/integrations/vectorstores/docarray_hnsw", "MyScale": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/myscale_self_query", "ClickHouse": "https://python.langchain.com/docs/integrations/vectorstores/clickhouse", "Qdrant": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/qdrant_self_query", "Tigris": "https://python.langchain.com/docs/integrations/vectorstores/tigris", "Supabase (Postgres)": "https://python.langchain.com/docs/integrations/vectorstores/supabase", "OpenSearch": "https://python.langchain.com/docs/integrations/vectorstores/opensearch", "Pinecone": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/pinecone", "Azure Cognitive Search": "https://python.langchain.com/docs/integrations/vectorstores/azuresearch", "Cassandra": "https://python.langchain.com/docs/integrations/vectorstores/cassandra", "USearch": "https://python.langchain.com/docs/integrations/vectorstores/usearch", "Milvus": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/milvus_self_query", "Elasticsearch": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/elasticsearch_self_query", "DocArray InMemorySearch": "https://python.langchain.com/docs/integrations/vectorstores/docarray_in_memory", "Postgres Embedding": "https://python.langchain.com/docs/integrations/vectorstores/pgembedding", "Faiss": "https://python.langchain.com/docs/integrations/vectorstores/faiss", "Epsilla": "https://python.langchain.com/docs/integrations/vectorstores/epsilla", "AnalyticDB": "https://python.langchain.com/docs/integrations/vectorstores/analyticdb", "Hologres": "https://python.langchain.com/docs/integrations/vectorstores/hologres", "MongoDB Atlas": "https://python.langchain.com/docs/integrations/vectorstores/mongodb_atlas", "Meilisearch": "https://python.langchain.com/docs/integrations/vectorstores/meilisearch", "Loading documents from a YouTube url": "https://python.langchain.com/docs/integrations/document_loaders/youtube_audio", "Psychic": "https://python.langchain.com/docs/integrations/document_loaders/psychic", "Docugami": "https://python.langchain.com/docs/integrations/document_loaders/docugami", "LLM Caching integrations": "https://python.langchain.com/docs/integrations/llms/llm_caching", "Set env var OPENAI_API_KEY or 
load from a .env file:": "https://python.langchain.com/docs/use_cases/chatbots", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/qa_structured/sql", "Question Answering": "https://python.langchain.com/docs/use_cases/question_answering/question_answering", "Perform context-aware text splitting": "https://python.langchain.com/docs/use_cases/question_answering/how_to/document-context-aware-QA", "Conversational Retrieval Agent": "https://python.langchain.com/docs/use_cases/question_answering/how_to/conversational_retrieval_agents", "Retrieve from vector stores directly": "https://python.langchain.com/docs/use_cases/question_answering/how_to/vector_db_text_generation", "Retrieve as you generate with FLARE": "https://python.langchain.com/docs/use_cases/question_answering/how_to/flare", "Improve document indexing with HyDE": "https://python.langchain.com/docs/use_cases/question_answering/how_to/hyde", "Analysis of Twitter the-algorithm source code with LangChain, GPT4 and Activeloop's Deep Lake": "https://python.langchain.com/docs/use_cases/question_answering/how_to/code/twitter-the-algorithm-analysis-deeplake", "Use LangChain, GPT and Activeloop's Deep Lake to work with code base": "https://python.langchain.com/docs/use_cases/question_answering/how_to/code/code-analysis-deeplake", "Structure answers with OpenAI functions": "https://python.langchain.com/docs/use_cases/question_answering/integrations/openai_functions_retrieval_qa", "QA using Activeloop's DeepLake": "https://python.langchain.com/docs/use_cases/question_answering/integrations/semantic-search-over-chat", "Agents": "https://python.langchain.com/docs/use_cases/more/agents/agents", "AutoGPT": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/autogpt", "BabyAGI User Guide": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/baby_agi", "BabyAGI with Tools": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/baby_agi_with_agent", "!pip install bs4": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/marathon_times", "Plug-and-Plai": "https://python.langchain.com/docs/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval_using_plugnplai", "SalesGPT - Your Context-Aware AI Sales Assistant With Knowledge Base": "https://python.langchain.com/docs/use_cases/more/agents/agents/sales_agent_with_context", "Custom Agent with PlugIn Retrieval": "https://python.langchain.com/docs/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval", "Generative Agents in LangChain": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/characters", "SQL": "https://python.langchain.com/docs/use_cases/sql/sql", "Indexing": "https://python.langchain.com/docs/modules/data_connection/indexing", "Caching": "https://python.langchain.com/docs/modules/data_connection/text_embedding/caching_embeddings", "MultiVector Retriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/multi_vector", "MultiQueryRetriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/MultiQueryRetriever", "Parent Document Retriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/parent_document_retriever", "WebResearchRetriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/web_research", "Supabase": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/supabase_self_query", "Deep Lake": 
"https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/activeloop_deeplake_self_query", "Memory in the Multi-Input Chain": "https://python.langchain.com/docs/modules/memory/adding_memory_chain_multiple_inputs", "Combine agents and vector stores": "https://python.langchain.com/docs/modules/agents/how_to/agent_vectorstore", "Custom agent with tool retrieval": "https://python.langchain.com/docs/modules/agents/how_to/custom_agent_with_tool_retrieval", "Select by maximal marginal relevance (MMR)": "https://python.langchain.com/docs/modules/model_io/prompts/example_selectors/mmr", "Few-shot examples for chat models": "https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/few_shot_examples_chat", "Loading from LangChainHub": "https://python.langchain.com/docs/modules/chains/how_to/from_hub", "First we add a step to load memory": "https://python.langchain.com/docs/expression_language/cookbook/retrieval"}, "VertexAIEmbeddings": {"Google Vertex AI PaLM ": "https://python.langchain.com/docs/integrations/text_embedding/google_vertex_ai_palm"}, "BedrockEmbeddings": {"Bedrock": "https://python.langchain.com/docs/integrations/providers/bedrock"}, "LlamaCppEmbeddings": {"Llama-cpp": "https://python.langchain.com/docs/integrations/text_embedding/llamacpp", "Llama.cpp": "https://python.langchain.com/docs/integrations/providers/llamacpp"}, "NLPCloudEmbeddings": {"NLP Cloud": "https://python.langchain.com/docs/integrations/text_embedding/nlp_cloud", "NLPCloud": "https://python.langchain.com/docs/integrations/providers/nlpcloud"}, "SpacyEmbeddings": {"SpaCy": "https://python.langchain.com/docs/integrations/text_embedding/spacy_embedding", "spaCy": "https://python.langchain.com/docs/integrations/providers/spacy"}, "HuggingFaceInstructEmbeddings": {"InstructEmbeddings": "https://python.langchain.com/docs/integrations/text_embedding/instruct_embeddings", "Vector SQL Retriever with MyScale": "https://python.langchain.com/docs/use_cases/qa_structured/integrations/myscale_vector_sql"}, "QianfanEmbeddingsEndpoint": {"Baidu Qianfan": "https://python.langchain.com/docs/integrations/text_embedding/baidu_qianfan_endpoint"}, "CohereEmbeddings": {"Cohere": "https://python.langchain.com/docs/integrations/providers/cohere", "Memory in the Multi-Input Chain": "https://python.langchain.com/docs/modules/memory/adding_memory_chain_multiple_inputs", "Router": "https://python.langchain.com/docs/modules/chains/foundational/router"}, "EdenAiEmbeddings": {"EDEN AI": "https://python.langchain.com/docs/integrations/text_embedding/edenai"}, "SentenceTransformerEmbeddings": {"Sentence Transformers": "https://python.langchain.com/docs/integrations/text_embedding/sentence_transformers", "sqlite-vss": "https://python.langchain.com/docs/integrations/vectorstores/sqlitevss", "Chroma": "https://python.langchain.com/docs/integrations/vectorstores/chroma"}, "ClarifaiEmbeddings": {"Clarifai": "https://python.langchain.com/docs/integrations/providers/clarifai"}, "AwaEmbeddings": {"AwaDB": "https://python.langchain.com/docs/integrations/providers/awadb"}, "MiniMaxEmbeddings": {"MiniMax": "https://python.langchain.com/docs/integrations/text_embedding/minimax", "Minimax": "https://python.langchain.com/docs/integrations/providers/minimax"}, "FakeEmbeddings": {"Fake Embeddings": "https://python.langchain.com/docs/integrations/text_embedding/fake", "DocArray Retriever": "https://python.langchain.com/docs/integrations/retrievers/docarray_retriever", "Vectara": 
"https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/vectara_self_query", "Tair": "https://python.langchain.com/docs/integrations/vectorstores/tair", "Tencent Cloud VectorDB": "https://python.langchain.com/docs/integrations/vectorstores/tencentvectordb"}, "ElasticsearchEmbeddings": {"Elasticsearch": "https://python.langchain.com/docs/integrations/text_embedding/elasticsearch"}, "SelfHostedEmbeddings": {"Self Hosted": "https://python.langchain.com/docs/integrations/text_embedding/self-hosted"}, "SelfHostedHuggingFaceEmbeddings": {"Self Hosted": "https://python.langchain.com/docs/integrations/text_embedding/self-hosted"}, "SelfHostedHuggingFaceInstructEmbeddings": {"Self Hosted": "https://python.langchain.com/docs/integrations/text_embedding/self-hosted"}, "EmbaasEmbeddings": {"Embaas": "https://python.langchain.com/docs/integrations/text_embedding/embaas"}, "JinaEmbeddings": {"Jina": "https://python.langchain.com/docs/integrations/providers/jina"}, "AlephAlphaAsymmetricSemanticEmbedding": {"Aleph Alpha": "https://python.langchain.com/docs/integrations/providers/aleph_alpha"}, "AlephAlphaSymmetricSemanticEmbedding": {"Aleph Alpha": "https://python.langchain.com/docs/integrations/providers/aleph_alpha"}, "DashScopeEmbeddings": {"DashScope": "https://python.langchain.com/docs/integrations/text_embedding/dashscope", "DashVector": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/dashvector"}, "TensorflowHubEmbeddings": {"TensorflowHub": "https://python.langchain.com/docs/integrations/text_embedding/tensorflowhub", "ScaNN": "https://python.langchain.com/docs/integrations/vectorstores/scann"}, "ModelScopeEmbeddings": {"ModelScope": "https://python.langchain.com/docs/integrations/providers/modelscope"}, "SagemakerEndpointEmbeddings": {"SageMaker": "https://python.langchain.com/docs/integrations/text_embedding/sagemaker-endpoint", "SageMaker Endpoint": "https://python.langchain.com/docs/integrations/providers/sagemaker_endpoint"}, "EmbeddingsContentHandler": {"SageMaker": "https://python.langchain.com/docs/integrations/text_embedding/sagemaker-endpoint"}, "LocalAIEmbeddings": {"LocalAI": "https://python.langchain.com/docs/integrations/text_embedding/localai"}, "WebBaseLoader": {"RePhraseQueryRetriever": "https://python.langchain.com/docs/integrations/retrievers/re_phrase", "Ollama": "https://python.langchain.com/docs/integrations/llms/ollama", "Vectorstore": "https://python.langchain.com/docs/integrations/toolkits/vectorstore", "Zep": "https://python.langchain.com/docs/integrations/vectorstores/zep", "WebBaseLoader": "https://python.langchain.com/docs/integrations/document_loaders/web_base", "MergeDocLoader": "https://python.langchain.com/docs/integrations/document_loaders/merge_doc_loader", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/chatbots", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/summarization", "Question Answering": "https://python.langchain.com/docs/use_cases/question_answering/question_answering", "Use local LLMs": "https://python.langchain.com/docs/use_cases/question_answering/how_to/local_retrieval_qa", "MultiQueryRetriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/MultiQueryRetriever", "Combine agents and vector stores": "https://python.langchain.com/docs/modules/agents/how_to/agent_vectorstore"}, "RecursiveCharacterTextSplitter": {"RePhraseQueryRetriever": 
"https://python.langchain.com/docs/integrations/retrievers/re_phrase", "Cohere Reranker": "https://python.langchain.com/docs/integrations/retrievers/cohere-reranker", "Ollama": "https://python.langchain.com/docs/integrations/llms/ollama", "Zep": "https://python.langchain.com/docs/integrations/vectorstores/zep", "your local model path": "https://python.langchain.com/docs/integrations/vectorstores/vearch", "Loading documents from a YouTube url": "https://python.langchain.com/docs/integrations/document_loaders/youtube_audio", "Source Code": "https://python.langchain.com/docs/integrations/document_loaders/source_code", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/chatbots", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/code_understanding", "Question Answering": "https://python.langchain.com/docs/use_cases/question_answering/question_answering", "Perform context-aware text splitting": "https://python.langchain.com/docs/use_cases/question_answering/how_to/document-context-aware-QA", "Use local LLMs": "https://python.langchain.com/docs/use_cases/question_answering/how_to/local_retrieval_qa", "QA using Activeloop's DeepLake": "https://python.langchain.com/docs/use_cases/question_answering/integrations/semantic-search-over-chat", "!pip install bs4": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/marathon_times", "MultiVector Retriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/multi_vector", "MultiQueryRetriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/MultiQueryRetriever", "Parent Document Retriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/parent_document_retriever", "MarkdownHeaderTextSplitter": "https://python.langchain.com/docs/modules/data_connection/document_transformers/text_splitters/markdown_header_metadata"}, "Chroma": {"RePhraseQueryRetriever": "https://python.langchain.com/docs/integrations/retrievers/re_phrase", "LOTR (Merger Retriever)": "https://python.langchain.com/docs/integrations/retrievers/merger_retriever", "Ollama": "https://python.langchain.com/docs/integrations/llms/ollama", "Confident": "https://python.langchain.com/docs/integrations/callbacks/confident", "Chroma": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/chroma_self_query", "Vectorstore": "https://python.langchain.com/docs/integrations/toolkits/vectorstore", "StarRocks": "https://python.langchain.com/docs/integrations/vectorstores/starrocks", "Psychic": "https://python.langchain.com/docs/integrations/document_loaders/psychic", "Docugami": "https://python.langchain.com/docs/integrations/document_loaders/docugami", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/chatbots", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/code_understanding", "Question Answering": "https://python.langchain.com/docs/use_cases/question_answering/question_answering", "Perform context-aware text splitting": "https://python.langchain.com/docs/use_cases/question_answering/how_to/document-context-aware-QA", "Use local LLMs": "https://python.langchain.com/docs/use_cases/question_answering/how_to/local_retrieval_qa", "Retrieve from vector stores directly": "https://python.langchain.com/docs/use_cases/question_answering/how_to/vector_db_text_generation", "Improve document indexing with 
HyDE": "https://python.langchain.com/docs/use_cases/question_answering/how_to/hyde", "Structure answers with OpenAI functions": "https://python.langchain.com/docs/use_cases/question_answering/integrations/openai_functions_retrieval_qa", "SalesGPT - Your Context-Aware AI Sales Assistant With Knowledge Base": "https://python.langchain.com/docs/use_cases/more/agents/agents/sales_agent_with_context", "MultiVector Retriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/multi_vector", "MultiQueryRetriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/MultiQueryRetriever", "Parent Document Retriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/parent_document_retriever", "WebResearchRetriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/web_research", "Lost in the middle: The problem with long contexts": "https://python.langchain.com/docs/modules/data_connection/document_transformers/post_retrieval/long_context_reorder", "Memory in the Multi-Input Chain": "https://python.langchain.com/docs/modules/memory/adding_memory_chain_multiple_inputs", "Combine agents and vector stores": "https://python.langchain.com/docs/modules/agents/how_to/agent_vectorstore", "Few-shot examples for chat models": "https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/few_shot_examples_chat", "Router": "https://python.langchain.com/docs/modules/chains/foundational/router", "Loading from LangChainHub": "https://python.langchain.com/docs/modules/chains/how_to/from_hub"}, "RePhraseQueryRetriever": {"RePhraseQueryRetriever": "https://python.langchain.com/docs/integrations/retrievers/re_phrase"}, "PromptTemplate": {"RePhraseQueryRetriever": "https://python.langchain.com/docs/integrations/retrievers/re_phrase", "Zapier Natural Language Actions": "https://python.langchain.com/docs/integrations/tools/zapier", "Dall-E Image Generator": "https://python.langchain.com/docs/integrations/tools/dalle_image_generator", "Streamlit Chat Message History": "https://python.langchain.com/docs/integrations/memory/streamlit_chat_message_history", "Context": "https://python.langchain.com/docs/integrations/callbacks/context", "Argilla": "https://python.langchain.com/docs/integrations/callbacks/argilla", "Comet": "https://python.langchain.com/docs/integrations/providers/comet_tracking", "Aim": "https://python.langchain.com/docs/integrations/providers/aim_tracking", "Weights & Biases": "https://python.langchain.com/docs/integrations/providers/wandb_tracking", "SageMaker Tracking": "https://python.langchain.com/docs/integrations/providers/sagemaker_tracking", "Rebuff": "https://python.langchain.com/docs/integrations/providers/rebuff", "MLflow": "https://python.langchain.com/docs/integrations/providers/mlflow_tracking", "Flyte": "https://python.langchain.com/docs/integrations/providers/flyte", "Vectara Text Generation": "https://python.langchain.com/docs/integrations/providers/vectara/vectara_text_generation", "Natural Language APIs": "https://python.langchain.com/docs/integrations/toolkits/openapi_nla", "your local model path": "https://python.langchain.com/docs/integrations/vectorstores/vearch", "Google Drive": "https://python.langchain.com/docs/integrations/document_loaders/google_drive", "Google Vertex AI PaLM ": "https://python.langchain.com/docs/integrations/llms/google_vertex_ai_palm", "Predibase": "https://python.langchain.com/docs/integrations/llms/predibase", "Hugging Face Local Pipelines": 
"https://python.langchain.com/docs/integrations/llms/huggingface_pipelines", "Eden AI": "https://python.langchain.com/docs/integrations/llms/edenai", "Fallbacks": "https://python.langchain.com/docs/guides/fallbacks", "Reversible data anonymization with Microsoft Presidio": "https://python.langchain.com/docs/guides/privacy/presidio_data_anonymization/reversible", "Data anonymization with Microsoft Presidio": "https://python.langchain.com/docs/guides/privacy/presidio_data_anonymization/index", "Removing logical fallacies from model output": "https://python.langchain.com/docs/guides/safety/logical_fallacy_chain", "Amazon Comprehend Moderation Chain": "https://python.langchain.com/docs/guides/safety/amazon_comprehend_chain", "Pairwise String Comparison": "https://python.langchain.com/docs/guides/evaluation/comparison/pairwise_string", "Criteria Evaluation": "https://python.langchain.com/docs/guides/evaluation/string/criteria_eval_chain", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/qa_structured/sql", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/apis", "Question Answering": "https://python.langchain.com/docs/use_cases/question_answering/question_answering", "Retrieve from vector stores directly": "https://python.langchain.com/docs/use_cases/question_answering/how_to/vector_db_text_generation", "Improve document indexing with HyDE": "https://python.langchain.com/docs/use_cases/question_answering/how_to/hyde", "Structure answers with OpenAI functions": "https://python.langchain.com/docs/use_cases/question_answering/integrations/openai_functions_retrieval_qa", "Neo4j DB QA chain": "https://python.langchain.com/docs/use_cases/more/graph/graph_cypher_qa", "Multi-agent authoritarian speaker selection": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/multiagent_authoritarian", "Agent Debates with Tools": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/two_agent_debate_tools", "Bash chain": "https://python.langchain.com/docs/use_cases/more/code_writing/llm_bash", "How to use a SmartLLMChain": "https://python.langchain.com/docs/use_cases/more/self_check/smart_llm", "Elasticsearch": "https://python.langchain.com/docs/use_cases/qa_structured/integrations/elasticsearch", "SQL": "https://python.langchain.com/docs/use_cases/sql/sql", "MultiQueryRetriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/MultiQueryRetriever", "WebResearchRetriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/web_research", "Lost in the middle: The problem with long contexts": "https://python.langchain.com/docs/modules/data_connection/document_transformers/post_retrieval/long_context_reorder", "Custom Memory": "https://python.langchain.com/docs/modules/memory/custom_memory", "Memory in the Multi-Input Chain": "https://python.langchain.com/docs/modules/memory/adding_memory_chain_multiple_inputs", "Memory in LLMChain": "https://python.langchain.com/docs/modules/memory/adding_memory", "Multiple Memory classes": "https://python.langchain.com/docs/modules/memory/multiple_memory", "Customizing Conversational Memory": "https://python.langchain.com/docs/modules/memory/conversational_customization", "Conversation Knowledge Graph": "https://python.langchain.com/docs/modules/memory/types/kg", "Logging to file": "https://python.langchain.com/docs/modules/callbacks/filecallbackhandler", "Retry parser": 
"https://python.langchain.com/docs/modules/model_io/output_parsers/retry", "Datetime parser": "https://python.langchain.com/docs/modules/model_io/output_parsers/datetime", "Pydantic (JSON) parser": "https://python.langchain.com/docs/modules/model_io/output_parsers/pydantic", "Select by maximal marginal relevance (MMR)": "https://python.langchain.com/docs/modules/model_io/prompts/example_selectors/mmr", "Select by n-gram overlap": "https://python.langchain.com/docs/modules/model_io/prompts/example_selectors/ngram_overlap", "Prompt pipelining": "https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/prompts_pipelining", "Template formats": "https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/formats", "Connecting to a Feature Store": "https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/connecting_to_a_feature_store", "Router": "https://python.langchain.com/docs/modules/chains/foundational/router", "Transformation": "https://python.langchain.com/docs/modules/chains/foundational/transformation", "Custom chain": "https://python.langchain.com/docs/modules/chains/how_to/custom_chain", "Async API": "https://python.langchain.com/docs/modules/chains/how_to/async_chain", "First we add a step to load memory": "https://python.langchain.com/docs/expression_language/cookbook/retrieval", "Configure Runnable traces": "https://python.langchain.com/docs/expression_language/how_to/trace_config"}, "ElasticSearchBM25Retriever": {"ElasticSearch BM25": "https://python.langchain.com/docs/integrations/retrievers/elastic_search_bm25"}, "ZepMemory": {"Zep": "https://python.langchain.com/docs/integrations/retrievers/zep_memorystore", "Zep Memory": "https://python.langchain.com/docs/integrations/memory/zep_memory"}, "CombinedMemory": {"Zep": "https://python.langchain.com/docs/integrations/retrievers/zep_memorystore", "Multiple Memory classes": "https://python.langchain.com/docs/modules/memory/multiple_memory"}, "VectorStoreRetrieverMemory": {"Zep": "https://python.langchain.com/docs/integrations/retrievers/zep_memorystore"}, "HumanMessage": {"Zep": "https://python.langchain.com/docs/integrations/retrievers/zep_memorystore", "Zep Memory": "https://python.langchain.com/docs/integrations/memory/zep_memory", "SQL Chat Message History": "https://python.langchain.com/docs/integrations/memory/sql_chat_message_history", "AzureML Chat Online Endpoint": "https://python.langchain.com/docs/integrations/chat/azureml_chat_endpoint", "Anthropic": "https://python.langchain.com/docs/integrations/chat/anthropic", "\ud83d\ude85 LiteLLM": "https://python.langchain.com/docs/integrations/chat/litellm", "Konko": "https://python.langchain.com/docs/integrations/chat/konko", "OpenAI": "https://python.langchain.com/docs/integrations/chat/openai", "Google Cloud Platform Vertex AI PaLM ": "https://python.langchain.com/docs/integrations/chat/google_vertex_ai_palm", "Bedrock Chat": "https://python.langchain.com/docs/integrations/chat/bedrock", "JinaChat": "https://python.langchain.com/docs/integrations/chat/jinachat", "Ollama": "https://python.langchain.com/docs/integrations/chat/ollama", "Azure": "https://python.langchain.com/docs/integrations/chat/azure_chat_openai", "Baidu Qianfan": "https://python.langchain.com/docs/integrations/chat/baidu_qianfan_endpoint", "ERNIE-Bot Chat": "https://python.langchain.com/docs/integrations/chat/ernie", "PromptLayer ChatOpenAI": "https://python.langchain.com/docs/integrations/chat/promptlayer_chatopenai", "Anyscale": 
"https://python.langchain.com/docs/integrations/chat/anyscale", "Anthropic Functions": "https://python.langchain.com/docs/integrations/chat/anthropic_functions", "LLMonitor": "https://python.langchain.com/docs/integrations/callbacks/llmonitor", "Context": "https://python.langchain.com/docs/integrations/callbacks/context", "Label Studio": "https://python.langchain.com/docs/integrations/callbacks/labelstudio", "PromptLayer": "https://python.langchain.com/docs/integrations/callbacks/promptlayer", "Log10": "https://python.langchain.com/docs/integrations/providers/log10", "MLflow AI Gateway": "https://python.langchain.com/docs/integrations/providers/mlflow_ai_gateway", "Flyte": "https://python.langchain.com/docs/integrations/providers/flyte", "Arthur": "https://python.langchain.com/docs/integrations/providers/arthur_tracking", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/chatbots", "Structure answers with OpenAI functions": "https://python.langchain.com/docs/use_cases/question_answering/integrations/openai_functions_retrieval_qa", "CAMEL Role-Playing Autonomous Cooperative Agents": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/camel_role_playing", "Multi-Agent Simulated Environment: Petting Zoo": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/petting_zoo", "Multi-agent decentralized speaker selection": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/multiagent_bidding", "Multi-agent authoritarian speaker selection": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/multiagent_authoritarian", "Two-Player Dungeons & Dragons": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/two_player_dnd", "Multi-Player Dungeons & Dragons": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/multi_player_dnd", "Simulated Environment: Gymnasium": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/gymnasium", "Agent Debates with Tools": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/two_agent_debate_tools", "Custom callback handlers": "https://python.langchain.com/docs/modules/callbacks/custom_callbacks", "Async callbacks": "https://python.langchain.com/docs/modules/callbacks/async_callbacks", "Tools as OpenAI Functions": "https://python.langchain.com/docs/modules/agents/tools/tools_as_openai_functions", "Prompt pipelining": "https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/prompts_pipelining", "Using OpenAI functions": "https://python.langchain.com/docs/modules/chains/how_to/openai_functions"}, "ZepRetriever": {"Zep": "https://python.langchain.com/docs/integrations/providers/zep", "Zep Memory": "https://python.langchain.com/docs/integrations/memory/zep_memory"}, "VespaRetriever": {"Vespa": "https://python.langchain.com/docs/integrations/providers/vespa"}, "AmazonKendraRetriever": {"Amazon Kendra": "https://python.langchain.com/docs/integrations/retrievers/amazon_kendra_retriever"}, "TextLoader": {"Cohere Reranker": "https://python.langchain.com/docs/integrations/retrievers/cohere-reranker", "Confident": "https://python.langchain.com/docs/integrations/callbacks/confident", "Elasticsearch": "https://python.langchain.com/docs/integrations/vectorstores/elasticsearch", "Chat Over Documents with Vectara": "https://python.langchain.com/docs/integrations/providers/vectara/vectara_chat", "Vectorstore": 
"https://python.langchain.com/docs/integrations/toolkits/vectorstore", "LanceDB": "https://python.langchain.com/docs/integrations/vectorstores/lancedb", "sqlite-vss": "https://python.langchain.com/docs/integrations/vectorstores/sqlitevss", "Weaviate": "https://python.langchain.com/docs/integrations/vectorstores/weaviate", "DashVector": "https://python.langchain.com/docs/integrations/vectorstores/dashvector", "ScaNN": "https://python.langchain.com/docs/integrations/vectorstores/scann", "Xata": "https://python.langchain.com/docs/integrations/vectorstores/xata", "Vectara": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/vectara_self_query", "PGVector": "https://python.langchain.com/docs/integrations/vectorstores/pgvector", "Rockset": "https://python.langchain.com/docs/integrations/vectorstores/rockset", "DingoDB": "https://python.langchain.com/docs/integrations/vectorstores/dingo", "Zilliz": "https://python.langchain.com/docs/integrations/vectorstores/zilliz", "SingleStoreDB": "https://python.langchain.com/docs/integrations/vectorstores/singlestoredb", "Annoy": "https://python.langchain.com/docs/integrations/vectorstores/annoy", "Typesense": "https://python.langchain.com/docs/integrations/vectorstores/typesense", "Atlas": "https://python.langchain.com/docs/integrations/vectorstores/atlas", "Activeloop Deep Lake": "https://python.langchain.com/docs/integrations/vectorstores/activeloop_deeplake", "Neo4j Vector Index": "https://python.langchain.com/docs/integrations/vectorstores/neo4jvector", "Tair": "https://python.langchain.com/docs/integrations/vectorstores/tair", "Chroma": "https://python.langchain.com/docs/integrations/vectorstores/chroma", "Alibaba Cloud OpenSearch": "https://python.langchain.com/docs/integrations/vectorstores/alibabacloud_opensearch", "Baidu Cloud VectorSearch": "https://python.langchain.com/docs/integrations/vectorstores/baiducloud_vector_search", "StarRocks": "https://python.langchain.com/docs/integrations/vectorstores/starrocks", "scikit-learn": "https://python.langchain.com/docs/integrations/vectorstores/sklearn", "Tencent Cloud VectorDB": "https://python.langchain.com/docs/integrations/vectorstores/tencentvectordb", "DocArray HnswSearch": "https://python.langchain.com/docs/integrations/vectorstores/docarray_hnsw", "MyScale": "https://python.langchain.com/docs/integrations/vectorstores/myscale", "ClickHouse": "https://python.langchain.com/docs/integrations/vectorstores/clickhouse", "Qdrant": "https://python.langchain.com/docs/integrations/vectorstores/qdrant", "Tigris": "https://python.langchain.com/docs/integrations/vectorstores/tigris", "AwaDB": "https://python.langchain.com/docs/integrations/vectorstores/awadb", "Supabase (Postgres)": "https://python.langchain.com/docs/integrations/vectorstores/supabase", "OpenSearch": "https://python.langchain.com/docs/integrations/vectorstores/opensearch", "Pinecone": "https://python.langchain.com/docs/integrations/vectorstores/pinecone", "BagelDB": "https://python.langchain.com/docs/integrations/vectorstores/bageldb", "Azure Cognitive Search": "https://python.langchain.com/docs/integrations/vectorstores/azuresearch", "Cassandra": "https://python.langchain.com/docs/integrations/vectorstores/cassandra", "USearch": "https://python.langchain.com/docs/integrations/vectorstores/usearch", "Milvus": "https://python.langchain.com/docs/integrations/vectorstores/milvus", "Marqo": "https://python.langchain.com/docs/integrations/vectorstores/marqo", "DocArray InMemorySearch": 
"https://python.langchain.com/docs/integrations/vectorstores/docarray_in_memory", "Postgres Embedding": "https://python.langchain.com/docs/integrations/vectorstores/pgembedding", "Faiss": "https://python.langchain.com/docs/integrations/vectorstores/faiss", "Epsilla": "https://python.langchain.com/docs/integrations/vectorstores/epsilla", "AnalyticDB": "https://python.langchain.com/docs/integrations/vectorstores/analyticdb", "Hologres": "https://python.langchain.com/docs/integrations/vectorstores/hologres", "your local model path": "https://python.langchain.com/docs/integrations/vectorstores/vearch", "MongoDB Atlas": "https://python.langchain.com/docs/integrations/vectorstores/mongodb_atlas", "Meilisearch": "https://python.langchain.com/docs/integrations/vectorstores/meilisearch", "Conversational Retrieval Agent": "https://python.langchain.com/docs/use_cases/question_answering/how_to/conversational_retrieval_agents", "Analysis of Twitter the-algorithm source code with LangChain, GPT4 and Activeloop's Deep Lake": "https://python.langchain.com/docs/use_cases/question_answering/how_to/code/twitter-the-algorithm-analysis-deeplake", "Use LangChain, GPT and Activeloop's Deep Lake to work with code base": "https://python.langchain.com/docs/use_cases/question_answering/how_to/code/code-analysis-deeplake", "Structure answers with OpenAI functions": "https://python.langchain.com/docs/use_cases/question_answering/integrations/openai_functions_retrieval_qa", "QA using Activeloop's DeepLake": "https://python.langchain.com/docs/use_cases/question_answering/integrations/semantic-search-over-chat", "Graph QA": "https://python.langchain.com/docs/use_cases/more/graph/graph_qa", "Caching": "https://python.langchain.com/docs/modules/data_connection/text_embedding/caching_embeddings", "MultiVector Retriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/multi_vector", "Parent Document Retriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/parent_document_retriever", "Combine agents and vector stores": "https://python.langchain.com/docs/modules/agents/how_to/agent_vectorstore", "Loading from LangChainHub": "https://python.langchain.com/docs/modules/chains/how_to/from_hub"}, "FAISS": {"Cohere Reranker": "https://python.langchain.com/docs/integrations/retrievers/cohere-reranker", "Facebook Faiss": "https://python.langchain.com/docs/integrations/providers/facebook_faiss", "Document Comparison": "https://python.langchain.com/docs/integrations/toolkits/document_comparison_toolkit", "Faiss": "https://python.langchain.com/docs/integrations/vectorstores/faiss", "Loading documents from a YouTube url": "https://python.langchain.com/docs/integrations/document_loaders/youtube_audio", "Conversational Retrieval Agent": "https://python.langchain.com/docs/use_cases/question_answering/how_to/conversational_retrieval_agents", "Agents": "https://python.langchain.com/docs/use_cases/more/agents/agents", "AutoGPT": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/autogpt", "BabyAGI User Guide": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/baby_agi", "BabyAGI with Tools": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/baby_agi_with_agent", "!pip install bs4": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/marathon_times", "Plug-and-Plai": "https://python.langchain.com/docs/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval_using_plugnplai", "Custom Agent with 
PlugIn Retrieval": "https://python.langchain.com/docs/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval", "Generative Agents in LangChain": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/characters", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/qa_structured/sql", "SQL": "https://python.langchain.com/docs/use_cases/sql/sql", "Caching": "https://python.langchain.com/docs/modules/data_connection/text_embedding/caching_embeddings", "Ensemble Retriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/ensemble", "Custom agent with tool retrieval": "https://python.langchain.com/docs/modules/agents/how_to/custom_agent_with_tool_retrieval", "Select by maximal marginal relevance (MMR)": "https://python.langchain.com/docs/modules/model_io/prompts/example_selectors/mmr", "First we add a step to load memory": "https://python.langchain.com/docs/expression_language/cookbook/retrieval"}, "OpenAI": {"Cohere Reranker": "https://python.langchain.com/docs/integrations/retrievers/cohere-reranker", "Google Serper": "https://python.langchain.com/docs/integrations/providers/google_serper", "Human as a tool": "https://python.langchain.com/docs/integrations/tools/human_tools", "OpenWeatherMap": "https://python.langchain.com/docs/integrations/tools/openweathermap", "Search Tools": "https://python.langchain.com/docs/integrations/tools/search_tools", "Zapier Natural Language Actions": "https://python.langchain.com/docs/integrations/tools/zapier", "Gradio": "https://python.langchain.com/docs/integrations/tools/gradio_tools", "SceneXplain": "https://python.langchain.com/docs/integrations/tools/sceneXplain", "Dall-E Image Generator": "https://python.langchain.com/docs/integrations/tools/dalle_image_generator", "Entity Memory with SQLite storage": "https://python.langchain.com/docs/integrations/memory/entity_memory_with_sqlite", "Streamlit Chat Message History": "https://python.langchain.com/docs/integrations/memory/streamlit_chat_message_history", "Confident": "https://python.langchain.com/docs/integrations/callbacks/confident", "LLMonitor": "https://python.langchain.com/docs/integrations/callbacks/llmonitor", "Label Studio": "https://python.langchain.com/docs/integrations/callbacks/labelstudio", "Argilla": "https://python.langchain.com/docs/integrations/callbacks/argilla", "PromptLayer": "https://python.langchain.com/docs/integrations/callbacks/promptlayer", "Streamlit": "https://python.langchain.com/docs/integrations/callbacks/.ipynb_checkpoints/streamlit-checkpoint", "Infino": "https://python.langchain.com/docs/integrations/callbacks/infino", "Comet": "https://python.langchain.com/docs/integrations/providers/comet_tracking", "Aim": "https://python.langchain.com/docs/integrations/providers/aim_tracking", "Weights & Biases": "https://python.langchain.com/docs/integrations/providers/wandb_tracking", "SageMaker Tracking": "https://python.langchain.com/docs/integrations/providers/sagemaker_tracking", "OpenAI": "https://python.langchain.com/docs/integrations/llms/openai", "Rebuff": "https://python.langchain.com/docs/integrations/providers/rebuff", "MLflow": "https://python.langchain.com/docs/integrations/providers/mlflow_tracking", "Helicone": "https://python.langchain.com/docs/integrations/providers/helicone", "Shale Protocol": "https://python.langchain.com/docs/integrations/providers/shaleprotocol", "WhyLabs": "https://python.langchain.com/docs/integrations/providers/whylabs_profiling", "WandB Tracing": 
"https://python.langchain.com/docs/integrations/providers/wandb_tracing", "ClearML": "https://python.langchain.com/docs/integrations/providers/clearml_tracking", "Ray Serve": "https://python.langchain.com/docs/integrations/providers/ray_serve", "Log, Trace, and Monitor": "https://python.langchain.com/docs/integrations/providers/portkey/logging_tracing_portkey", "Portkey": "https://python.langchain.com/docs/integrations/providers/portkey/index", "Chat Over Documents with Vectara": "https://python.langchain.com/docs/integrations/providers/vectara/vectara_chat", "Vectara Text Generation": "https://python.langchain.com/docs/integrations/providers/vectara/vectara_text_generation", "CSV": "https://python.langchain.com/docs/integrations/toolkits/csv", "Xorbits": "https://python.langchain.com/docs/integrations/toolkits/xorbits", "Jira": "https://python.langchain.com/docs/integrations/toolkits/jira", "Spark Dataframe": "https://python.langchain.com/docs/integrations/toolkits/spark", "Python": "https://python.langchain.com/docs/integrations/toolkits/python", "SQL Database": "https://python.langchain.com/docs/integrations/toolkits/sql_database", "Natural Language APIs": "https://python.langchain.com/docs/integrations/toolkits/openapi_nla", "JSON": "https://python.langchain.com/docs/integrations/toolkits/json", "Github": "https://python.langchain.com/docs/integrations/toolkits/github", "Pandas Dataframe": "https://python.langchain.com/docs/integrations/toolkits/pandas", "OpenAPI": "https://python.langchain.com/docs/integrations/toolkits/openapi", "Gitlab": "https://python.langchain.com/docs/integrations/toolkits/gitlab", "Vectara": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/vectara_self_query", "Psychic": "https://python.langchain.com/docs/integrations/document_loaders/psychic", "Docugami": "https://python.langchain.com/docs/integrations/document_loaders/docugami", "Amazon Textract ": "https://python.langchain.com/docs/integrations/document_loaders/pdf-amazonTextractPDFLoader", "OpaquePrompts": "https://python.langchain.com/docs/integrations/llms/opaqueprompts", "LLM Caching integrations": "https://python.langchain.com/docs/integrations/llms/llm_caching", "Fallbacks": "https://python.langchain.com/docs/guides/fallbacks", "Removing logical fallacies from model output": "https://python.langchain.com/docs/guides/safety/logical_fallacy_chain", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/apis", "Perform context-aware text splitting": "https://python.langchain.com/docs/use_cases/question_answering/how_to/document-context-aware-QA", "Retrieve from vector stores directly": "https://python.langchain.com/docs/use_cases/question_answering/how_to/vector_db_text_generation", "Retrieve as you generate with FLARE": "https://python.langchain.com/docs/use_cases/question_answering/how_to/flare", "Improve document indexing with HyDE": "https://python.langchain.com/docs/use_cases/question_answering/how_to/hyde", "QA using Activeloop's DeepLake": "https://python.langchain.com/docs/use_cases/question_answering/integrations/semantic-search-over-chat", "Graph QA": "https://python.langchain.com/docs/use_cases/more/graph/graph_qa", "Tree of Thought (ToT) example": "https://python.langchain.com/docs/use_cases/more/graph/tot", "HuggingGPT": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/hugginggpt", "SalesGPT - Your Context-Aware AI Sales Assistant With Knowledge Base": 
"https://python.langchain.com/docs/use_cases/more/agents/agents/sales_agent_with_context", "Agent Debates with Tools": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/two_agent_debate_tools", "Bash chain": "https://python.langchain.com/docs/use_cases/more/code_writing/llm_bash", "LLM Symbolic Math ": "https://python.langchain.com/docs/use_cases/more/code_writing/llm_symbolic_math", "Summarization checker chain": "https://python.langchain.com/docs/use_cases/more/self_check/llm_summarization_checker", "Self-checking chain": "https://python.langchain.com/docs/use_cases/more/self_check/llm_checker", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/qa_structured/sql", "Vector SQL Retriever with MyScale": "https://python.langchain.com/docs/use_cases/qa_structured/integrations/myscale_vector_sql", "SQL": "https://python.langchain.com/docs/use_cases/sql/sql", "Milvus": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/milvus_self_query", "Weaviate": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/weaviate_self_query", "Elasticsearch": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/elasticsearch_self_query", "Chroma": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/chroma_self_query", "Pinecone": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/pinecone", "Supabase": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/supabase_self_query", "Redis": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/redis_self_query", "MyScale": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/myscale_self_query", "Deep Lake": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/activeloop_deeplake_self_query", "Qdrant": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/qdrant_self_query", "Lost in the middle: The problem with long contexts": "https://python.langchain.com/docs/modules/data_connection/document_transformers/post_retrieval/long_context_reorder", "Memory in the Multi-Input Chain": "https://python.langchain.com/docs/modules/memory/adding_memory_chain_multiple_inputs", "Memory in LLMChain": "https://python.langchain.com/docs/modules/memory/adding_memory", "Multiple Memory classes": "https://python.langchain.com/docs/modules/memory/multiple_memory", "Customizing Conversational Memory": "https://python.langchain.com/docs/modules/memory/conversational_customization", "Conversation Knowledge Graph": "https://python.langchain.com/docs/modules/memory/types/kg", "Conversation Token Buffer": "https://python.langchain.com/docs/modules/memory/types/token_buffer", "Conversation Summary Buffer": "https://python.langchain.com/docs/modules/memory/types/summary_buffer", "Multiple callback handlers": "https://python.langchain.com/docs/modules/callbacks/multiple_callbacks", "Token counting": "https://python.langchain.com/docs/modules/callbacks/token_counting", "Logging to file": "https://python.langchain.com/docs/modules/callbacks/filecallbackhandler", "Multi-Input Tools": "https://python.langchain.com/docs/modules/agents/tools/multi_input_tool", "Defining Custom Tools": "https://python.langchain.com/docs/modules/agents/tools/custom_tools", "Tool Input Schema": 
"https://python.langchain.com/docs/modules/agents/tools/tool_input_validation", "Human-in-the-loop Tool Validation": "https://python.langchain.com/docs/modules/agents/tools/human_approval", "Combine agents and vector stores": "https://python.langchain.com/docs/modules/agents/how_to/agent_vectorstore", "Access intermediate steps": "https://python.langchain.com/docs/modules/agents/how_to/intermediate_steps", "Timeouts for agents": "https://python.langchain.com/docs/modules/agents/how_to/max_time_limit", "Streaming final agent output": "https://python.langchain.com/docs/modules/agents/how_to/streaming_stdout_final_only", "Cap the max number of iterations": "https://python.langchain.com/docs/modules/agents/how_to/max_iterations", "Async API": "https://python.langchain.com/docs/modules/chains/how_to/async_chain", "Tracking token usage": "https://python.langchain.com/docs/modules/model_io/models/llms/token_usage_tracking", "Serialization": "https://python.langchain.com/docs/modules/model_io/models/llms/llm_serialization", "Retry parser": "https://python.langchain.com/docs/modules/model_io/output_parsers/retry", "Datetime parser": "https://python.langchain.com/docs/modules/model_io/output_parsers/datetime", "Pydantic (JSON) parser": "https://python.langchain.com/docs/modules/model_io/output_parsers/pydantic", "Router": "https://python.langchain.com/docs/modules/chains/foundational/router", "Transformation": "https://python.langchain.com/docs/modules/chains/foundational/transformation", "Adding moderation": "https://python.langchain.com/docs/expression_language/cookbook/moderation"}, "ContextualCompressionRetriever": {"Cohere Reranker": "https://python.langchain.com/docs/integrations/retrievers/cohere-reranker", "LOTR (Merger Retriever)": "https://python.langchain.com/docs/integrations/retrievers/merger_retriever"}, "CohereRerank": {"Cohere Reranker": "https://python.langchain.com/docs/integrations/retrievers/cohere-reranker", "Cohere": "https://python.langchain.com/docs/integrations/providers/cohere"}, "RetrievalQA": {"Cohere Reranker": "https://python.langchain.com/docs/integrations/retrievers/cohere-reranker", "Ollama": "https://python.langchain.com/docs/integrations/llms/ollama", "Confident": "https://python.langchain.com/docs/integrations/callbacks/confident", "Document Comparison": "https://python.langchain.com/docs/integrations/toolkits/document_comparison_toolkit", "ScaNN": "https://python.langchain.com/docs/integrations/vectorstores/scann", "Activeloop Deep Lake": "https://python.langchain.com/docs/integrations/vectorstores/activeloop_deeplake", "StarRocks": "https://python.langchain.com/docs/integrations/vectorstores/starrocks", "your local model path": "https://python.langchain.com/docs/integrations/vectorstores/vearch", "Loading documents from a YouTube url": "https://python.langchain.com/docs/integrations/document_loaders/youtube_audio", "Docugami": "https://python.langchain.com/docs/integrations/document_loaders/docugami", "Question Answering": "https://python.langchain.com/docs/use_cases/question_answering/question_answering", "Perform context-aware text splitting": "https://python.langchain.com/docs/use_cases/question_answering/how_to/document-context-aware-QA", "Use local LLMs": "https://python.langchain.com/docs/use_cases/question_answering/how_to/local_retrieval_qa", "Structure answers with OpenAI functions": "https://python.langchain.com/docs/use_cases/question_answering/integrations/openai_functions_retrieval_qa", "QA using Activeloop's DeepLake": 
"https://python.langchain.com/docs/use_cases/question_answering/integrations/semantic-search-over-chat", "SalesGPT - Your Context-Aware AI Sales Assistant With Knowledge Base": "https://python.langchain.com/docs/use_cases/more/agents/agents/sales_agent_with_context", "Combine agents and vector stores": "https://python.langchain.com/docs/modules/agents/how_to/agent_vectorstore"}, "KNNRetriever": {"kNN": "https://python.langchain.com/docs/integrations/retrievers/knn"}, "WikipediaRetriever": {"Wikipedia": "https://python.langchain.com/docs/integrations/providers/wikipedia"}, "ConversationalRetrievalChain": {"Wikipedia": "https://python.langchain.com/docs/integrations/retrievers/wikipedia", "Arxiv": "https://python.langchain.com/docs/integrations/retrievers/arxiv", "Chat Over Documents with Vectara": "https://python.langchain.com/docs/integrations/providers/vectara/vectara_chat", "Vectara": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/vectara_self_query", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/chatbots", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/code_understanding", "Analysis of Twitter the-algorithm source code with LangChain, GPT4 and Activeloop's Deep Lake": "https://python.langchain.com/docs/use_cases/question_answering/how_to/code/twitter-the-algorithm-analysis-deeplake", "Use LangChain, GPT and Activeloop's Deep Lake to work with code base": "https://python.langchain.com/docs/use_cases/question_answering/how_to/code/code-analysis-deeplake", "Structure answers with OpenAI functions": "https://python.langchain.com/docs/use_cases/question_answering/integrations/openai_functions_retrieval_qa", "QA using Activeloop's DeepLake": "https://python.langchain.com/docs/use_cases/question_answering/integrations/semantic-search-over-chat"}, "MetalRetriever": {"Metal": "https://python.langchain.com/docs/integrations/providers/metal"}, "CSVLoader": {"ChatGPT Plugin": "https://python.langchain.com/docs/integrations/retrievers/chatgpt-plugin", "CSV": "https://python.langchain.com/docs/integrations/document_loaders/csv"}, "Document": {"ChatGPT Plugin": "https://python.langchain.com/docs/integrations/retrievers/chatgpt-plugin", "Weaviate Hybrid Search": "https://python.langchain.com/docs/integrations/retrievers/weaviate-hybrid", "BM25": "https://python.langchain.com/docs/integrations/retrievers/bm25", "TF-IDF": "https://python.langchain.com/docs/integrations/retrievers/tf_idf", "Apify": "https://python.langchain.com/docs/integrations/tools/apify", "Vectara Text Generation": "https://python.langchain.com/docs/integrations/providers/vectara/vectara_text_generation", "PGVector": "https://python.langchain.com/docs/integrations/vectorstores/pgvector", "Annoy": "https://python.langchain.com/docs/integrations/vectorstores/annoy", "Neo4j Vector Index": "https://python.langchain.com/docs/integrations/vectorstores/neo4jvector", "Postgres Embedding": "https://python.langchain.com/docs/integrations/vectorstores/pgembedding", "Faiss": "https://python.langchain.com/docs/integrations/vectorstores/faiss", "Nuclia Understanding API document transformer": "https://python.langchain.com/docs/integrations/document_transformers/nuclia_transformer", "OpenAI Functions Metadata Tagger": "https://python.langchain.com/docs/integrations/document_transformers/openai_metadata_tagger", "Doctran Extract Properties": 
"https://python.langchain.com/docs/integrations/document_transformers/doctran_extract_properties", "Doctran Interrogate Documents": "https://python.langchain.com/docs/integrations/document_transformers/doctran_interrogate_document", "Doctran Translate Documents": "https://python.langchain.com/docs/integrations/document_transformers/doctran_translate_document", "TensorFlow Datasets": "https://python.langchain.com/docs/integrations/document_loaders/tensorflow_datasets", "Airbyte Salesforce": "https://python.langchain.com/docs/integrations/document_loaders/airbyte_salesforce", "Airbyte CDK": "https://python.langchain.com/docs/integrations/document_loaders/airbyte_cdk", "Airbyte Stripe": "https://python.langchain.com/docs/integrations/document_loaders/airbyte_stripe", "Copy Paste": "https://python.langchain.com/docs/integrations/document_loaders/copypaste", "Airbyte Typeform": "https://python.langchain.com/docs/integrations/document_loaders/airbyte_typeform", "Apify Dataset": "https://python.langchain.com/docs/integrations/document_loaders/apify_dataset", "Docugami": "https://python.langchain.com/docs/integrations/document_loaders/docugami", "Airbyte Hubspot": "https://python.langchain.com/docs/integrations/document_loaders/airbyte_hubspot", "Airbyte Gong": "https://python.langchain.com/docs/integrations/document_loaders/airbyte_gong", "Airbyte Shopify": "https://python.langchain.com/docs/integrations/document_loaders/airbyte_shopify", "Airbyte Zendesk Support": "https://python.langchain.com/docs/integrations/document_loaders/airbyte_zendesk_support", "SageMakerEndpoint": "https://python.langchain.com/docs/integrations/llms/sagemaker", "LLM Caching integrations": "https://python.langchain.com/docs/integrations/llms/llm_caching", "Retrieve from vector stores directly": "https://python.langchain.com/docs/use_cases/question_answering/how_to/vector_db_text_generation", "Multiple Retrieval Sources": "https://python.langchain.com/docs/use_cases/question_answering/how_to/multiple_retrieval", "Retrieve as you generate with FLARE": "https://python.langchain.com/docs/use_cases/question_answering/how_to/flare", "!pip install bs4": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/marathon_times", "Plug-and-Plai": "https://python.langchain.com/docs/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval_using_plugnplai", "Custom Agent with PlugIn Retrieval": "https://python.langchain.com/docs/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/qa_structured/sql", "SQL": "https://python.langchain.com/docs/use_cases/sql/sql", "Indexing": "https://python.langchain.com/docs/modules/data_connection/indexing", "MultiVector Retriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/multi_vector", "Milvus": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/milvus_self_query", "Weaviate": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/weaviate_self_query", "Vectara": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/vectara_self_query", "DashVector": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/dashvector", "Elasticsearch": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/elasticsearch_self_query", "Chroma": 
"https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/chroma_self_query", "Pinecone": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/pinecone", "Supabase": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/supabase_self_query", "Redis": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/redis_self_query", "MyScale": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/myscale_self_query", "Deep Lake": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/activeloop_deeplake_self_query", "Qdrant": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/qdrant_self_query", "Memory in the Multi-Input Chain": "https://python.langchain.com/docs/modules/memory/adding_memory_chain_multiple_inputs", "Custom agent with tool retrieval": "https://python.langchain.com/docs/modules/agents/how_to/custom_agent_with_tool_retrieval"}, "ChatGPTPluginRetriever": {"ChatGPT Plugin": "https://python.langchain.com/docs/integrations/retrievers/chatgpt-plugin", "OpenAI": "https://python.langchain.com/docs/integrations/providers/openai"}, "GoogleVertexAISearchRetriever": {"Google Vertex AI Search": "https://python.langchain.com/docs/integrations/retrievers/google_vertex_ai_search"}, "DocArrayRetriever": {"DocArray Retriever": "https://python.langchain.com/docs/integrations/retrievers/docarray_retriever"}, "SVMRetriever": {"SVM": "https://python.langchain.com/docs/integrations/retrievers/svm", "Question Answering": "https://python.langchain.com/docs/use_cases/question_answering/question_answering"}, "PineconeHybridSearchRetriever": {"Pinecone Hybrid Search": "https://python.langchain.com/docs/integrations/retrievers/pinecone_hybrid_search"}, "PubMedRetriever": {"PubMed": "https://python.langchain.com/docs/integrations/providers/pubmed"}, "WeaviateHybridSearchRetriever": {"Weaviate Hybrid Search": "https://python.langchain.com/docs/integrations/retrievers/weaviate-hybrid"}, "ArxivRetriever": {"Arxiv": "https://python.langchain.com/docs/integrations/providers/arxiv"}, "BM25Retriever": {"BM25": "https://python.langchain.com/docs/integrations/retrievers/bm25", "Ensemble Retriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/ensemble"}, "AzureCognitiveSearchRetriever": {"Azure Cognitive Search": "https://python.langchain.com/docs/integrations/providers/azure_cognitive_search_"}, "ChaindeskRetriever": {"Chaindesk": "https://python.langchain.com/docs/integrations/providers/chaindesk"}, "MergerRetriever": {"LOTR (Merger Retriever)": "https://python.langchain.com/docs/integrations/retrievers/merger_retriever"}, "EmbeddingsRedundantFilter": {"LOTR (Merger Retriever)": "https://python.langchain.com/docs/integrations/retrievers/merger_retriever"}, "EmbeddingsClusteringFilter": {"LOTR (Merger Retriever)": "https://python.langchain.com/docs/integrations/retrievers/merger_retriever"}, "DocumentCompressorPipeline": {"LOTR (Merger Retriever)": "https://python.langchain.com/docs/integrations/retrievers/merger_retriever"}, "LongContextReorder": {"LOTR (Merger Retriever)": "https://python.langchain.com/docs/integrations/retrievers/merger_retriever", "Lost in the middle: The problem with long contexts": "https://python.langchain.com/docs/modules/data_connection/document_transformers/post_retrieval/long_context_reorder"}, "TFIDFRetriever": {"TF-IDF": 
"https://python.langchain.com/docs/integrations/retrievers/tf_idf"}, "load_tools": {"ChatGPT Plugins": "https://python.langchain.com/docs/integrations/tools/chatgpt_plugins", "Human as a tool": "https://python.langchain.com/docs/integrations/tools/human_tools", "AWS Lambda": "https://python.langchain.com/docs/integrations/tools/awslambda", "Google Drive": "https://python.langchain.com/docs/integrations/tools/google_drive", "Requests": "https://python.langchain.com/docs/integrations/tools/requests", "OpenWeatherMap": "https://python.langchain.com/docs/integrations/providers/openweathermap", "Search Tools": "https://python.langchain.com/docs/integrations/tools/search_tools", "Eleven Labs Text2Speech": "https://python.langchain.com/docs/integrations/tools/eleven_labs_tts", "ArXiv": "https://python.langchain.com/docs/integrations/tools/arxiv", "GraphQL": "https://python.langchain.com/docs/integrations/tools/graphql", "SceneXplain": "https://python.langchain.com/docs/integrations/tools/sceneXplain", "Dall-E Image Generator": "https://python.langchain.com/docs/integrations/tools/dalle_image_generator", "LLMonitor": "https://python.langchain.com/docs/integrations/callbacks/llmonitor", "Argilla": "https://python.langchain.com/docs/integrations/callbacks/argilla", "Streamlit": "https://python.langchain.com/docs/integrations/callbacks/.ipynb_checkpoints/streamlit-checkpoint", "SerpAPI": "https://python.langchain.com/docs/integrations/providers/serpapi", "Comet": "https://python.langchain.com/docs/integrations/providers/comet_tracking", "Aim": "https://python.langchain.com/docs/integrations/providers/aim_tracking", "Golden": "https://python.langchain.com/docs/integrations/providers/golden", "Weights & Biases": "https://python.langchain.com/docs/integrations/providers/wandb_tracking", "SageMaker Tracking": "https://python.langchain.com/docs/integrations/providers/sagemaker_tracking", "Wolfram Alpha": "https://python.langchain.com/docs/integrations/providers/wolfram_alpha", "MLflow": "https://python.langchain.com/docs/integrations/providers/mlflow_tracking", "DataForSEO": "https://python.langchain.com/docs/integrations/providers/dataforseo", "SearxNG Search API": "https://python.langchain.com/docs/integrations/providers/searx", "Google Serper": "https://python.langchain.com/docs/integrations/providers/google_serper", "Flyte": "https://python.langchain.com/docs/integrations/providers/flyte", "WandB Tracing": "https://python.langchain.com/docs/integrations/providers/wandb_tracing", "ClearML": "https://python.langchain.com/docs/integrations/providers/clearml_tracking", "Google Search": "https://python.langchain.com/docs/integrations/providers/google_search", "Log, Trace, and Monitor": "https://python.langchain.com/docs/integrations/providers/portkey/logging_tracing_portkey", "Portkey": "https://python.langchain.com/docs/integrations/providers/portkey/index", "Google Drive tool": "https://python.langchain.com/docs/integrations/toolkits/google_drive", "Bittensor": "https://python.langchain.com/docs/integrations/llms/bittensor", "Amazon API Gateway": "https://python.langchain.com/docs/integrations/llms/amazon_api_gateway", "Debugging": "https://python.langchain.com/docs/guides/debugging", "LangSmith Walkthrough": "https://python.langchain.com/docs/guides/langsmith/walkthrough", "Agents": "https://python.langchain.com/docs/use_cases/more/agents/agents", "Agent Debates with Tools": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/two_agent_debate_tools", "Multiple callback handlers": 
"https://python.langchain.com/docs/modules/callbacks/multiple_callbacks", "Defining Custom Tools": "https://python.langchain.com/docs/modules/agents/tools/custom_tools", "Human-in-the-loop Tool Validation": "https://python.langchain.com/docs/modules/agents/tools/human_approval", "Access intermediate steps": "https://python.langchain.com/docs/modules/agents/how_to/intermediate_steps", "Timeouts for agents": "https://python.langchain.com/docs/modules/agents/how_to/max_time_limit", "Streaming final agent output": "https://python.langchain.com/docs/modules/agents/how_to/streaming_stdout_final_only", "Cap the max number of iterations": "https://python.langchain.com/docs/modules/agents/how_to/max_iterations", "Async API": "https://python.langchain.com/docs/modules/agents/how_to/async_agent", "Human input chat model": "https://python.langchain.com/docs/modules/model_io/models/chat/human_input_chat_model", "Fake LLM": "https://python.langchain.com/docs/modules/model_io/models/llms/fake_llm", "Tracking token usage": "https://python.langchain.com/docs/modules/model_io/models/llms/token_usage_tracking", "Human input LLM": "https://python.langchain.com/docs/modules/model_io/models/llms/human_input_llm"}, "initialize_agent": {"ChatGPT Plugins": "https://python.langchain.com/docs/integrations/tools/chatgpt_plugins", "Google Serper": "https://python.langchain.com/docs/integrations/providers/google_serper", "Human as a tool": "https://python.langchain.com/docs/integrations/tools/human_tools", "Yahoo Finance News": "https://python.langchain.com/docs/integrations/tools/yahoo_finance_news", "AWS Lambda": "https://python.langchain.com/docs/integrations/tools/awslambda", "Google Drive": "https://python.langchain.com/docs/integrations/tools/google_drive", "OpenWeatherMap": "https://python.langchain.com/docs/integrations/tools/openweathermap", "Search Tools": "https://python.langchain.com/docs/integrations/tools/search_tools", "Eleven Labs Text2Speech": "https://python.langchain.com/docs/integrations/tools/eleven_labs_tts", "Zapier Natural Language Actions": "https://python.langchain.com/docs/integrations/tools/zapier", "ArXiv": "https://python.langchain.com/docs/integrations/tools/arxiv", "Metaphor Search": "https://python.langchain.com/docs/integrations/tools/metaphor_search", "GraphQL": "https://python.langchain.com/docs/integrations/tools/graphql", "Gradio": "https://python.langchain.com/docs/integrations/tools/gradio_tools", "SceneXplain": "https://python.langchain.com/docs/integrations/tools/sceneXplain", "Eden AI": "https://python.langchain.com/docs/integrations/tools/edenai_tools", "Dall-E Image Generator": "https://python.langchain.com/docs/integrations/tools/dalle_image_generator", "Shell (bash)": "https://python.langchain.com/docs/integrations/tools/bash", "Zep Memory": "https://python.langchain.com/docs/integrations/memory/zep_memory", "Xata chat memory": "https://python.langchain.com/docs/integrations/memory/xata_chat_message_history", "Dynamodb Chat Message History": "https://python.langchain.com/docs/integrations/memory/dynamodb_chat_message_history", "LLMonitor": "https://python.langchain.com/docs/integrations/callbacks/llmonitor", "Argilla": "https://python.langchain.com/docs/integrations/callbacks/argilla", "Streamlit": "https://python.langchain.com/docs/integrations/callbacks/.ipynb_checkpoints/streamlit-checkpoint", "Comet": "https://python.langchain.com/docs/integrations/providers/comet_tracking", "Aim": "https://python.langchain.com/docs/integrations/providers/aim_tracking", "Weights & 
Biases": "https://python.langchain.com/docs/integrations/providers/wandb_tracking", "SageMaker Tracking": "https://python.langchain.com/docs/integrations/providers/sagemaker_tracking", "MLflow": "https://python.langchain.com/docs/integrations/providers/mlflow_tracking", "Flyte": "https://python.langchain.com/docs/integrations/providers/flyte", "WandB Tracing": "https://python.langchain.com/docs/integrations/providers/wandb_tracing", "ClearML": "https://python.langchain.com/docs/integrations/providers/clearml_tracking", "Log, Trace, and Monitor": "https://python.langchain.com/docs/integrations/providers/portkey/logging_tracing_portkey", "Portkey": "https://python.langchain.com/docs/integrations/providers/portkey/index", "Jira": "https://python.langchain.com/docs/integrations/toolkits/jira", "Document Comparison": "https://python.langchain.com/docs/integrations/toolkits/document_comparison_toolkit", "Azure Cognitive Services": "https://python.langchain.com/docs/integrations/toolkits/azure_cognitive_services", "Natural Language APIs": "https://python.langchain.com/docs/integrations/toolkits/openapi_nla", "Gmail": "https://python.langchain.com/docs/integrations/toolkits/gmail", "Github": "https://python.langchain.com/docs/integrations/toolkits/github", "Google Drive tool": "https://python.langchain.com/docs/integrations/toolkits/google_drive", "AINetwork": "https://python.langchain.com/docs/integrations/toolkits/ainetwork", "PlayWright Browser": "https://python.langchain.com/docs/integrations/toolkits/playwright", "Office365": "https://python.langchain.com/docs/integrations/toolkits/office365", "MultiOn": "https://python.langchain.com/docs/integrations/toolkits/multion", "Amadeus": "https://python.langchain.com/docs/integrations/toolkits/amadeus", "Gitlab": "https://python.langchain.com/docs/integrations/toolkits/gitlab", "Bittensor": "https://python.langchain.com/docs/integrations/llms/bittensor", "Amazon API Gateway": "https://python.langchain.com/docs/integrations/llms/amazon_api_gateway", "Debugging": "https://python.langchain.com/docs/guides/debugging", "LangSmith Walkthrough": "https://python.langchain.com/docs/guides/langsmith/walkthrough", "Hugging Face Prompt Injection Identification": "https://python.langchain.com/docs/guides/safety/hugging_face_prompt_injection", "Comparing Chain Outputs": "https://python.langchain.com/docs/guides/evaluation/examples/comparisons", "Agent Trajectory": "https://python.langchain.com/docs/guides/evaluation/trajectory/trajectory_eval", "Agents": "https://python.langchain.com/docs/use_cases/more/agents/agents", "Multi-modal outputs: Image & Text": "https://python.langchain.com/docs/use_cases/more/agents/multi_modal/multi_modal_output_agent", "Agent Debates with Tools": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/two_agent_debate_tools", "Multiple callback handlers": "https://python.langchain.com/docs/modules/callbacks/multiple_callbacks", "Multi-Input Tools": "https://python.langchain.com/docs/modules/agents/tools/multi_input_tool", "Defining Custom Tools": "https://python.langchain.com/docs/modules/agents/tools/custom_tools", "Tool Input Schema": "https://python.langchain.com/docs/modules/agents/tools/tool_input_validation", "Human-in-the-loop Tool Validation": "https://python.langchain.com/docs/modules/agents/tools/human_approval", "Self-ask with search": "https://python.langchain.com/docs/modules/agents/agent_types/self_ask_with_search", "ReAct document store": 
"https://python.langchain.com/docs/modules/agents/agent_types/react_docstore", "OpenAI Multi Functions Agent": "https://python.langchain.com/docs/modules/agents/agent_types/openai_multi_functions_agent", "Combine agents and vector stores": "https://python.langchain.com/docs/modules/agents/how_to/agent_vectorstore", "Access intermediate steps": "https://python.langchain.com/docs/modules/agents/how_to/intermediate_steps", "Handle parsing errors": "https://python.langchain.com/docs/modules/agents/how_to/handle_parsing_errors", "Running Agent as an Iterator": "https://python.langchain.com/docs/modules/agents/how_to/agent_iter", "Timeouts for agents": "https://python.langchain.com/docs/modules/agents/how_to/max_time_limit", "Streaming final agent output": "https://python.langchain.com/docs/modules/agents/how_to/streaming_stdout_final_only", "Add Memory to OpenAI Functions Agent": "https://python.langchain.com/docs/modules/agents/how_to/add_memory_openai_functions", "Cap the max number of iterations": "https://python.langchain.com/docs/modules/agents/how_to/max_iterations", "Custom functions with OpenAI Functions Agent": "https://python.langchain.com/docs/modules/agents/how_to/custom-functions-with-openai-functions-agent", "Async API": "https://python.langchain.com/docs/modules/agents/how_to/async_agent", "Use ToolKits with OpenAI Functions": "https://python.langchain.com/docs/modules/agents/how_to/use_toolkits_with_openai_functions", "Human input chat model": "https://python.langchain.com/docs/modules/model_io/models/chat/human_input_chat_model", "Fake LLM": "https://python.langchain.com/docs/modules/model_io/models/llms/fake_llm", "Tracking token usage": "https://python.langchain.com/docs/modules/model_io/models/llms/token_usage_tracking", "Human input LLM": "https://python.langchain.com/docs/modules/model_io/models/llms/human_input_llm"}, "AgentType": {"ChatGPT Plugins": "https://python.langchain.com/docs/integrations/tools/chatgpt_plugins", "Google Serper": "https://python.langchain.com/docs/integrations/providers/google_serper", "Human as a tool": "https://python.langchain.com/docs/integrations/tools/human_tools", "Yahoo Finance News": "https://python.langchain.com/docs/integrations/tools/yahoo_finance_news", "AWS Lambda": "https://python.langchain.com/docs/integrations/tools/awslambda", "Google Drive": "https://python.langchain.com/docs/integrations/tools/google_drive", "OpenWeatherMap": "https://python.langchain.com/docs/integrations/tools/openweathermap", "Search Tools": "https://python.langchain.com/docs/integrations/tools/search_tools", "Eleven Labs Text2Speech": "https://python.langchain.com/docs/integrations/tools/eleven_labs_tts", "Zapier Natural Language Actions": "https://python.langchain.com/docs/integrations/tools/zapier", "ArXiv": "https://python.langchain.com/docs/integrations/tools/arxiv", "Metaphor Search": "https://python.langchain.com/docs/integrations/tools/metaphor_search", "GraphQL": "https://python.langchain.com/docs/integrations/tools/graphql", "Eden AI": "https://python.langchain.com/docs/integrations/tools/edenai_tools", "Shell (bash)": "https://python.langchain.com/docs/integrations/tools/bash", "Zep Memory": "https://python.langchain.com/docs/integrations/memory/zep_memory", "Xata chat memory": "https://python.langchain.com/docs/integrations/memory/xata_chat_message_history", "Dynamodb Chat Message History": "https://python.langchain.com/docs/integrations/memory/dynamodb_chat_message_history", "LLMonitor": 
"https://python.langchain.com/docs/integrations/callbacks/llmonitor", "Argilla": "https://python.langchain.com/docs/integrations/callbacks/argilla", "Streamlit": "https://python.langchain.com/docs/integrations/callbacks/.ipynb_checkpoints/streamlit-checkpoint", "Aim": "https://python.langchain.com/docs/integrations/providers/aim_tracking", "Weights & Biases": "https://python.langchain.com/docs/integrations/providers/wandb_tracking", "MLflow": "https://python.langchain.com/docs/integrations/providers/mlflow_tracking", "Flyte": "https://python.langchain.com/docs/integrations/providers/flyte", "WandB Tracing": "https://python.langchain.com/docs/integrations/providers/wandb_tracing", "ClearML": "https://python.langchain.com/docs/integrations/providers/clearml_tracking", "Log, Trace, and Monitor": "https://python.langchain.com/docs/integrations/providers/portkey/logging_tracing_portkey", "Portkey": "https://python.langchain.com/docs/integrations/providers/portkey/index", "CSV": "https://python.langchain.com/docs/integrations/toolkits/csv", "Jira": "https://python.langchain.com/docs/integrations/toolkits/jira", "Document Comparison": "https://python.langchain.com/docs/integrations/toolkits/document_comparison_toolkit", "Python": "https://python.langchain.com/docs/integrations/toolkits/python", "Azure Cognitive Services": "https://python.langchain.com/docs/integrations/toolkits/azure_cognitive_services", "SQL Database": "https://python.langchain.com/docs/integrations/toolkits/sql_database", "Natural Language APIs": "https://python.langchain.com/docs/integrations/toolkits/openapi_nla", "Gmail": "https://python.langchain.com/docs/integrations/toolkits/gmail", "Airbyte Question Answering": "https://python.langchain.com/docs/integrations/toolkits/airbyte_structured_qa", "Github": "https://python.langchain.com/docs/integrations/toolkits/github", "Google Drive tool": "https://python.langchain.com/docs/integrations/toolkits/google_drive", "AINetwork": "https://python.langchain.com/docs/integrations/toolkits/ainetwork", "PlayWright Browser": "https://python.langchain.com/docs/integrations/toolkits/playwright", "Office365": "https://python.langchain.com/docs/integrations/toolkits/office365", "Pandas Dataframe": "https://python.langchain.com/docs/integrations/toolkits/pandas", "MultiOn": "https://python.langchain.com/docs/integrations/toolkits/multion", "Amadeus": "https://python.langchain.com/docs/integrations/toolkits/amadeus", "Gitlab": "https://python.langchain.com/docs/integrations/toolkits/gitlab", "Bittensor": "https://python.langchain.com/docs/integrations/llms/bittensor", "Amazon API Gateway": "https://python.langchain.com/docs/integrations/llms/amazon_api_gateway", "Debugging": "https://python.langchain.com/docs/guides/debugging", "LangSmith Walkthrough": "https://python.langchain.com/docs/guides/langsmith/walkthrough", "Hugging Face Prompt Injection Identification": "https://python.langchain.com/docs/guides/safety/hugging_face_prompt_injection", "Comparing Chain Outputs": "https://python.langchain.com/docs/guides/evaluation/examples/comparisons", "Agent Trajectory": "https://python.langchain.com/docs/guides/evaluation/trajectory/trajectory_eval", "Agents": "https://python.langchain.com/docs/use_cases/more/agents/agents", "Multi-modal outputs: Image & Text": "https://python.langchain.com/docs/use_cases/more/agents/multi_modal/multi_modal_output_agent", "Agent Debates with Tools": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/two_agent_debate_tools", "Set env var 
OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/qa_structured/sql", "SQL": "https://python.langchain.com/docs/use_cases/sql/sql", "Multiple callback handlers": "https://python.langchain.com/docs/modules/callbacks/multiple_callbacks", "Multi-Input Tools": "https://python.langchain.com/docs/modules/agents/tools/multi_input_tool", "Defining Custom Tools": "https://python.langchain.com/docs/modules/agents/tools/custom_tools", "Tool Input Schema": "https://python.langchain.com/docs/modules/agents/tools/tool_input_validation", "Human-in-the-loop Tool Validation": "https://python.langchain.com/docs/modules/agents/tools/human_approval", "Self-ask with search": "https://python.langchain.com/docs/modules/agents/agent_types/self_ask_with_search", "ReAct document store": "https://python.langchain.com/docs/modules/agents/agent_types/react_docstore", "OpenAI Multi Functions Agent": "https://python.langchain.com/docs/modules/agents/agent_types/openai_multi_functions_agent", "Combine agents and vector stores": "https://python.langchain.com/docs/modules/agents/how_to/agent_vectorstore", "Access intermediate steps": "https://python.langchain.com/docs/modules/agents/how_to/intermediate_steps", "Handle parsing errors": "https://python.langchain.com/docs/modules/agents/how_to/handle_parsing_errors", "Running Agent as an Iterator": "https://python.langchain.com/docs/modules/agents/how_to/agent_iter", "Timeouts for agents": "https://python.langchain.com/docs/modules/agents/how_to/max_time_limit", "Streaming final agent output": "https://python.langchain.com/docs/modules/agents/how_to/streaming_stdout_final_only", "Add Memory to OpenAI Functions Agent": "https://python.langchain.com/docs/modules/agents/how_to/add_memory_openai_functions", "Cap the max number of iterations": "https://python.langchain.com/docs/modules/agents/how_to/max_iterations", "Custom functions with OpenAI Functions Agent": "https://python.langchain.com/docs/modules/agents/how_to/custom-functions-with-openai-functions-agent", "Async API": "https://python.langchain.com/docs/modules/agents/how_to/async_agent", "Use ToolKits with OpenAI Functions": "https://python.langchain.com/docs/modules/agents/how_to/use_toolkits_with_openai_functions", "Human input chat model": "https://python.langchain.com/docs/modules/model_io/models/chat/human_input_chat_model", "Fake LLM": "https://python.langchain.com/docs/modules/model_io/models/llms/fake_llm", "Tracking token usage": "https://python.langchain.com/docs/modules/model_io/models/llms/token_usage_tracking", "Human input LLM": "https://python.langchain.com/docs/modules/model_io/models/llms/human_input_llm"}, "AIPluginTool": {"ChatGPT Plugins": "https://python.langchain.com/docs/integrations/tools/chatgpt_plugins"}, "DataForSeoAPIWrapper": {"DataForSeo": "https://python.langchain.com/docs/integrations/tools/dataforseo", "DataForSEO": "https://python.langchain.com/docs/integrations/providers/dataforseo"}, "Tool": {"DataForSeo": "https://python.langchain.com/docs/integrations/tools/dataforseo", "Google Serper": "https://python.langchain.com/docs/integrations/providers/google_serper", "SerpAPI": "https://python.langchain.com/docs/integrations/tools/serpapi", "Google Search": "https://python.langchain.com/docs/integrations/tools/google_search", "Zep Memory": "https://python.langchain.com/docs/integrations/memory/zep_memory", "Dynamodb Chat Message History": "https://python.langchain.com/docs/integrations/memory/dynamodb_chat_message_history", "SageMaker Tracking": 
"https://python.langchain.com/docs/integrations/providers/sagemaker_tracking", "Document Comparison": "https://python.langchain.com/docs/integrations/toolkits/document_comparison_toolkit", "Natural Language APIs": "https://python.langchain.com/docs/integrations/toolkits/openapi_nla", "Github": "https://python.langchain.com/docs/integrations/toolkits/github", "Bittensor": "https://python.langchain.com/docs/integrations/llms/bittensor", "Pydantic compatibility": "https://python.langchain.com/docs/guides/pydantic_compatibility", "Comparing Chain Outputs": "https://python.langchain.com/docs/guides/evaluation/examples/comparisons", "Agents": "https://python.langchain.com/docs/use_cases/more/agents/agents", "AutoGPT": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/autogpt", "BabyAGI with Tools": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/baby_agi_with_agent", "Plug-and-Plai": "https://python.langchain.com/docs/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval_using_plugnplai", "Wikibase Agent": "https://python.langchain.com/docs/use_cases/more/agents/agents/wikibase_agent", "SalesGPT - Your Context-Aware AI Sales Assistant With Knowledge Base": "https://python.langchain.com/docs/use_cases/more/agents/agents/sales_agent_with_context", "Custom Agent with PlugIn Retrieval": "https://python.langchain.com/docs/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval", "Agent Debates with Tools": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/two_agent_debate_tools", "Message Memory in Agent backed by a database": "https://python.langchain.com/docs/modules/memory/agent_with_memory_in_db", "Memory in Agent": "https://python.langchain.com/docs/modules/memory/agent_with_memory", "Multi-Input Tools": "https://python.langchain.com/docs/modules/agents/tools/multi_input_tool", "Defining Custom Tools": "https://python.langchain.com/docs/modules/agents/tools/custom_tools", "Self-ask with search": "https://python.langchain.com/docs/modules/agents/agent_types/self_ask_with_search", "ReAct document store": "https://python.langchain.com/docs/modules/agents/agent_types/react_docstore", "OpenAI Multi Functions Agent": "https://python.langchain.com/docs/modules/agents/agent_types/openai_multi_functions_agent", "Combine agents and vector stores": "https://python.langchain.com/docs/modules/agents/how_to/agent_vectorstore", "Custom MRKL agent": "https://python.langchain.com/docs/modules/agents/how_to/custom_mrkl_agent", "Handle parsing errors": "https://python.langchain.com/docs/modules/agents/how_to/handle_parsing_errors", "Shared memory across agents and tools": "https://python.langchain.com/docs/modules/agents/how_to/sharedmemory_for_tools", "Custom multi-action agent": "https://python.langchain.com/docs/modules/agents/how_to/custom_multi_action_agent", "Running Agent as an Iterator": "https://python.langchain.com/docs/modules/agents/how_to/agent_iter", "Timeouts for agents": "https://python.langchain.com/docs/modules/agents/how_to/max_time_limit", "Add Memory to OpenAI Functions Agent": "https://python.langchain.com/docs/modules/agents/how_to/add_memory_openai_functions", "Cap the max number of iterations": "https://python.langchain.com/docs/modules/agents/how_to/max_iterations", "Custom agent": "https://python.langchain.com/docs/modules/agents/how_to/custom_agent", "Use ToolKits with OpenAI Functions": "https://python.langchain.com/docs/modules/agents/how_to/use_toolkits_with_openai_functions", "Custom agent 
with tool retrieval": "https://python.langchain.com/docs/modules/agents/how_to/custom_agent_with_tool_retrieval"}, "SearxSearchWrapper": {"SearxNG Search": "https://python.langchain.com/docs/integrations/tools/searx_search", "SearxNG Search API": "https://python.langchain.com/docs/integrations/providers/searx"}, "GoogleSerperAPIWrapper": {"Google Serper": "https://python.langchain.com/docs/integrations/providers/google_serper", "Retrieve as you generate with FLARE": "https://python.langchain.com/docs/use_cases/question_answering/how_to/flare"}, "GooglePlacesTool": {"Google Places": "https://python.langchain.com/docs/integrations/tools/google_places"}, "HumanInputRun": {"Human as a tool": "https://python.langchain.com/docs/integrations/tools/human_tools", "!pip install bs4": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/marathon_times"}, "NucliaUnderstandingAPI": {"Nuclia Understanding": "https://python.langchain.com/docs/integrations/tools/nuclia", "Nuclia Understanding API document transformer": "https://python.langchain.com/docs/integrations/document_transformers/nuclia_transformer", "Nuclia Understanding API document loader": "https://python.langchain.com/docs/integrations/document_loaders/nuclia"}, "YahooFinanceNewsTool": {"Yahoo Finance News": "https://python.langchain.com/docs/integrations/tools/yahoo_finance_news"}, "TwilioAPIWrapper": {"Twilio": "https://python.langchain.com/docs/integrations/tools/twilio"}, "IFTTTWebhook": {"IFTTT WebHooks": "https://python.langchain.com/docs/integrations/tools/ifttt"}, "WikipediaQueryRun": {"Wikipedia": "https://python.langchain.com/docs/integrations/tools/wikipedia"}, "WikipediaAPIWrapper": {"Wikipedia": "https://python.langchain.com/docs/integrations/tools/wikipedia", "Zep Memory": "https://python.langchain.com/docs/integrations/memory/zep_memory"}, "AlphaVantageAPIWrapper": {"Alpha Vantage": "https://python.langchain.com/docs/integrations/tools/alpha_vantage"}, "TextRequestsWrapper": {"Requests": "https://python.langchain.com/docs/integrations/tools/requests", "JSON": "https://python.langchain.com/docs/integrations/toolkits/json", "OpenAPI": "https://python.langchain.com/docs/integrations/toolkits/openapi", "Tool Input Schema": "https://python.langchain.com/docs/modules/agents/tools/tool_input_validation"}, "OpenWeatherMapAPIWrapper": {"OpenWeatherMap": "https://python.langchain.com/docs/integrations/providers/openweathermap"}, "PubmedQueryRun": {"PubMed": "https://python.langchain.com/docs/integrations/tools/pubmed"}, "YouTubeSearchTool": {"YouTube": "https://python.langchain.com/docs/integrations/tools/youtube"}, "ElevenLabsText2SpeechTool": {"Eleven Labs Text2Speech": "https://python.langchain.com/docs/integrations/tools/eleven_labs_tts"}, "VectorstoreIndexCreator": {"Apify": "https://python.langchain.com/docs/integrations/tools/apify", "HuggingFace dataset": "https://python.langchain.com/docs/integrations/document_loaders/hugging_face_dataset", "Spreedly": "https://python.langchain.com/docs/integrations/document_loaders/spreedly", "Image captions": "https://python.langchain.com/docs/integrations/document_loaders/image_captions", "Figma": "https://python.langchain.com/docs/integrations/document_loaders/figma", "Apify Dataset": "https://python.langchain.com/docs/integrations/document_loaders/apify_dataset", "Iugu": "https://python.langchain.com/docs/integrations/document_loaders/iugu", "Stripe": "https://python.langchain.com/docs/integrations/document_loaders/stripe", "Modern Treasury": 
"https://python.langchain.com/docs/integrations/document_loaders/modern_treasury", "Question Answering": "https://python.langchain.com/docs/use_cases/question_answering/question_answering", "Multiple Retrieval Sources": "https://python.langchain.com/docs/use_cases/question_answering/how_to/multiple_retrieval"}, "ApifyWrapper": {"Apify": "https://python.langchain.com/docs/integrations/providers/apify"}, "ZapierToolkit": {"Zapier Natural Language Actions": "https://python.langchain.com/docs/integrations/tools/zapier"}, "ZapierNLAWrapper": {"Zapier Natural Language Actions": "https://python.langchain.com/docs/integrations/tools/zapier"}, "LLMChain": {"Zapier Natural Language Actions": "https://python.langchain.com/docs/integrations/tools/zapier", "Dall-E Image Generator": "https://python.langchain.com/docs/integrations/tools/dalle_image_generator", "Streamlit Chat Message History": "https://python.langchain.com/docs/integrations/memory/streamlit_chat_message_history", "Argilla": "https://python.langchain.com/docs/integrations/callbacks/argilla", "Comet": "https://python.langchain.com/docs/integrations/providers/comet_tracking", "Aim": "https://python.langchain.com/docs/integrations/providers/aim_tracking", "Weights & Biases": "https://python.langchain.com/docs/integrations/providers/wandb_tracking", "SageMaker Tracking": "https://python.langchain.com/docs/integrations/providers/sagemaker_tracking", "Rebuff": "https://python.langchain.com/docs/integrations/providers/rebuff", "MLflow": "https://python.langchain.com/docs/integrations/providers/mlflow_tracking", "Flyte": "https://python.langchain.com/docs/integrations/providers/flyte", "Chat Over Documents with Vectara": "https://python.langchain.com/docs/integrations/providers/vectara/vectara_chat", "Vectara Text Generation": "https://python.langchain.com/docs/integrations/providers/vectara/vectara_text_generation", "Natural Language APIs": "https://python.langchain.com/docs/integrations/toolkits/openapi_nla", "JSON": "https://python.langchain.com/docs/integrations/toolkits/json", "Figma": "https://python.langchain.com/docs/integrations/document_loaders/figma", "Predibase": "https://python.langchain.com/docs/integrations/llms/predibase", "Eden AI": "https://python.langchain.com/docs/integrations/llms/edenai", "Azure ML": "https://python.langchain.com/docs/integrations/llms/azure_ml", "Removing logical fallacies from model output": "https://python.langchain.com/docs/guides/safety/logical_fallacy_chain", "Amazon Comprehend Moderation Chain": "https://python.langchain.com/docs/guides/safety/amazon_comprehend_chain", "Custom Trajectory Evaluator": "https://python.langchain.com/docs/guides/evaluation/trajectory/custom", "Custom Pairwise Evaluator": "https://python.langchain.com/docs/guides/evaluation/comparison/custom", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/apis", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/summarization", "Retrieve from vector stores directly": "https://python.langchain.com/docs/use_cases/question_answering/how_to/vector_db_text_generation", "Improve document indexing with HyDE": "https://python.langchain.com/docs/use_cases/question_answering/how_to/hyde", "Structure answers with OpenAI functions": "https://python.langchain.com/docs/use_cases/question_answering/integrations/openai_functions_retrieval_qa", "Multi-agent authoritarian speaker selection": 
"https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/multiagent_authoritarian", "WebResearchRetriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/web_research", "Lost in the middle: The problem with long contexts": "https://python.langchain.com/docs/modules/data_connection/document_transformers/post_retrieval/long_context_reorder", "Memory in LLMChain": "https://python.langchain.com/docs/modules/memory/adding_memory", "Logging to file": "https://python.langchain.com/docs/modules/callbacks/filecallbackhandler", "XML Agent": "https://python.langchain.com/docs/modules/agents/agent_types/xml_agent", "Datetime parser": "https://python.langchain.com/docs/modules/model_io/output_parsers/datetime", "Prompt pipelining": "https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/prompts_pipelining", "Connecting to a Feature Store": "https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/connecting_to_a_feature_store", "Router": "https://python.langchain.com/docs/modules/chains/foundational/router", "Transformation": "https://python.langchain.com/docs/modules/chains/foundational/transformation", "Async API": "https://python.langchain.com/docs/modules/chains/how_to/async_chain"}, "TransformChain": {"Zapier Natural Language Actions": "https://python.langchain.com/docs/integrations/tools/zapier", "Rebuff": "https://python.langchain.com/docs/integrations/providers/rebuff", "Transformation": "https://python.langchain.com/docs/modules/chains/foundational/transformation"}, "SimpleSequentialChain": {"Zapier Natural Language Actions": "https://python.langchain.com/docs/integrations/tools/zapier", "SageMaker Tracking": "https://python.langchain.com/docs/integrations/providers/sagemaker_tracking", "Rebuff": "https://python.langchain.com/docs/integrations/providers/rebuff", "Baseten": "https://python.langchain.com/docs/integrations/llms/baseten", "Predibase": "https://python.langchain.com/docs/integrations/llms/predibase", "Eden AI": "https://python.langchain.com/docs/integrations/llms/edenai", "Replicate": "https://python.langchain.com/docs/integrations/llms/replicate", "Transformation": "https://python.langchain.com/docs/modules/chains/foundational/transformation"}, "ZapierNLARunAction": {"Zapier Natural Language Actions": "https://python.langchain.com/docs/integrations/tools/zapier"}, "GoldenQueryAPIWrapper": {"Golden Query": "https://python.langchain.com/docs/integrations/tools/golden_query", "Golden": "https://python.langchain.com/docs/integrations/providers/golden"}, "ArxivAPIWrapper": {"ArXiv": "https://python.langchain.com/docs/integrations/tools/arxiv"}, "tool": {"Metaphor Search": "https://python.langchain.com/docs/integrations/tools/metaphor_search", "LLMonitor": "https://python.langchain.com/docs/integrations/callbacks/llmonitor", "JSONFormer": "https://python.langchain.com/docs/integrations/llms/jsonformer_experimental", "Agent Trajectory": "https://python.langchain.com/docs/guides/evaluation/trajectory/trajectory_eval", "Agents": "https://python.langchain.com/docs/expression_language/cookbook/agent", "!pip install bs4": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/marathon_times", "Defining Custom Tools": "https://python.langchain.com/docs/modules/agents/tools/custom_tools", "XML Agent": "https://python.langchain.com/docs/modules/agents/agent_types/xml_agent"}, "OpenAIFunctionsAgent": {"Metaphor Search": "https://python.langchain.com/docs/integrations/tools/metaphor_search", 
"LLMonitor": "https://python.langchain.com/docs/integrations/callbacks/llmonitor", "Conversational Retrieval Agent": "https://python.langchain.com/docs/use_cases/question_answering/how_to/conversational_retrieval_agents", "Agents": "https://python.langchain.com/docs/use_cases/more/agents/agents"}, "SystemMessage": {"Metaphor Search": "https://python.langchain.com/docs/integrations/tools/metaphor_search", "SQL Chat Message History": "https://python.langchain.com/docs/integrations/memory/sql_chat_message_history", "Anthropic": "https://python.langchain.com/docs/integrations/chat/anthropic", "\ud83d\ude85 LiteLLM": "https://python.langchain.com/docs/integrations/chat/litellm", "Konko": "https://python.langchain.com/docs/integrations/chat/konko", "OpenAI": "https://python.langchain.com/docs/integrations/chat/openai", "Google Cloud Platform Vertex AI PaLM ": "https://python.langchain.com/docs/integrations/chat/google_vertex_ai_palm", "JinaChat": "https://python.langchain.com/docs/integrations/chat/jinachat", "Anyscale": "https://python.langchain.com/docs/integrations/chat/anyscale", "LLMonitor": "https://python.langchain.com/docs/integrations/callbacks/llmonitor", "Context": "https://python.langchain.com/docs/integrations/callbacks/context", "Label Studio": "https://python.langchain.com/docs/integrations/callbacks/labelstudio", "MLflow AI Gateway": "https://python.langchain.com/docs/integrations/providers/mlflow_ai_gateway", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/chatbots", "Conversational Retrieval Agent": "https://python.langchain.com/docs/use_cases/question_answering/how_to/conversational_retrieval_agents", "Structure answers with OpenAI functions": "https://python.langchain.com/docs/use_cases/question_answering/integrations/openai_functions_retrieval_qa", "Agents": "https://python.langchain.com/docs/use_cases/more/agents/agents", "CAMEL Role-Playing Autonomous Cooperative Agents": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/camel_role_playing", "Multi-Agent Simulated Environment: Petting Zoo": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/petting_zoo", "Multi-agent decentralized speaker selection": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/multiagent_bidding", "Multi-agent authoritarian speaker selection": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/multiagent_authoritarian", "Two-Player Dungeons & Dragons": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/two_player_dnd", "Multi-Player Dungeons & Dragons": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/multi_player_dnd", "Simulated Environment: Gymnasium": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/gymnasium", "Agent Debates with Tools": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/two_agent_debate_tools", "Memory in LLMChain": "https://python.langchain.com/docs/modules/memory/adding_memory", "Use ToolKits with OpenAI Functions": "https://python.langchain.com/docs/modules/agents/how_to/use_toolkits_with_openai_functions", "Prompt pipelining": "https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/prompts_pipelining", "Using OpenAI functions": "https://python.langchain.com/docs/modules/chains/how_to/openai_functions"}, "AgentExecutor": {"Metaphor Search": 
"https://python.langchain.com/docs/integrations/tools/metaphor_search", "LLMonitor": "https://python.langchain.com/docs/integrations/callbacks/llmonitor", "Jina": "https://python.langchain.com/docs/integrations/providers/jina", "PowerBI Dataset": "https://python.langchain.com/docs/integrations/toolkits/powerbi", "SQL Database": "https://python.langchain.com/docs/integrations/toolkits/sql_database", "JSON": "https://python.langchain.com/docs/integrations/toolkits/json", "Bittensor": "https://python.langchain.com/docs/integrations/llms/bittensor", "Conversational Retrieval Agent": "https://python.langchain.com/docs/use_cases/question_answering/how_to/conversational_retrieval_agents", "Agents": "https://python.langchain.com/docs/expression_language/cookbook/agent", "BabyAGI with Tools": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/baby_agi_with_agent", "Plug-and-Plai": "https://python.langchain.com/docs/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval_using_plugnplai", "Wikibase Agent": "https://python.langchain.com/docs/use_cases/more/agents/agents/wikibase_agent", "SalesGPT - Your Context-Aware AI Sales Assistant With Knowledge Base": "https://python.langchain.com/docs/use_cases/more/agents/agents/sales_agent_with_context", "Custom Agent with PlugIn Retrieval": "https://python.langchain.com/docs/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/qa_structured/sql", "SQL": "https://python.langchain.com/docs/use_cases/sql/sql", "Message Memory in Agent backed by a database": "https://python.langchain.com/docs/modules/memory/agent_with_memory_in_db", "Memory in Agent": "https://python.langchain.com/docs/modules/memory/agent_with_memory", "XML Agent": "https://python.langchain.com/docs/modules/agents/agent_types/xml_agent", "Custom MRKL agent": "https://python.langchain.com/docs/modules/agents/how_to/custom_mrkl_agent", "Shared memory across agents and tools": "https://python.langchain.com/docs/modules/agents/how_to/sharedmemory_for_tools", "Custom multi-action agent": "https://python.langchain.com/docs/modules/agents/how_to/custom_multi_action_agent", "Running Agent as an Iterator": "https://python.langchain.com/docs/modules/agents/how_to/agent_iter", "Custom agent": "https://python.langchain.com/docs/modules/agents/how_to/custom_agent", "Custom agent with tool retrieval": "https://python.langchain.com/docs/modules/agents/how_to/custom_agent_with_tool_retrieval"}, "MetaphorSearchAPIWrapper": {"Metaphor Search": "https://python.langchain.com/docs/integrations/tools/metaphor_search"}, "PlayWrightBrowserToolkit": {"Metaphor Search": "https://python.langchain.com/docs/integrations/tools/metaphor_search", "PlayWright Browser": "https://python.langchain.com/docs/integrations/toolkits/playwright"}, "create_async_playwright_browser": {"Metaphor Search": "https://python.langchain.com/docs/integrations/tools/metaphor_search", "PlayWright Browser": "https://python.langchain.com/docs/integrations/toolkits/playwright"}, "MetaphorSearchResults": {"Metaphor Search": "https://python.langchain.com/docs/integrations/tools/metaphor_search"}, "SerpAPIWrapper": {"SerpAPI": "https://python.langchain.com/docs/integrations/providers/serpapi", "Bittensor": "https://python.langchain.com/docs/integrations/llms/bittensor", "AutoGPT": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/autogpt"}, "GraphQLAPIWrapper": {"GraphQL": 
"https://python.langchain.com/docs/integrations/tools/graphql"}, "DuckDuckGoSearchRun": {"DuckDuckGo Search": "https://python.langchain.com/docs/integrations/tools/ddg", "Github": "https://python.langchain.com/docs/integrations/toolkits/github", "!pip install bs4": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/marathon_times", "Using tools": "https://python.langchain.com/docs/expression_language/cookbook/tools"}, "DuckDuckGoSearchResults": {"DuckDuckGo Search": "https://python.langchain.com/docs/integrations/tools/ddg"}, "DuckDuckGoSearchAPIWrapper": {"DuckDuckGo Search": "https://python.langchain.com/docs/integrations/tools/ddg"}, "ConversationBufferMemory": {"Gradio": "https://python.langchain.com/docs/integrations/tools/gradio_tools", "SceneXplain": "https://python.langchain.com/docs/integrations/tools/sceneXplain", "Xata chat memory": "https://python.langchain.com/docs/integrations/memory/xata_chat_message_history", "Streamlit Chat Message History": "https://python.langchain.com/docs/integrations/memory/streamlit_chat_message_history", "Dynamodb Chat Message History": "https://python.langchain.com/docs/integrations/memory/dynamodb_chat_message_history", "Chat Over Documents with Vectara": "https://python.langchain.com/docs/integrations/providers/vectara/vectara_chat", "Bittensor": "https://python.langchain.com/docs/integrations/llms/bittensor", "Bedrock": "https://python.langchain.com/docs/integrations/llms/bedrock", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/chatbots", "Structure answers with OpenAI functions": "https://python.langchain.com/docs/use_cases/question_answering/integrations/openai_functions_retrieval_qa", "Agents": "https://python.langchain.com/docs/use_cases/more/agents/agents", "Agent Debates with Tools": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/two_agent_debate_tools", "Message Memory in Agent backed by a database": "https://python.langchain.com/docs/modules/memory/agent_with_memory_in_db", "Memory in the Multi-Input Chain": "https://python.langchain.com/docs/modules/memory/adding_memory_chain_multiple_inputs", "Memory in LLMChain": "https://python.langchain.com/docs/modules/memory/adding_memory", "Multiple Memory classes": "https://python.langchain.com/docs/modules/memory/multiple_memory", "Customizing Conversational Memory": "https://python.langchain.com/docs/modules/memory/conversational_customization", "Memory in Agent": "https://python.langchain.com/docs/modules/memory/agent_with_memory", "Shared memory across agents and tools": "https://python.langchain.com/docs/modules/agents/how_to/sharedmemory_for_tools", "Add Memory to OpenAI Functions Agent": "https://python.langchain.com/docs/modules/agents/how_to/add_memory_openai_functions", "First we add a step to load memory": "https://python.langchain.com/docs/expression_language/cookbook/retrieval", "Adding memory": "https://python.langchain.com/docs/expression_language/cookbook/memory"}, "SceneXplainTool": {"SceneXplain": "https://python.langchain.com/docs/integrations/tools/sceneXplain"}, "WolframAlphaAPIWrapper": {"Wolfram Alpha": "https://python.langchain.com/docs/integrations/providers/wolfram_alpha"}, "load_huggingface_tool": {"HuggingFace Hub Tools": "https://python.langchain.com/docs/integrations/tools/huggingface_tools"}, "EdenAiSpeechToTextTool": {"Eden AI": "https://python.langchain.com/docs/integrations/tools/edenai_tools"}, "EdenAiTextToSpeechTool": {"Eden AI": 
"https://python.langchain.com/docs/integrations/tools/edenai_tools"}, "EdenAiExplicitImageTool": {"Eden AI": "https://python.langchain.com/docs/integrations/tools/edenai_tools"}, "EdenAiObjectDetectionTool": {"Eden AI": "https://python.langchain.com/docs/integrations/tools/edenai_tools"}, "EdenAiParsingIDTool": {"Eden AI": "https://python.langchain.com/docs/integrations/tools/edenai_tools"}, "EdenAiParsingInvoiceTool": {"Eden AI": "https://python.langchain.com/docs/integrations/tools/edenai_tools"}, "EdenAiTextModerationTool": {"Eden AI": "https://python.langchain.com/docs/integrations/tools/edenai_tools"}, "EdenAI": {"Eden AI": "https://python.langchain.com/docs/integrations/llms/edenai"}, "GoogleSearchAPIWrapper": {"Google Search": "https://python.langchain.com/docs/integrations/providers/google_search", "Bittensor": "https://python.langchain.com/docs/integrations/llms/bittensor", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/web_scraping", "Agents": "https://python.langchain.com/docs/use_cases/more/agents/agents", "WebResearchRetriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/web_research", "Message Memory in Agent backed by a database": "https://python.langchain.com/docs/modules/memory/agent_with_memory_in_db", "Memory in Agent": "https://python.langchain.com/docs/modules/memory/agent_with_memory", "Shared memory across agents and tools": "https://python.langchain.com/docs/modules/agents/how_to/sharedmemory_for_tools"}, "BingSearchAPIWrapper": {"Bing Search": "https://python.langchain.com/docs/integrations/tools/bing_search"}, "DallEAPIWrapper": {"Dall-E Image Generator": "https://python.langchain.com/docs/integrations/tools/dalle_image_generator"}, "ShellTool": {"Shell (bash)": "https://python.langchain.com/docs/integrations/tools/bash", "Human-in-the-loop Tool Validation": "https://python.langchain.com/docs/modules/agents/tools/human_approval"}, "ReadFileTool": {"File System": "https://python.langchain.com/docs/integrations/tools/filesystem", "AutoGPT": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/autogpt", "!pip install bs4": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/marathon_times"}, "CopyFileTool": {"File System": "https://python.langchain.com/docs/integrations/tools/filesystem"}, "DeleteFileTool": {"File System": "https://python.langchain.com/docs/integrations/tools/filesystem"}, "MoveFileTool": {"File System": "https://python.langchain.com/docs/integrations/tools/filesystem", "Tools as OpenAI Functions": "https://python.langchain.com/docs/modules/agents/tools/tools_as_openai_functions"}, "WriteFileTool": {"File System": "https://python.langchain.com/docs/integrations/tools/filesystem", "AutoGPT": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/autogpt", "!pip install bs4": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/marathon_times"}, "ListDirectoryTool": {"File System": "https://python.langchain.com/docs/integrations/tools/filesystem"}, "FileManagementToolkit": {"File System": "https://python.langchain.com/docs/integrations/tools/filesystem"}, "BraveSearch": {"Brave Search": "https://python.langchain.com/docs/integrations/providers/brave_search"}, "RedisChatMessageHistory": {"Redis Chat Message History": "https://python.langchain.com/docs/integrations/memory/redis_chat_message_history", "Message Memory in Agent backed by a database": 
"https://python.langchain.com/docs/modules/memory/agent_with_memory_in_db"}, "ConversationChain": {"Entity Memory with SQLite storage": "https://python.langchain.com/docs/integrations/memory/entity_memory_with_sqlite", "Figma": "https://python.langchain.com/docs/integrations/document_loaders/figma", "Bedrock": "https://python.langchain.com/docs/integrations/llms/bedrock", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/chatbots", "Agent Debates with Tools": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/two_agent_debate_tools", "Multiple Memory classes": "https://python.langchain.com/docs/modules/memory/multiple_memory", "Customizing Conversational Memory": "https://python.langchain.com/docs/modules/memory/conversational_customization", "Conversation Knowledge Graph": "https://python.langchain.com/docs/modules/memory/types/kg", "Conversation Token Buffer": "https://python.langchain.com/docs/modules/memory/types/token_buffer", "Conversation Summary Buffer": "https://python.langchain.com/docs/modules/memory/types/summary_buffer", "Router": "https://python.langchain.com/docs/modules/chains/foundational/router"}, "ConversationEntityMemory": {"Entity Memory with SQLite storage": "https://python.langchain.com/docs/integrations/memory/entity_memory_with_sqlite"}, "SQLiteEntityStore": {"Entity Memory with SQLite storage": "https://python.langchain.com/docs/integrations/memory/entity_memory_with_sqlite"}, "ENTITY_MEMORY_CONVERSATION_TEMPLATE": {"Entity Memory with SQLite storage": "https://python.langchain.com/docs/integrations/memory/entity_memory_with_sqlite"}, "PostgresChatMessageHistory": {"Postgres Chat Message History": "https://python.langchain.com/docs/integrations/memory/postgres_chat_message_history"}, "MomentoChatMessageHistory": {"Momento Chat Message History": "https://python.langchain.com/docs/integrations/memory/momento_chat_message_history"}, "MongoDBChatMessageHistory": {"Mongodb Chat Message History": "https://python.langchain.com/docs/integrations/memory/mongodb_chat_message_history"}, "XataChatMessageHistory": {"Xata chat memory": "https://python.langchain.com/docs/integrations/memory/xata_chat_message_history"}, "XataVectorStore": {"Xata chat memory": "https://python.langchain.com/docs/integrations/memory/xata_chat_message_history", "Xata": "https://python.langchain.com/docs/integrations/vectorstores/xata"}, "create_retriever_tool": {"Xata chat memory": "https://python.langchain.com/docs/integrations/memory/xata_chat_message_history", "Conversational Retrieval Agent": "https://python.langchain.com/docs/use_cases/question_answering/how_to/conversational_retrieval_agents", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/qa_structured/sql", "SQL": "https://python.langchain.com/docs/use_cases/sql/sql"}, "CassandraChatMessageHistory": {"Cassandra Chat Message History": "https://python.langchain.com/docs/integrations/memory/cassandra_chat_message_history", "Cassandra": "https://python.langchain.com/docs/integrations/providers/cassandra"}, "SQLChatMessageHistory": {"SQL Chat Message History": "https://python.langchain.com/docs/integrations/memory/sql_chat_message_history"}, "BaseMessage": {"SQL Chat Message History": "https://python.langchain.com/docs/integrations/memory/sql_chat_message_history", "CAMEL Role-Playing Autonomous Cooperative Agents": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/camel_role_playing", 
"Multi-agent decentralized speaker selection": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/multiagent_bidding", "Multi-agent authoritarian speaker selection": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/multiagent_authoritarian", "Multi-Player Dungeons & Dragons": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/multi_player_dnd", "Simulated Environment: Gymnasium": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/gymnasium", "Agent Debates with Tools": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/two_agent_debate_tools"}, "BaseMessageConverter": {"SQL Chat Message History": "https://python.langchain.com/docs/integrations/memory/sql_chat_message_history"}, "MotorheadMemory": {"Mot\u00f6rhead Memory": "https://python.langchain.com/docs/integrations/memory/motorhead_memory", "Mot\u00f6rhead Memory (Managed)": "https://python.langchain.com/docs/integrations/memory/motorhead_memory_managed"}, "StreamlitChatMessageHistory": {"Streamlit Chat Message History": "https://python.langchain.com/docs/integrations/memory/streamlit_chat_message_history"}, "DynamoDBChatMessageHistory": {"Dynamodb Chat Message History": "https://python.langchain.com/docs/integrations/memory/dynamodb_chat_message_history"}, "PythonREPL": {"Dynamodb Chat Message History": "https://python.langchain.com/docs/integrations/memory/dynamodb_chat_message_history", "Python": "https://python.langchain.com/docs/integrations/toolkits/python", "Code writing": "https://python.langchain.com/docs/expression_language/cookbook/code_writing"}, "RocksetChatMessageHistory": {"Rockset Chat Message History": "https://python.langchain.com/docs/integrations/memory/rockset_chat_message_history"}, "AzureMLChatOnlineEndpoint": {"AzureML Chat Online Endpoint": "https://python.langchain.com/docs/integrations/chat/azureml_chat_endpoint"}, "LlamaContentFormatter": {"AzureML Chat Online Endpoint": "https://python.langchain.com/docs/integrations/chat/azureml_chat_endpoint"}, "ChatAnthropic": {"Anthropic": "https://python.langchain.com/docs/integrations/chat/anthropic", "Log10": "https://python.langchain.com/docs/integrations/providers/log10", "PlayWright Browser": "https://python.langchain.com/docs/integrations/toolkits/playwright", "Fallbacks": "https://python.langchain.com/docs/guides/fallbacks", "Agent Trajectory": "https://python.langchain.com/docs/guides/evaluation/trajectory/trajectory_eval", "Custom Pairwise Evaluator": "https://python.langchain.com/docs/guides/evaluation/comparison/custom", "Pairwise String Comparison": "https://python.langchain.com/docs/guides/evaluation/comparison/pairwise_string", "Criteria Evaluation": "https://python.langchain.com/docs/guides/evaluation/string/criteria_eval_chain", "XML Agent": "https://python.langchain.com/docs/modules/agents/agent_types/xml_agent", "Few-shot examples for chat models": "https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/few_shot_examples_chat", "Agents": "https://python.langchain.com/docs/expression_language/cookbook/agent"}, "SystemMessagePromptTemplate": {"Anthropic": "https://python.langchain.com/docs/integrations/chat/anthropic", "\ud83d\ude85 LiteLLM": "https://python.langchain.com/docs/integrations/chat/litellm", "Konko": "https://python.langchain.com/docs/integrations/chat/konko", "OpenAI": "https://python.langchain.com/docs/integrations/chat/openai", "Google Cloud Platform Vertex AI PaLM ": 
"https://python.langchain.com/docs/integrations/chat/google_vertex_ai_palm", "JinaChat": "https://python.langchain.com/docs/integrations/chat/jinachat", "Figma": "https://python.langchain.com/docs/integrations/document_loaders/figma", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/chatbots", "CAMEL Role-Playing Autonomous Cooperative Agents": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/camel_role_playing", "Code writing": "https://python.langchain.com/docs/expression_language/cookbook/code_writing"}, "AIMessagePromptTemplate": {"Anthropic": "https://python.langchain.com/docs/integrations/chat/anthropic", "\ud83d\ude85 LiteLLM": "https://python.langchain.com/docs/integrations/chat/litellm", "Konko": "https://python.langchain.com/docs/integrations/chat/konko", "OpenAI": "https://python.langchain.com/docs/integrations/chat/openai", "JinaChat": "https://python.langchain.com/docs/integrations/chat/jinachat", "Figma": "https://python.langchain.com/docs/integrations/document_loaders/figma"}, "HumanMessagePromptTemplate": {"Anthropic": "https://python.langchain.com/docs/integrations/chat/anthropic", "\ud83d\ude85 LiteLLM": "https://python.langchain.com/docs/integrations/chat/litellm", "Konko": "https://python.langchain.com/docs/integrations/chat/konko", "OpenAI": "https://python.langchain.com/docs/integrations/chat/openai", "Google Cloud Platform Vertex AI PaLM ": "https://python.langchain.com/docs/integrations/chat/google_vertex_ai_palm", "JinaChat": "https://python.langchain.com/docs/integrations/chat/jinachat", "Context": "https://python.langchain.com/docs/integrations/callbacks/context", "Figma": "https://python.langchain.com/docs/integrations/document_loaders/figma", "Fireworks": "https://python.langchain.com/docs/integrations/llms/fireworks", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/extraction", "Structure answers with OpenAI functions": "https://python.langchain.com/docs/use_cases/question_answering/integrations/openai_functions_retrieval_qa", "CAMEL Role-Playing Autonomous Cooperative Agents": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/camel_role_playing", "Multi-agent authoritarian speaker selection": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/multiagent_authoritarian", "Memory in LLMChain": "https://python.langchain.com/docs/modules/memory/adding_memory", "Retry parser": "https://python.langchain.com/docs/modules/model_io/output_parsers/retry", "Pydantic (JSON) parser": "https://python.langchain.com/docs/modules/model_io/output_parsers/pydantic", "Prompt pipelining": "https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/prompts_pipelining", "Using OpenAI functions": "https://python.langchain.com/docs/modules/chains/how_to/openai_functions", "Code writing": "https://python.langchain.com/docs/expression_language/cookbook/code_writing"}, "CallbackManager": {"Anthropic": "https://python.langchain.com/docs/integrations/chat/anthropic", "\ud83d\ude85 LiteLLM": "https://python.langchain.com/docs/integrations/chat/litellm", "Ollama": "https://python.langchain.com/docs/integrations/llms/ollama", "Llama.cpp": "https://python.langchain.com/docs/integrations/llms/llamacpp", "Titan Takeoff": "https://python.langchain.com/docs/integrations/llms/titan_takeoff", "Run LLMs locally": "https://python.langchain.com/docs/guides/local_llms", "Set env var OPENAI_API_KEY or load from 
a .env file": "https://python.langchain.com/docs/use_cases/code_understanding", "Use local LLMs": "https://python.langchain.com/docs/use_cases/question_answering/how_to/local_retrieval_qa", "WebResearchRetriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/web_research"}, "StreamingStdOutCallbackHandler": {"Anthropic": "https://python.langchain.com/docs/integrations/chat/anthropic", "\ud83d\ude85 LiteLLM": "https://python.langchain.com/docs/integrations/chat/litellm", "Ollama": "https://python.langchain.com/docs/integrations/llms/ollama", "GPT4All": "https://python.langchain.com/docs/integrations/llms/gpt4all", "Arthur": "https://python.langchain.com/docs/integrations/providers/arthur_tracking", "Chat Over Documents with Vectara": "https://python.langchain.com/docs/integrations/providers/vectara/vectara_chat", "TextGen": "https://python.langchain.com/docs/integrations/llms/textgen", "Llama.cpp": "https://python.langchain.com/docs/integrations/llms/llamacpp", "Titan Takeoff": "https://python.langchain.com/docs/integrations/llms/titan_takeoff", "Eden AI": "https://python.langchain.com/docs/integrations/llms/edenai", "C Transformers": "https://python.langchain.com/docs/integrations/llms/ctransformers", "Huggingface TextGen Inference": "https://python.langchain.com/docs/integrations/llms/huggingface_textgen_inference", "Replicate": "https://python.langchain.com/docs/integrations/llms/replicate", "Run LLMs locally": "https://python.langchain.com/docs/guides/local_llms", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/code_understanding", "Use local LLMs": "https://python.langchain.com/docs/use_cases/question_answering/how_to/local_retrieval_qa", "WebResearchRetriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/web_research"}, "ChatLiteLLM": {"\ud83d\ude85 LiteLLM": "https://python.langchain.com/docs/integrations/chat/litellm"}, "create_tagging_chain": {"Llama API": "https://python.langchain.com/docs/integrations/chat/llama_api", "Anthropic Functions": "https://python.langchain.com/docs/integrations/chat/anthropic_functions", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/tagging"}, "ChatKonko": {"Konko": "https://python.langchain.com/docs/integrations/chat/konko"}, "ChatVertexAI": {"Google Cloud Platform Vertex AI PaLM ": "https://python.langchain.com/docs/integrations/chat/google_vertex_ai_palm"}, "BedrockChat": {"Bedrock Chat": "https://python.langchain.com/docs/integrations/chat/bedrock"}, "JinaChat": {"JinaChat": "https://python.langchain.com/docs/integrations/chat/jinachat"}, "ChatOllama": {"Ollama": "https://python.langchain.com/docs/integrations/chat/ollama"}, "LLMResult": {"Ollama": "https://python.langchain.com/docs/integrations/llms/ollama", "Async callbacks": "https://python.langchain.com/docs/modules/callbacks/async_callbacks"}, "BaseCallbackHandler": {"Ollama": "https://python.langchain.com/docs/integrations/llms/ollama", "Custom callback handlers": "https://python.langchain.com/docs/modules/callbacks/custom_callbacks", "Multiple callback handlers": "https://python.langchain.com/docs/modules/callbacks/multiple_callbacks", "Async callbacks": "https://python.langchain.com/docs/modules/callbacks/async_callbacks", "Streaming final agent output": "https://python.langchain.com/docs/modules/agents/how_to/streaming_stdout_final_only"}, "AzureChatOpenAI": {"Azure": 
"https://python.langchain.com/docs/integrations/chat/azure_chat_openai", "Azure OpenAI": "https://python.langchain.com/docs/integrations/providers/azure_openai"}, "get_openai_callback": {"Azure": "https://python.langchain.com/docs/integrations/chat/azure_chat_openai", "Token counting": "https://python.langchain.com/docs/modules/callbacks/token_counting", "Tracking token usage": "https://python.langchain.com/docs/modules/model_io/models/llms/token_usage_tracking", "Run arbitrary functions": "https://python.langchain.com/docs/expression_language/how_to/functions"}, "QianfanChatEndpoint": {"Baidu Qianfan": "https://python.langchain.com/docs/integrations/chat/baidu_qianfan_endpoint"}, "ErnieBotChat": {"ERNIE-Bot Chat": "https://python.langchain.com/docs/integrations/chat/ernie"}, "PromptLayerChatOpenAI": {"PromptLayer ChatOpenAI": "https://python.langchain.com/docs/integrations/chat/promptlayer_chatopenai"}, "ChatAnyscale": {"Anyscale": "https://python.langchain.com/docs/integrations/chat/anyscale"}, "create_extraction_chain": {"Anthropic Functions": "https://python.langchain.com/docs/integrations/chat/anthropic_functions", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/extraction"}, "DeepEvalCallbackHandler": {"Confident": "https://python.langchain.com/docs/integrations/callbacks/confident"}, "CharacterTextSplitter": {"Confident": "https://python.langchain.com/docs/integrations/callbacks/confident", "Hugging Face": "https://python.langchain.com/docs/integrations/providers/huggingface", "OpenAI": "https://python.langchain.com/docs/integrations/providers/openai", "Elasticsearch": "https://python.langchain.com/docs/integrations/vectorstores/elasticsearch", "Vectara Text Generation": "https://python.langchain.com/docs/integrations/providers/vectara/vectara_text_generation", "Document Comparison": "https://python.langchain.com/docs/integrations/toolkits/document_comparison_toolkit", "Vectorstore": "https://python.langchain.com/docs/integrations/toolkits/vectorstore", "LanceDB": "https://python.langchain.com/docs/integrations/vectorstores/lancedb", "sqlite-vss": "https://python.langchain.com/docs/integrations/vectorstores/sqlitevss", "Weaviate": "https://python.langchain.com/docs/integrations/vectorstores/weaviate", "DashVector": "https://python.langchain.com/docs/integrations/vectorstores/dashvector", "ScaNN": "https://python.langchain.com/docs/integrations/vectorstores/scann", "Xata": "https://python.langchain.com/docs/integrations/vectorstores/xata", "Vectara": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/vectara_self_query", "PGVector": "https://python.langchain.com/docs/integrations/vectorstores/pgvector", "Rockset": "https://python.langchain.com/docs/integrations/vectorstores/rockset", "DingoDB": "https://python.langchain.com/docs/integrations/vectorstores/dingo", "Zilliz": "https://python.langchain.com/docs/integrations/vectorstores/zilliz", "SingleStoreDB": "https://python.langchain.com/docs/integrations/vectorstores/singlestoredb", "Annoy": "https://python.langchain.com/docs/integrations/vectorstores/annoy", "Typesense": "https://python.langchain.com/docs/integrations/vectorstores/typesense", "Activeloop Deep Lake": "https://python.langchain.com/docs/integrations/vectorstores/activeloop_deeplake", "Neo4j Vector Index": "https://python.langchain.com/docs/integrations/vectorstores/neo4jvector", "Tair": "https://python.langchain.com/docs/integrations/vectorstores/tair", "Chroma": 
"https://python.langchain.com/docs/integrations/vectorstores/chroma", "Alibaba Cloud OpenSearch": "https://python.langchain.com/docs/integrations/vectorstores/alibabacloud_opensearch", "Baidu Cloud VectorSearch": "https://python.langchain.com/docs/integrations/vectorstores/baiducloud_vector_search", "StarRocks": "https://python.langchain.com/docs/integrations/vectorstores/starrocks", "scikit-learn": "https://python.langchain.com/docs/integrations/vectorstores/sklearn", "Tencent Cloud VectorDB": "https://python.langchain.com/docs/integrations/vectorstores/tencentvectordb", "DocArray HnswSearch": "https://python.langchain.com/docs/integrations/vectorstores/docarray_hnsw", "MyScale": "https://python.langchain.com/docs/integrations/vectorstores/myscale", "ClickHouse": "https://python.langchain.com/docs/integrations/vectorstores/clickhouse", "Qdrant": "https://python.langchain.com/docs/integrations/vectorstores/qdrant", "Tigris": "https://python.langchain.com/docs/integrations/vectorstores/tigris", "AwaDB": "https://python.langchain.com/docs/integrations/vectorstores/awadb", "Supabase (Postgres)": "https://python.langchain.com/docs/integrations/vectorstores/supabase", "OpenSearch": "https://python.langchain.com/docs/integrations/vectorstores/opensearch", "Pinecone": "https://python.langchain.com/docs/integrations/vectorstores/pinecone", "BagelDB": "https://python.langchain.com/docs/integrations/vectorstores/bageldb", "Azure Cognitive Search": "https://python.langchain.com/docs/integrations/vectorstores/azuresearch", "Cassandra": "https://python.langchain.com/docs/integrations/vectorstores/cassandra", "USearch": "https://python.langchain.com/docs/integrations/vectorstores/usearch", "Milvus": "https://python.langchain.com/docs/integrations/vectorstores/milvus", "Marqo": "https://python.langchain.com/docs/integrations/vectorstores/marqo", "DocArray InMemorySearch": "https://python.langchain.com/docs/integrations/vectorstores/docarray_in_memory", "Postgres Embedding": "https://python.langchain.com/docs/integrations/vectorstores/pgembedding", "Faiss": "https://python.langchain.com/docs/integrations/vectorstores/faiss", "Epsilla": "https://python.langchain.com/docs/integrations/vectorstores/epsilla", "AnalyticDB": "https://python.langchain.com/docs/integrations/vectorstores/analyticdb", "Hologres": "https://python.langchain.com/docs/integrations/vectorstores/hologres", "MongoDB Atlas": "https://python.langchain.com/docs/integrations/vectorstores/mongodb_atlas", "Meilisearch": "https://python.langchain.com/docs/integrations/vectorstores/meilisearch", "Figma": "https://python.langchain.com/docs/integrations/document_loaders/figma", "Psychic": "https://python.langchain.com/docs/integrations/document_loaders/psychic", "Manifest": "https://python.langchain.com/docs/integrations/llms/manifest", "LLM Caching integrations": "https://python.langchain.com/docs/integrations/llms/llm_caching", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/summarization", "Conversational Retrieval Agent": "https://python.langchain.com/docs/use_cases/question_answering/how_to/conversational_retrieval_agents", "Retrieve from vector stores directly": "https://python.langchain.com/docs/use_cases/question_answering/how_to/vector_db_text_generation", "Improve document indexing with HyDE": "https://python.langchain.com/docs/use_cases/question_answering/how_to/hyde", "Analysis of Twitter the-algorithm source code with LangChain, GPT4 and Activeloop's Deep Lake": 
"https://python.langchain.com/docs/use_cases/question_answering/how_to/code/twitter-the-algorithm-analysis-deeplake", "Use LangChain, GPT and Activeloop's Deep Lake to work with code base": "https://python.langchain.com/docs/use_cases/question_answering/how_to/code/code-analysis-deeplake", "Structure answers with OpenAI functions": "https://python.langchain.com/docs/use_cases/question_answering/integrations/openai_functions_retrieval_qa", "QA using Activeloop's DeepLake": "https://python.langchain.com/docs/use_cases/question_answering/integrations/semantic-search-over-chat", "SalesGPT - Your Context-Aware AI Sales Assistant With Knowledge Base": "https://python.langchain.com/docs/use_cases/more/agents/agents/sales_agent_with_context", "Indexing": "https://python.langchain.com/docs/modules/data_connection/indexing", "Caching": "https://python.langchain.com/docs/modules/data_connection/text_embedding/caching_embeddings", "Split by tokens ": "https://python.langchain.com/docs/modules/data_connection/document_transformers/text_splitters/split_by_token", "Memory in the Multi-Input Chain": "https://python.langchain.com/docs/modules/memory/adding_memory_chain_multiple_inputs", "Combine agents and vector stores": "https://python.langchain.com/docs/modules/agents/how_to/agent_vectorstore", "Loading from LangChainHub": "https://python.langchain.com/docs/modules/chains/how_to/from_hub"}, "LLMonitorCallbackHandler": {"LLMonitor": "https://python.langchain.com/docs/integrations/callbacks/llmonitor"}, "ContextCallbackHandler": {"Context": "https://python.langchain.com/docs/integrations/callbacks/context"}, "LabelStudioCallbackHandler": {"Label Studio": "https://python.langchain.com/docs/integrations/callbacks/labelstudio"}, "ArgillaCallbackHandler": {"Argilla": "https://python.langchain.com/docs/integrations/providers/argilla"}, "StdOutCallbackHandler": {"Argilla": "https://python.langchain.com/docs/integrations/callbacks/argilla", "Comet": "https://python.langchain.com/docs/integrations/providers/comet_tracking", "Aim": "https://python.langchain.com/docs/integrations/providers/aim_tracking", "Weights & Biases": "https://python.langchain.com/docs/integrations/providers/wandb_tracking", "ClearML": "https://python.langchain.com/docs/integrations/providers/clearml_tracking", "OpaquePrompts": "https://python.langchain.com/docs/integrations/llms/opaqueprompts", "Vector SQL Retriever with MyScale": "https://python.langchain.com/docs/use_cases/qa_structured/integrations/myscale_vector_sql", "Async API": "https://python.langchain.com/docs/modules/agents/how_to/async_agent", "Custom chain": "https://python.langchain.com/docs/modules/chains/how_to/custom_chain"}, "PromptLayerCallbackHandler": {"PromptLayer": "https://python.langchain.com/docs/integrations/callbacks/promptlayer"}, "GPT4All": {"PromptLayer": "https://python.langchain.com/docs/integrations/callbacks/promptlayer", "GPT4All": "https://python.langchain.com/docs/integrations/llms/gpt4all", "Run LLMs locally": "https://python.langchain.com/docs/guides/local_llms", "Use local LLMs": "https://python.langchain.com/docs/use_cases/question_answering/how_to/local_retrieval_qa"}, "StreamlitCallbackHandler": {"Streamlit": "https://python.langchain.com/docs/integrations/callbacks/.ipynb_checkpoints/streamlit-checkpoint", "GPT4All": "https://python.langchain.com/docs/integrations/providers/gpt4all"}, "InfinoCallbackHandler": {"Infino": "https://python.langchain.com/docs/integrations/providers/infino"}, "FigmaFileLoader": {"Figma": 
"https://python.langchain.com/docs/integrations/document_loaders/figma"}, "AzureOpenAI": {"Azure OpenAI": "https://python.langchain.com/docs/integrations/llms/azure_openai", "OpenAI": "https://python.langchain.com/docs/integrations/providers/openai"}, "MyScale": {"MyScale": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/myscale_self_query"}, "Baseten": {"Baseten": "https://python.langchain.com/docs/integrations/llms/baseten"}, "WeatherDataLoader": {"Weather": "https://python.langchain.com/docs/integrations/document_loaders/weather"}, "Tair": {"Tair": "https://python.langchain.com/docs/integrations/vectorstores/tair"}, "UnstructuredWordDocumentLoader": {"Microsoft Word": "https://python.langchain.com/docs/integrations/document_loaders/microsoft_word"}, "CollegeConfidentialLoader": {"College Confidential": "https://python.langchain.com/docs/integrations/document_loaders/college_confidential"}, "RWKV": {"RWKV-4": "https://python.langchain.com/docs/integrations/providers/rwkv"}, "GoogleDriveLoader": {"Google Drive": "https://python.langchain.com/docs/integrations/document_loaders/google_drive"}, "Fireworks": {"Fireworks": "https://python.langchain.com/docs/integrations/llms/fireworks"}, "DeepLake": {"Activeloop Deep Lake": "https://python.langchain.com/docs/integrations/vectorstores/activeloop_deeplake", "Analysis of Twitter the-algorithm source code with LangChain, GPT4 and Activeloop's Deep Lake": "https://python.langchain.com/docs/use_cases/question_answering/how_to/code/twitter-the-algorithm-analysis-deeplake", "Use LangChain, GPT and Activeloop's Deep Lake to work with code base": "https://python.langchain.com/docs/use_cases/question_answering/how_to/code/code-analysis-deeplake", "QA using Activeloop's DeepLake": "https://python.langchain.com/docs/use_cases/question_answering/integrations/semantic-search-over-chat", "Deep Lake": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/activeloop_deeplake_self_query"}, "AmazonAPIGateway": {"Amazon API Gateway": "https://python.langchain.com/docs/integrations/llms/amazon_api_gateway"}, "UnstructuredPowerPointLoader": {"Microsoft PowerPoint": "https://python.langchain.com/docs/integrations/document_loaders/microsoft_powerpoint"}, "CometCallbackHandler": {"Comet": "https://python.langchain.com/docs/integrations/providers/comet_tracking"}, "CTransformers": {"C Transformers": "https://python.langchain.com/docs/integrations/llms/ctransformers"}, "BiliBiliLoader": {"BiliBili": "https://python.langchain.com/docs/integrations/document_loaders/bilibili"}, "MongoDBAtlasVectorSearch": {"MongoDB Atlas": "https://python.langchain.com/docs/integrations/vectorstores/mongodb_atlas"}, "SupabaseVectorStore": {"Supabase (Postgres)": "https://python.langchain.com/docs/integrations/vectorstores/supabase", "Supabase": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/supabase_self_query"}, "DiffbotLoader": {"Diffbot": "https://python.langchain.com/docs/integrations/document_loaders/diffbot"}, "DeepSparse": {"DeepSparse": "https://python.langchain.com/docs/integrations/llms/deepsparse"}, "AimCallbackHandler": {"Aim": "https://python.langchain.com/docs/integrations/providers/aim_tracking"}, "ModernTreasuryLoader": {"Modern Treasury": "https://python.langchain.com/docs/integrations/document_loaders/modern_treasury"}, "FacebookChatLoader": {"Facebook Chat": "https://python.langchain.com/docs/integrations/document_loaders/facebook_chat"}, "Banana": {"Banana": 
"https://python.langchain.com/docs/integrations/llms/banana"}, "HuggingFacePipeline": {"Hugging Face": "https://python.langchain.com/docs/integrations/providers/huggingface", "Hugging Face Local Pipelines": "https://python.langchain.com/docs/integrations/llms/huggingface_pipelines", "RELLM": "https://python.langchain.com/docs/integrations/llms/rellm_experimental", "JSONFormer": "https://python.langchain.com/docs/integrations/llms/jsonformer_experimental"}, "HuggingFaceHub": {"Hugging Face": "https://python.langchain.com/docs/integrations/providers/huggingface"}, "HuggingFaceHubEmbeddings": {"Hugging Face": "https://python.langchain.com/docs/integrations/providers/huggingface"}, "DocugamiLoader": {"Docugami": "https://python.langchain.com/docs/integrations/document_loaders/docugami"}, "GutenbergLoader": {"Gutenberg": "https://python.langchain.com/docs/integrations/document_loaders/gutenberg"}, "AzureBlobStorageContainerLoader": {"Azure Blob Storage": "https://python.langchain.com/docs/integrations/providers/azure_blob_storage", "Azure Blob Storage Container": "https://python.langchain.com/docs/integrations/document_loaders/azure_blob_storage_container"}, "AzureBlobStorageFileLoader": {"Azure Blob Storage": "https://python.langchain.com/docs/integrations/providers/azure_blob_storage", "Azure Blob Storage File": "https://python.langchain.com/docs/integrations/document_loaders/azure_blob_storage_file"}, "WikipediaLoader": {"Wikipedia": "https://python.langchain.com/docs/integrations/document_loaders/wikipedia", "Diffbot Graph Transformer": "https://python.langchain.com/docs/use_cases/more/graph/diffbot_graphtransformer"}, "ConfluenceLoader": {"Confluence": "https://python.langchain.com/docs/integrations/document_loaders/confluence"}, "Predibase": {"Predibase": "https://python.langchain.com/docs/integrations/llms/predibase"}, "Beam": {"Beam": "https://python.langchain.com/docs/integrations/llms/beam"}, "GrobidParser": {"Grobid": "https://python.langchain.com/docs/integrations/document_loaders/grobid"}, "GenericLoader": {"Grobid": "https://python.langchain.com/docs/integrations/document_loaders/grobid", "Loading documents from a YouTube url": "https://python.langchain.com/docs/integrations/document_loaders/youtube_audio", "Source Code": "https://python.langchain.com/docs/integrations/document_loaders/source_code", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/code_understanding"}, "Typesense": {"Typesense": "https://python.langchain.com/docs/integrations/vectorstores/typesense"}, "Hologres": {"Hologres": "https://python.langchain.com/docs/integrations/vectorstores/hologres"}, "AI21": {"AI21 Labs": "https://python.langchain.com/docs/integrations/providers/ai21", "AI21": "https://python.langchain.com/docs/integrations/llms/ai21"}, "WandbCallbackHandler": {"Weights & Biases": "https://python.langchain.com/docs/integrations/providers/wandb_tracking"}, "ObsidianLoader": {"Obsidian": "https://python.langchain.com/docs/integrations/document_loaders/obsidian"}, "create_sql_agent": {"CnosDB": "https://python.langchain.com/docs/integrations/providers/cnosdb", "SQL Database": "https://python.langchain.com/docs/integrations/toolkits/sql_database", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/qa_structured/sql", "SQL": "https://python.langchain.com/docs/use_cases/sql/sql"}, "SQLDatabaseToolkit": {"CnosDB": "https://python.langchain.com/docs/integrations/providers/cnosdb", "SQL Database": 
"https://python.langchain.com/docs/integrations/toolkits/sql_database", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/qa_structured/sql", "SQL": "https://python.langchain.com/docs/use_cases/sql/sql", "Use ToolKits with OpenAI Functions": "https://python.langchain.com/docs/modules/agents/how_to/use_toolkits_with_openai_functions"}, "SageMakerCallbackHandler": {"SageMaker Tracking": "https://python.langchain.com/docs/integrations/providers/sagemaker_tracking"}, "OpenAIModerationChain": {"OpenAI": "https://python.langchain.com/docs/integrations/providers/openai", "Adding moderation": "https://python.langchain.com/docs/expression_language/cookbook/moderation"}, "ChatGPTLoader": {"OpenAI": "https://python.langchain.com/docs/integrations/providers/openai", "ChatGPT Data": "https://python.langchain.com/docs/integrations/document_loaders/chatgpt_loader"}, "Nebula": {"Nebula": "https://python.langchain.com/docs/integrations/providers/symblai_nebula", "Nebula (Symbl.ai)": "https://python.langchain.com/docs/integrations/llms/symblai_nebula"}, "AZLyricsLoader": {"AZLyrics": "https://python.langchain.com/docs/integrations/document_loaders/azlyrics"}, "ToMarkdownLoader": {"2Markdown": "https://python.langchain.com/docs/integrations/document_loaders/tomarkdown"}, "DingoDB": {"DingoDB": "https://python.langchain.com/docs/integrations/vectorstores/dingo"}, "GitLoader": {"Git": "https://python.langchain.com/docs/integrations/document_loaders/git"}, "MlflowAIGateway": {"MLflow AI Gateway": "https://python.langchain.com/docs/integrations/providers/mlflow_ai_gateway"}, "MlflowAIGatewayEmbeddings": {"MLflow AI Gateway": "https://python.langchain.com/docs/integrations/providers/mlflow_ai_gateway"}, "ChatMLflowAIGateway": {"MLflow AI Gateway": "https://python.langchain.com/docs/integrations/providers/mlflow_ai_gateway"}, "SingleStoreDB": {"SingleStoreDB": "https://python.langchain.com/docs/integrations/vectorstores/singlestoredb"}, "Tigris": {"Tigris": "https://python.langchain.com/docs/integrations/vectorstores/tigris"}, "Bedrock": {"Bedrock": "https://python.langchain.com/docs/integrations/llms/bedrock"}, "Meilisearch": {"Meilisearch": "https://python.langchain.com/docs/integrations/vectorstores/meilisearch"}, "S3DirectoryLoader": {"AWS S3 Directory": "https://python.langchain.com/docs/integrations/document_loaders/aws_s3_directory"}, "S3FileLoader": {"AWS S3 Directory": "https://python.langchain.com/docs/integrations/providers/aws_s3", "AWS S3 File": "https://python.langchain.com/docs/integrations/document_loaders/aws_s3_file"}, "SQLDatabase": {"Rebuff": "https://python.langchain.com/docs/integrations/providers/rebuff", "SQL Database": "https://python.langchain.com/docs/integrations/toolkits/sql_database", "Multiple Retrieval Sources": "https://python.langchain.com/docs/use_cases/question_answering/how_to/multiple_retrieval", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/qa_structured/sql", "Vector SQL Retriever with MyScale": "https://python.langchain.com/docs/use_cases/qa_structured/integrations/myscale_vector_sql", "SQL": "https://python.langchain.com/docs/use_cases/sql/sql", "sql_db.md": "https://python.langchain.com/docs/expression_language/cookbook/sql_db"}, "Weaviate": {"Weaviate": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/weaviate_self_query"}, "Clickhouse": {"ClickHouse": "https://python.langchain.com/docs/integrations/vectorstores/clickhouse"}, 
"ClickhouseSettings": {"ClickHouse": "https://python.langchain.com/docs/integrations/vectorstores/clickhouse"}, "AirbyteJSONLoader": {"Airbyte": "https://python.langchain.com/docs/integrations/providers/airbyte", "Airbyte JSON": "https://python.langchain.com/docs/integrations/document_loaders/airbyte_json"}, "TelegramChatFileLoader": {"Telegram": "https://python.langchain.com/docs/integrations/document_loaders/telegram"}, "TelegramChatApiLoader": {"Telegram": "https://python.langchain.com/docs/integrations/document_loaders/telegram"}, "PredictionGuard": {"Prediction Guard": "https://python.langchain.com/docs/integrations/llms/predictionguard"}, "ScaNN": {"ScaNN": "https://python.langchain.com/docs/integrations/vectorstores/scann"}, "NotionDirectoryLoader": {"Notion DB": "https://python.langchain.com/docs/integrations/providers/notion", "Notion DB 1/2": "https://python.langchain.com/docs/integrations/document_loaders/notion", "Perform context-aware text splitting": "https://python.langchain.com/docs/use_cases/question_answering/how_to/document-context-aware-QA"}, "NotionDBLoader": {"Notion DB": "https://python.langchain.com/docs/integrations/providers/notion", "Notion DB 2/2": "https://python.langchain.com/docs/integrations/document_loaders/notiondb"}, "MWDumpLoader": {"MediaWikiDump": "https://python.langchain.com/docs/integrations/document_loaders/mediawikidump"}, "BraveSearchLoader": {"Brave Search": "https://python.langchain.com/docs/integrations/document_loaders/brave_search"}, "StarRocks": {"StarRocks": "https://python.langchain.com/docs/integrations/vectorstores/starrocks"}, "ElasticsearchStore": {"Elasticsearch": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/elasticsearch_self_query", "Indexing": "https://python.langchain.com/docs/modules/data_connection/indexing"}, "DatadogLogsLoader": {"Datadog Logs": "https://python.langchain.com/docs/integrations/document_loaders/datadog_logs"}, "ApifyDatasetLoader": {"Apify": "https://python.langchain.com/docs/integrations/providers/apify", "Apify Dataset": "https://python.langchain.com/docs/integrations/document_loaders/apify_dataset"}, "NLPCloud": {"NLPCloud": "https://python.langchain.com/docs/integrations/providers/nlpcloud", "NLP Cloud": "https://python.langchain.com/docs/integrations/llms/nlpcloud"}, "Milvus": {"Milvus": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/milvus_self_query", "Zilliz": "https://python.langchain.com/docs/integrations/vectorstores/zilliz"}, "Qdrant": {"Qdrant": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/qdrant_self_query"}, "GitbookLoader": {"GitBook": "https://python.langchain.com/docs/integrations/document_loaders/gitbook"}, "OpenSearchVectorSearch": {"OpenSearch": "https://python.langchain.com/docs/integrations/vectorstores/opensearch"}, "Pinecone": {"Pinecone": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/pinecone"}, "Rockset": {"Rockset": "https://python.langchain.com/docs/integrations/vectorstores/rockset"}, "RocksetLoader": {"Rockset": "https://python.langchain.com/docs/integrations/document_loaders/rockset"}, "Minimax": {"Minimax": "https://python.langchain.com/docs/integrations/llms/minimax"}, "UnstructuredFileLoader": {"Unstructured": "https://python.langchain.com/docs/integrations/providers/unstructured", "Unstructured File": "https://python.langchain.com/docs/integrations/document_loaders/unstructured_file"}, "SelfHostedPipeline": {"Runhouse": 
"https://python.langchain.com/docs/integrations/llms/runhouse"}, "SelfHostedHuggingFaceLLM": {"Runhouse": "https://python.langchain.com/docs/integrations/llms/runhouse"}, "MlflowCallbackHandler": {"MLflow": "https://python.langchain.com/docs/integrations/providers/mlflow_tracking"}, "SpreedlyLoader": {"Spreedly": "https://python.langchain.com/docs/integrations/document_loaders/spreedly"}, "OpenLLM": {"OpenLLM": "https://python.langchain.com/docs/integrations/llms/openllm"}, "PubMedLoader": {"PubMed": "https://python.langchain.com/docs/integrations/document_loaders/pubmed"}, "SearxSearchResults": {"SearxNG Search API": "https://python.langchain.com/docs/integrations/providers/searx"}, "SpacyTextSplitter": {"spaCy": "https://python.langchain.com/docs/integrations/providers/spacy", "Atlas": "https://python.langchain.com/docs/integrations/vectorstores/atlas", "Split by tokens ": "https://python.langchain.com/docs/modules/data_connection/document_transformers/text_splitters/split_by_token"}, "Modal": {"Modal": "https://python.langchain.com/docs/integrations/llms/modal"}, "PGEmbedding": {"Postgres Embedding": "https://python.langchain.com/docs/integrations/vectorstores/pgembedding"}, "Xinference": {"Xorbits Inference (Xinference)": "https://python.langchain.com/docs/integrations/llms/xinference"}, "IFixitLoader": {"iFixit": "https://python.langchain.com/docs/integrations/document_loaders/ifixit"}, "AlephAlpha": {"Aleph Alpha": "https://python.langchain.com/docs/integrations/llms/aleph_alpha"}, "PipelineAI": {"PipelineAI": "https://python.langchain.com/docs/integrations/llms/pipelineai"}, "Epsilla": {"Epsilla": "https://python.langchain.com/docs/integrations/vectorstores/epsilla"}, "LlamaCpp": {"Llama.cpp": "https://python.langchain.com/docs/integrations/llms/llamacpp", "Run LLMs locally": "https://python.langchain.com/docs/guides/local_llms", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/code_understanding", "Use local LLMs": "https://python.langchain.com/docs/use_cases/question_answering/how_to/local_retrieval_qa", "WebResearchRetriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/web_research"}, "AwaDB": {"AwaDB": "https://python.langchain.com/docs/integrations/vectorstores/awadb"}, "ArxivLoader": {"Arxiv": "https://python.langchain.com/docs/integrations/document_loaders/arxiv"}, "Anyscale": {"Anyscale": "https://python.langchain.com/docs/integrations/llms/anyscale"}, "AINetworkToolkit": {"AINetwork": "https://python.langchain.com/docs/integrations/toolkits/ainetwork"}, "StripeLoader": {"Stripe": "https://python.langchain.com/docs/integrations/document_loaders/stripe"}, "Bagel": {"BagelDB": "https://python.langchain.com/docs/integrations/vectorstores/bageldb"}, "BlackboardLoader": {"Blackboard": "https://python.langchain.com/docs/integrations/document_loaders/blackboard"}, "LanceDB": {"LanceDB": "https://python.langchain.com/docs/integrations/vectorstores/lancedb"}, "OneDriveLoader": {"Microsoft OneDrive": "https://python.langchain.com/docs/integrations/document_loaders/microsoft_onedrive"}, "AnalyticDB": {"AnalyticDB": "https://python.langchain.com/docs/integrations/vectorstores/analyticdb"}, "YoutubeLoader": {"YouTube": "https://python.langchain.com/docs/integrations/providers/youtube", "YouTube transcripts": "https://python.langchain.com/docs/integrations/document_loaders/youtube_transcript"}, "GoogleApiYoutubeLoader": {"YouTube": "https://python.langchain.com/docs/integrations/providers/youtube", "YouTube 
transcripts": "https://python.langchain.com/docs/integrations/document_loaders/youtube_transcript"}, "PromptLayerOpenAI": {"PromptLayer": "https://python.langchain.com/docs/integrations/providers/promptlayer", "PromptLayer OpenAI": "https://python.langchain.com/docs/integrations/llms/promptlayer_openai"}, "USearch": {"USearch": "https://python.langchain.com/docs/integrations/vectorstores/usearch"}, "WhyLabsCallbackHandler": {"WhyLabs": "https://python.langchain.com/docs/integrations/providers/whylabs_profiling"}, "FlyteCallbackHandler": {"Flyte": "https://python.langchain.com/docs/integrations/providers/flyte"}, "wandb_tracing_enabled": {"WandB Tracing": "https://python.langchain.com/docs/integrations/providers/wandb_tracing"}, "ManifestWrapper": {"Hazy Research": "https://python.langchain.com/docs/integrations/providers/hazy_research", "Manifest": "https://python.langchain.com/docs/integrations/llms/manifest"}, "Marqo": {"Marqo": "https://python.langchain.com/docs/integrations/vectorstores/marqo"}, "IMSDbLoader": {"IMSDb": "https://python.langchain.com/docs/integrations/document_loaders/imsdb"}, "PGVector": {"PGVector": "https://python.langchain.com/docs/integrations/vectorstores/pgvector"}, "DeepInfra": {"DeepInfra": "https://python.langchain.com/docs/integrations/llms/deepinfra"}, "ZeroShotAgent": {"Jina": "https://python.langchain.com/docs/integrations/providers/jina", "Bittensor": "https://python.langchain.com/docs/integrations/llms/bittensor", "BabyAGI with Tools": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/baby_agi_with_agent", "Message Memory in Agent backed by a database": "https://python.langchain.com/docs/modules/memory/agent_with_memory_in_db", "Memory in Agent": "https://python.langchain.com/docs/modules/memory/agent_with_memory", "Custom MRKL agent": "https://python.langchain.com/docs/modules/agents/how_to/custom_mrkl_agent", "Shared memory across agents and tools": "https://python.langchain.com/docs/modules/agents/how_to/sharedmemory_for_tools"}, "RedditPostsLoader": {"Reddit": "https://python.langchain.com/docs/integrations/document_loaders/reddit"}, "TrelloLoader": {"Trello": "https://python.langchain.com/docs/integrations/document_loaders/trello"}, "SKLearnVectorStore": {"scikit-learn": "https://python.langchain.com/docs/integrations/vectorstores/sklearn"}, "EverNoteLoader": {"EverNote": "https://python.langchain.com/docs/integrations/document_loaders/evernote"}, "TwitterTweetLoader": {"Twitter": "https://python.langchain.com/docs/integrations/document_loaders/twitter"}, "DiscordChatLoader": {"Discord": "https://python.langchain.com/docs/integrations/document_loaders/discord"}, "RedisCache": {"Redis": "https://python.langchain.com/docs/integrations/providers/redis", "LLM Caching integrations": "https://python.langchain.com/docs/integrations/llms/llm_caching"}, "RedisSemanticCache": {"Redis": "https://python.langchain.com/docs/integrations/providers/redis", "LLM Caching integrations": "https://python.langchain.com/docs/integrations/llms/llm_caching"}, "Redis": {"Redis": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/redis_self_query"}, "SelfQueryRetriever": {"Chroma": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/chroma_self_query", "Vectara": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/vectara_self_query", "Docugami": "https://python.langchain.com/docs/integrations/document_loaders/docugami", "Perform context-aware text splitting": 
"https://python.langchain.com/docs/use_cases/question_answering/how_to/document-context-aware-QA", "Milvus": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/milvus_self_query", "Weaviate": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/weaviate_self_query", "DashVector": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/dashvector", "Elasticsearch": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/elasticsearch_self_query", "Pinecone": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/pinecone", "Supabase": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/supabase_self_query", "Redis": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/redis_self_query", "MyScale": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/myscale_self_query", "Deep Lake": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/activeloop_deeplake_self_query", "Qdrant": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/qdrant_self_query"}, "MatchingEngine": {"Google Vertex AI MatchingEngine": "https://python.langchain.com/docs/integrations/vectorstores/matchingengine"}, "ClearMLCallbackHandler": {"ClearML": "https://python.langchain.com/docs/integrations/providers/clearml_tracking"}, "Cohere": {"Cohere": "https://python.langchain.com/docs/integrations/llms/cohere"}, "SlackDirectoryLoader": {"Slack": "https://python.langchain.com/docs/integrations/document_loaders/slack"}, "LLMContentHandler": {"SageMaker Endpoint": "https://python.langchain.com/docs/integrations/providers/sagemaker_endpoint", "SageMakerEndpoint": "https://python.langchain.com/docs/integrations/llms/sagemaker", "Amazon Comprehend Moderation Chain": "https://python.langchain.com/docs/guides/safety/amazon_comprehend_chain"}, "ContentHandlerBase": {"SageMaker Endpoint": "https://python.langchain.com/docs/integrations/providers/sagemaker_endpoint"}, "HNLoader": {"Hacker News": "https://python.langchain.com/docs/integrations/document_loaders/hacker_news"}, "Annoy": {"Annoy": "https://python.langchain.com/docs/integrations/vectorstores/annoy"}, "DashVector": {"DashVector": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/dashvector"}, "Cassandra": {"Cassandra": "https://python.langchain.com/docs/integrations/vectorstores/cassandra"}, "TencentVectorDB": {"TencentVectorDB": "https://python.langchain.com/docs/integrations/providers/tencentvectordb", "Tencent Cloud VectorDB": "https://python.langchain.com/docs/integrations/vectorstores/tencentvectordb"}, "Vearch": {"Vearch": "https://python.langchain.com/docs/integrations/providers/vearch"}, "GCSDirectoryLoader": {"Google Cloud Storage": "https://python.langchain.com/docs/integrations/providers/google_cloud_storage", "Google Cloud Storage Directory": "https://python.langchain.com/docs/integrations/document_loaders/google_cloud_storage_directory"}, "GCSFileLoader": {"Google Cloud Storage": "https://python.langchain.com/docs/integrations/providers/google_cloud_storage", "Google Cloud Storage File": "https://python.langchain.com/docs/integrations/document_loaders/google_cloud_storage_file"}, "ArthurCallbackHandler": {"Arthur": "https://python.langchain.com/docs/integrations/providers/arthur_tracking"}, "DuckDBLoader": {"DuckDB": 
"https://python.langchain.com/docs/integrations/document_loaders/duckdb"}, "Petals": {"Petals": "https://python.langchain.com/docs/integrations/llms/petals"}, "MomentoCache": {"Momento": "https://python.langchain.com/docs/integrations/providers/momento", "LLM Caching integrations": "https://python.langchain.com/docs/integrations/llms/llm_caching"}, "NIBittensorLLM": {"NIBittensor": "https://python.langchain.com/docs/integrations/providers/bittensor", "Bittensor": "https://python.langchain.com/docs/integrations/llms/bittensor"}, "Neo4jVector": {"Neo4j": "https://python.langchain.com/docs/integrations/providers/neo4j", "Neo4j Vector Index": "https://python.langchain.com/docs/integrations/vectorstores/neo4jvector"}, "Neo4jGraph": {"Neo4j": "https://python.langchain.com/docs/integrations/providers/neo4j", "Diffbot Graph Transformer": "https://python.langchain.com/docs/use_cases/more/graph/diffbot_graphtransformer", "Neo4j DB QA chain": "https://python.langchain.com/docs/use_cases/more/graph/graph_cypher_qa"}, "GraphCypherQAChain": {"Neo4j": "https://python.langchain.com/docs/integrations/providers/neo4j", "Memgraph QA chain": "https://python.langchain.com/docs/use_cases/more/graph/graph_memgraph_qa", "Diffbot Graph Transformer": "https://python.langchain.com/docs/use_cases/more/graph/diffbot_graphtransformer", "Neo4j DB QA chain": "https://python.langchain.com/docs/use_cases/more/graph/graph_cypher_qa"}, "AirtableLoader": {"Airtable": "https://python.langchain.com/docs/integrations/document_loaders/airtable"}, "TensorflowDatasetLoader": {"TensorFlow Datasets": "https://python.langchain.com/docs/integrations/document_loaders/tensorflow_datasets"}, "Clarifai": {"Clarifai": "https://python.langchain.com/docs/integrations/llms/clarifai"}, "BigQueryLoader": {"Google BigQuery": "https://python.langchain.com/docs/integrations/document_loaders/google_bigquery"}, "RoamLoader": {"Roam": "https://python.langchain.com/docs/integrations/document_loaders/roam"}, "Portkey": {"Log, Trace, and Monitor": "https://python.langchain.com/docs/integrations/providers/portkey/logging_tracing_portkey", "Portkey": "https://python.langchain.com/docs/integrations/providers/portkey/index"}, "Vectara": {"Vectara": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/vectara_self_query", "Chat Over Documents with Vectara": "https://python.langchain.com/docs/integrations/providers/vectara/vectara_chat", "Vectara Text Generation": "https://python.langchain.com/docs/integrations/providers/vectara/vectara_text_generation"}, "VectaraRetriever": {"Chat Over Documents with Vectara": "https://python.langchain.com/docs/integrations/providers/vectara/vectara_chat"}, "load_qa_chain": {"Chat Over Documents with Vectara": "https://python.langchain.com/docs/integrations/providers/vectara/vectara_chat", "Amazon Textract ": "https://python.langchain.com/docs/integrations/document_loaders/pdf-amazonTextractPDFLoader", "SageMakerEndpoint": "https://python.langchain.com/docs/integrations/llms/sagemaker", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/code_understanding", "Question Answering": "https://python.langchain.com/docs/use_cases/question_answering/question_answering", "Use local LLMs": "https://python.langchain.com/docs/use_cases/question_answering/how_to/local_retrieval_qa", "WebResearchRetriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/web_research", "Memory in the Multi-Input Chain": 
"https://python.langchain.com/docs/modules/memory/adding_memory_chain_multiple_inputs"}, "CONDENSE_QUESTION_PROMPT": {"Chat Over Documents with Vectara": "https://python.langchain.com/docs/integrations/providers/vectara/vectara_chat"}, "load_qa_with_sources_chain": {"Chat Over Documents with Vectara": "https://python.langchain.com/docs/integrations/providers/vectara/vectara_chat", "!pip install bs4": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/marathon_times"}, "QA_PROMPT": {"Chat Over Documents with Vectara": "https://python.langchain.com/docs/integrations/providers/vectara/vectara_chat"}, "create_csv_agent": {"CSV": "https://python.langchain.com/docs/integrations/toolkits/csv"}, "create_xorbits_agent": {"Xorbits": "https://python.langchain.com/docs/integrations/toolkits/xorbits"}, "JiraToolkit": {"Jira": "https://python.langchain.com/docs/integrations/toolkits/jira"}, "JiraAPIWrapper": {"Jira": "https://python.langchain.com/docs/integrations/toolkits/jira"}, "create_spark_dataframe_agent": {"Spark Dataframe": "https://python.langchain.com/docs/integrations/toolkits/spark"}, "PyPDFLoader": {"Document Comparison": "https://python.langchain.com/docs/integrations/toolkits/document_comparison_toolkit", "Google Cloud Storage File": "https://python.langchain.com/docs/integrations/document_loaders/google_cloud_storage_file", "MergeDocLoader": "https://python.langchain.com/docs/integrations/document_loaders/merge_doc_loader", "QA using Activeloop's DeepLake": "https://python.langchain.com/docs/use_cases/question_answering/integrations/semantic-search-over-chat"}, "create_python_agent": {"Python": "https://python.langchain.com/docs/integrations/toolkits/python"}, "PythonREPLTool": {"Python": "https://python.langchain.com/docs/integrations/toolkits/python"}, "create_pbi_agent": {"PowerBI Dataset": "https://python.langchain.com/docs/integrations/toolkits/powerbi"}, "PowerBIToolkit": {"PowerBI Dataset": "https://python.langchain.com/docs/integrations/toolkits/powerbi"}, "PowerBIDataset": {"PowerBI Dataset": "https://python.langchain.com/docs/integrations/toolkits/powerbi"}, "AzureCognitiveServicesToolkit": {"Azure Cognitive Services": "https://python.langchain.com/docs/integrations/toolkits/azure_cognitive_services"}, "Requests": {"Natural Language APIs": "https://python.langchain.com/docs/integrations/toolkits/openapi_nla"}, "APIOperation": {"Natural Language APIs": "https://python.langchain.com/docs/integrations/toolkits/openapi_nla"}, "OpenAPISpec": {"Natural Language APIs": "https://python.langchain.com/docs/integrations/toolkits/openapi_nla"}, "NLAToolkit": {"Natural Language APIs": "https://python.langchain.com/docs/integrations/toolkits/openapi_nla", "Plug-and-Plai": "https://python.langchain.com/docs/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval_using_plugnplai", "Custom Agent with PlugIn Retrieval": "https://python.langchain.com/docs/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval"}, "GmailToolkit": {"Gmail": "https://python.langchain.com/docs/integrations/toolkits/gmail"}, "build_resource_service": {"Gmail": "https://python.langchain.com/docs/integrations/toolkits/gmail"}, "get_gmail_credentials": {"Gmail": "https://python.langchain.com/docs/integrations/toolkits/gmail"}, "create_json_agent": {"JSON": "https://python.langchain.com/docs/integrations/toolkits/json"}, "JsonToolkit": {"JSON": "https://python.langchain.com/docs/integrations/toolkits/json"}, "JsonSpec": {"JSON": 
"https://python.langchain.com/docs/integrations/toolkits/json", "OpenAPI": "https://python.langchain.com/docs/integrations/toolkits/openapi"}, "AirbyteStripeLoader": {"Airbyte Question Answering": "https://python.langchain.com/docs/integrations/toolkits/airbyte_structured_qa", "Airbyte Stripe": "https://python.langchain.com/docs/integrations/document_loaders/airbyte_stripe"}, "create_pandas_dataframe_agent": {"Airbyte Question Answering": "https://python.langchain.com/docs/integrations/toolkits/airbyte_structured_qa", "Pandas Dataframe": "https://python.langchain.com/docs/integrations/toolkits/pandas", "!pip install bs4": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/marathon_times"}, "GitHubToolkit": {"Github": "https://python.langchain.com/docs/integrations/toolkits/github"}, "GitHubAPIWrapper": {"Github": "https://python.langchain.com/docs/integrations/toolkits/github"}, "GitHubAction": {"Github": "https://python.langchain.com/docs/integrations/toolkits/github"}, "create_spark_sql_agent": {"Spark SQL": "https://python.langchain.com/docs/integrations/toolkits/spark_sql"}, "SparkSQLToolkit": {"Spark SQL": "https://python.langchain.com/docs/integrations/toolkits/spark_sql"}, "SparkSQL": {"Spark SQL": "https://python.langchain.com/docs/integrations/toolkits/spark_sql"}, "create_sync_playwright_browser": {"PlayWright Browser": "https://python.langchain.com/docs/integrations/toolkits/playwright"}, "O365Toolkit": {"Office365": "https://python.langchain.com/docs/integrations/toolkits/office365"}, "MultionToolkit": {"MultiOn": "https://python.langchain.com/docs/integrations/toolkits/multion"}, "AmadeusToolkit": {"Amadeus": "https://python.langchain.com/docs/integrations/toolkits/amadeus"}, "create_vectorstore_agent": {"Vectorstore": "https://python.langchain.com/docs/integrations/toolkits/vectorstore"}, "VectorStoreToolkit": {"Vectorstore": "https://python.langchain.com/docs/integrations/toolkits/vectorstore"}, "VectorStoreInfo": {"Vectorstore": "https://python.langchain.com/docs/integrations/toolkits/vectorstore"}, "create_vectorstore_router_agent": {"Vectorstore": "https://python.langchain.com/docs/integrations/toolkits/vectorstore"}, "VectorStoreRouterToolkit": {"Vectorstore": "https://python.langchain.com/docs/integrations/toolkits/vectorstore"}, "reduce_openapi_spec": {"OpenAPI": "https://python.langchain.com/docs/integrations/toolkits/openapi"}, "RequestsWrapper": {"OpenAPI": "https://python.langchain.com/docs/integrations/toolkits/openapi"}, "create_openapi_agent": {"OpenAPI": "https://python.langchain.com/docs/integrations/toolkits/openapi"}, "OpenAPIToolkit": {"OpenAPI": "https://python.langchain.com/docs/integrations/toolkits/openapi"}, "GitLabToolkit": {"Gitlab": "https://python.langchain.com/docs/integrations/toolkits/gitlab"}, "GitLabAPIWrapper": {"Gitlab": "https://python.langchain.com/docs/integrations/toolkits/gitlab"}, "SQLiteVSS": {"sqlite-vss": "https://python.langchain.com/docs/integrations/vectorstores/sqlitevss"}, "RetrievalQAWithSourcesChain": {"Weaviate": "https://python.langchain.com/docs/integrations/vectorstores/weaviate", "Neo4j Vector Index": "https://python.langchain.com/docs/integrations/vectorstores/neo4jvector", "Marqo": "https://python.langchain.com/docs/integrations/vectorstores/marqo", "Psychic": "https://python.langchain.com/docs/integrations/document_loaders/psychic", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/web_scraping", "Question Answering": 
"https://python.langchain.com/docs/use_cases/question_answering/question_answering", "Vector SQL Retriever with MyScale": "https://python.langchain.com/docs/use_cases/qa_structured/integrations/myscale_vector_sql", "WebResearchRetriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/web_research"}, "google_palm": {"ScaNN": "https://python.langchain.com/docs/integrations/vectorstores/scann"}, "NucliaDB": {"NucliaDB": "https://python.langchain.com/docs/integrations/vectorstores/nucliadb"}, "AttributeInfo": {"Vectara": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/vectara_self_query", "Docugami": "https://python.langchain.com/docs/integrations/document_loaders/docugami", "Perform context-aware text splitting": "https://python.langchain.com/docs/use_cases/question_answering/how_to/document-context-aware-QA", "Milvus": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/milvus_self_query", "Weaviate": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/weaviate_self_query", "DashVector": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/dashvector", "Elasticsearch": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/elasticsearch_self_query", "Chroma": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/chroma_self_query", "Pinecone": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/pinecone", "Supabase": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/supabase_self_query", "Redis": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/redis_self_query", "MyScale": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/myscale_self_query", "Deep Lake": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/activeloop_deeplake_self_query", "Qdrant": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/qdrant_self_query"}, "RedisText": {"Redis": "https://python.langchain.com/docs/integrations/vectorstores/redis"}, "RedisNum": {"Redis": "https://python.langchain.com/docs/integrations/vectorstores/redis"}, "RedisTag": {"Redis": "https://python.langchain.com/docs/integrations/vectorstores/redis"}, "RedisFilter": {"Redis": "https://python.langchain.com/docs/integrations/vectorstores/redis"}, "InMemoryDocstore": {"Annoy": "https://python.langchain.com/docs/integrations/vectorstores/annoy", "Agents": "https://python.langchain.com/docs/use_cases/more/agents/agents", "AutoGPT": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/autogpt", "BabyAGI User Guide": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/baby_agi", "BabyAGI with Tools": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/baby_agi_with_agent", "!pip install bs4": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/marathon_times", "Generative Agents in LangChain": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/characters"}, "AtlasDB": {"Atlas": "https://python.langchain.com/docs/integrations/vectorstores/atlas"}, "OpenAIChat": {"Activeloop Deep Lake": "https://python.langchain.com/docs/integrations/vectorstores/activeloop_deeplake"}, "AlibabaCloudOpenSearch": {"Alibaba Cloud OpenSearch": 
"https://python.langchain.com/docs/integrations/vectorstores/alibabacloud_opensearch"}, "AlibabaCloudOpenSearchSettings": {"Alibaba Cloud OpenSearch": "https://python.langchain.com/docs/integrations/vectorstores/alibabacloud_opensearch"}, "BESVectorStore":{"Baidu Cloud VectorSearch": "https://python.langchain.com/docs/integrations/vectorstores/baiducloud_vector_search"}, "StarRocksSettings": {"StarRocks": "https://python.langchain.com/docs/integrations/vectorstores/starrocks"}, "TokenTextSplitter": {"StarRocks": "https://python.langchain.com/docs/integrations/vectorstores/starrocks", "Split by tokens ": "https://python.langchain.com/docs/modules/data_connection/document_transformers/text_splitters/split_by_token"}, "DirectoryLoader": {"StarRocks": "https://python.langchain.com/docs/integrations/vectorstores/starrocks"}, "UnstructuredMarkdownLoader": {"StarRocks": "https://python.langchain.com/docs/integrations/vectorstores/starrocks"}, "ConnectionParams": {"Tencent Cloud VectorDB": "https://python.langchain.com/docs/integrations/vectorstores/tencentvectordb"}, "DocArrayHnswSearch": {"DocArray HnswSearch": "https://python.langchain.com/docs/integrations/vectorstores/docarray_hnsw"}, "MyScaleSettings": {"MyScale": "https://python.langchain.com/docs/integrations/vectorstores/myscale"}, "AzureSearch": {"Azure Cognitive Search": "https://python.langchain.com/docs/integrations/vectorstores/azuresearch"}, "ElasticVectorSearch": {"Elasticsearch": "https://python.langchain.com/docs/integrations/vectorstores/elasticsearch", "Memory in the Multi-Input Chain": "https://python.langchain.com/docs/modules/memory/adding_memory_chain_multiple_inputs"}, "DocArrayInMemorySearch": {"DocArray InMemorySearch": "https://python.langchain.com/docs/integrations/vectorstores/docarray_in_memory"}, "ZepVectorStore": {"Zep": "https://python.langchain.com/docs/integrations/vectorstores/zep"}, "CollectionConfig": {"Zep": "https://python.langchain.com/docs/integrations/vectorstores/zep"}, "AsyncChromiumLoader": {"Beautiful Soup": "https://python.langchain.com/docs/integrations/document_transformers/beautiful_soup", "Async Chromium": "https://python.langchain.com/docs/integrations/document_loaders/async_chromium", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/web_scraping"}, "BeautifulSoupTransformer": {"Beautiful Soup": "https://python.langchain.com/docs/integrations/document_transformers/beautiful_soup", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/web_scraping"}, "NucliaTextTransformer": {"Nuclia Understanding API document transformer": "https://python.langchain.com/docs/integrations/document_transformers/nuclia_transformer"}, "create_metadata_tagger": {"OpenAI Functions Metadata Tagger": "https://python.langchain.com/docs/integrations/document_transformers/openai_metadata_tagger"}, "AsyncHtmlLoader": {"html2text": "https://python.langchain.com/docs/integrations/document_transformers/html2text", "AsyncHtmlLoader": "https://python.langchain.com/docs/integrations/document_loaders/async_html", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/web_scraping"}, "Html2TextTransformer": {"html2text": "https://python.langchain.com/docs/integrations/document_transformers/html2text", "Async Chromium": "https://python.langchain.com/docs/integrations/document_loaders/async_chromium", "Set env var OPENAI_API_KEY or load from a .env file:": 
"https://python.langchain.com/docs/use_cases/web_scraping"}, "DoctranPropertyExtractor": {"Doctran Extract Properties": "https://python.langchain.com/docs/integrations/document_transformers/doctran_extract_properties"}, "DoctranQATransformer": {"Doctran Interrogate Documents": "https://python.langchain.com/docs/integrations/document_transformers/doctran_interrogate_document"}, "Blob": {"docai.md": "https://python.langchain.com/docs/integrations/document_transformers/docai", "Embaas": "https://python.langchain.com/docs/integrations/document_loaders/embaas"}, "DocAIParser": {"docai.md": "https://python.langchain.com/docs/integrations/document_transformers/docai"}, "DoctranTextTranslator": {"Doctran Translate Documents": "https://python.langchain.com/docs/integrations/document_transformers/doctran_translate_document"}, "SnowflakeLoader": {"Snowflake": "https://python.langchain.com/docs/integrations/document_loaders/snowflake"}, "AcreomLoader": {"acreom": "https://python.langchain.com/docs/integrations/document_loaders/acreom"}, "ArcGISLoader": {"ArcGIS": "https://python.langchain.com/docs/integrations/document_loaders/arcgis"}, "UnstructuredCSVLoader": {"CSV": "https://python.langchain.com/docs/integrations/document_loaders/csv"}, "XorbitsLoader": {"Xorbits Pandas DataFrame": "https://python.langchain.com/docs/integrations/document_loaders/xorbits"}, "UnstructuredEmailLoader": {"Email": "https://python.langchain.com/docs/integrations/document_loaders/email"}, "OutlookMessageLoader": {"Email": "https://python.langchain.com/docs/integrations/document_loaders/email"}, "AssemblyAIAudioTranscriptLoader": {"AssemblyAI Audio Transcripts": "https://python.langchain.com/docs/integrations/document_loaders/assemblyai"}, "TranscriptFormat": {"AssemblyAI Audio Transcripts": "https://python.langchain.com/docs/integrations/document_loaders/assemblyai"}, "BlockchainDocumentLoader": {"Blockchain": "https://python.langchain.com/docs/integrations/document_loaders/blockchain"}, "BlockchainType": {"Blockchain": "https://python.langchain.com/docs/integrations/document_loaders/blockchain"}, "RecursiveUrlLoader": {"Recursive URL Loader": "https://python.langchain.com/docs/integrations/document_loaders/recursive_url_loader"}, "JoplinLoader": {"Joplin": "https://python.langchain.com/docs/integrations/document_loaders/joplin"}, "AirbyteSalesforceLoader": {"Airbyte Salesforce": "https://python.langchain.com/docs/integrations/document_loaders/airbyte_salesforce"}, "EtherscanLoader": {"Etherscan Loader": "https://python.langchain.com/docs/integrations/document_loaders/Etherscan"}, "AirbyteCDKLoader": {"Airbyte CDK": "https://python.langchain.com/docs/integrations/document_loaders/airbyte_cdk"}, "Docx2txtLoader": {"Microsoft Word": "https://python.langchain.com/docs/integrations/document_loaders/microsoft_word"}, "OpenAIWhisperParser": {"Loading documents from a YouTube url": "https://python.langchain.com/docs/integrations/document_loaders/youtube_audio"}, "YoutubeAudioLoader": {"Loading documents from a YouTube url": "https://python.langchain.com/docs/integrations/document_loaders/youtube_audio"}, "UnstructuredURLLoader": {"URL": "https://python.langchain.com/docs/integrations/document_loaders/url"}, "SeleniumURLLoader": {"URL": "https://python.langchain.com/docs/integrations/document_loaders/url"}, "PlaywrightURLLoader": {"URL": "https://python.langchain.com/docs/integrations/document_loaders/url"}, "OpenCityDataLoader": {"Geopandas": "https://python.langchain.com/docs/integrations/document_loaders/geopandas", "Open City 
Data": "https://python.langchain.com/docs/integrations/document_loaders/open_city_data"}, "GeoDataFrameLoader": {"Geopandas": "https://python.langchain.com/docs/integrations/document_loaders/geopandas"}, "OBSFileLoader": {"Huawei OBS File": "https://python.langchain.com/docs/integrations/document_loaders/huawei_obs_file"}, "HuggingFaceDatasetLoader": {"HuggingFace dataset": "https://python.langchain.com/docs/integrations/document_loaders/hugging_face_dataset"}, "DropboxLoader": {"Dropbox": "https://python.langchain.com/docs/integrations/document_loaders/dropbox"}, "AirbyteTypeformLoader": {"Airbyte Typeform": "https://python.langchain.com/docs/integrations/document_loaders/airbyte_typeform"}, "MHTMLLoader": {"mhtml": "https://python.langchain.com/docs/integrations/document_loaders/mhtml"}, "NewsURLLoader": {"News URL": "https://python.langchain.com/docs/integrations/document_loaders/news"}, "ImageCaptionLoader": {"Image captions": "https://python.langchain.com/docs/integrations/document_loaders/image_captions"}, "UnstructuredRSTLoader": {"RST": "https://python.langchain.com/docs/integrations/document_loaders/rst"}, "ConversationBufferWindowMemory": {"Figma": "https://python.langchain.com/docs/integrations/document_loaders/figma", "OpaquePrompts": "https://python.langchain.com/docs/integrations/llms/opaqueprompts", "Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/chatbots", "Meta-Prompt": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/meta_prompt", "Create ChatGPT clone": "https://python.langchain.com/docs/modules/agents/how_to/chatgpt_clone"}, "UnstructuredImageLoader": {"Images": "https://python.langchain.com/docs/integrations/document_loaders/image"}, "NucliaLoader": {"Nuclia Understanding API document loader": "https://python.langchain.com/docs/integrations/document_loaders/nuclia"}, "TencentCOSFileLoader": {"Tencent COS File": "https://python.langchain.com/docs/integrations/document_loaders/tencent_cos_file"}, "TomlLoader": {"TOML": "https://python.langchain.com/docs/integrations/document_loaders/toml"}, "UnstructuredAPIFileLoader": {"Unstructured File": "https://python.langchain.com/docs/integrations/document_loaders/unstructured_file"}, "PsychicLoader": {"Psychic": "https://python.langchain.com/docs/integrations/document_loaders/psychic"}, "TencentCOSDirectoryLoader": {"Tencent COS Directory": "https://python.langchain.com/docs/integrations/document_loaders/tencent_cos_directory"}, "GitHubIssuesLoader": {"GitHub": "https://python.langchain.com/docs/integrations/document_loaders/github"}, "UnstructuredOrgModeLoader": {"Org-mode": "https://python.langchain.com/docs/integrations/document_loaders/org_mode"}, "LarkSuiteDocLoader": {"LarkSuite (FeiShu)": "https://python.langchain.com/docs/integrations/document_loaders/larksuite"}, "load_summarize_chain": {"LarkSuite (FeiShu)": "https://python.langchain.com/docs/integrations/document_loaders/larksuite", "LLM Caching integrations": "https://python.langchain.com/docs/integrations/llms/llm_caching", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/summarization"}, "IuguLoader": {"Iugu": "https://python.langchain.com/docs/integrations/document_loaders/iugu"}, "SharePointLoader": {"Microsoft SharePoint": "https://python.langchain.com/docs/integrations/document_loaders/microsoft_sharepoint"}, "UnstructuredEPubLoader": {"EPub ": "https://python.langchain.com/docs/integrations/document_loaders/epub"}, 
"UnstructuredFileIOLoader": {"Google Drive": "https://python.langchain.com/docs/integrations/document_loaders/google_drive"}, "BrowserlessLoader": {"Browserless": "https://python.langchain.com/docs/integrations/document_loaders/browserless"}, "BibtexLoader": {"BibTeX": "https://python.langchain.com/docs/integrations/document_loaders/bibtex"}, "AirbyteHubspotLoader": {"Airbyte Hubspot": "https://python.langchain.com/docs/integrations/document_loaders/airbyte_hubspot"}, "AirbyteGongLoader": {"Airbyte Gong": "https://python.langchain.com/docs/integrations/document_loaders/airbyte_gong"}, "ReadTheDocsLoader": {"ReadTheDocs Documentation": "https://python.langchain.com/docs/integrations/document_loaders/readthedocs_documentation"}, "PolarsDataFrameLoader": {"Polars DataFrame": "https://python.langchain.com/docs/integrations/document_loaders/polars_dataframe"}, "DataFrameLoader": {"Pandas DataFrame": "https://python.langchain.com/docs/integrations/document_loaders/pandas_dataframe"}, "GoogleApiClient": {"YouTube transcripts": "https://python.langchain.com/docs/integrations/document_loaders/youtube_transcript"}, "ConcurrentLoader": {"Concurrent Loader": "https://python.langchain.com/docs/integrations/document_loaders/concurrent"}, "RSSFeedLoader": {"RSS Feeds": "https://python.langchain.com/docs/integrations/document_loaders/rss"}, "NotebookLoader": {"Jupyter Notebook": "https://python.langchain.com/docs/integrations/document_loaders/jupyter_notebook", "Notebook": "https://python.langchain.com/docs/integrations/document_loaders/example_data/notebook"}, "UnstructuredTSVLoader": {"TSV": "https://python.langchain.com/docs/integrations/document_loaders/tsv"}, "UnstructuredODTLoader": {"Open Document Format (ODT)": "https://python.langchain.com/docs/integrations/document_loaders/odt"}, "EmbaasBlobLoader": {"Embaas": "https://python.langchain.com/docs/integrations/document_loaders/embaas"}, "EmbaasLoader": {"Embaas": "https://python.langchain.com/docs/integrations/document_loaders/embaas"}, "UnstructuredXMLLoader": {"XML": "https://python.langchain.com/docs/integrations/document_loaders/xml"}, "MaxComputeLoader": {"Alibaba Cloud MaxCompute": "https://python.langchain.com/docs/integrations/document_loaders/alibaba_cloud_maxcompute"}, "CubeSemanticLoader": {"Cube Semantic Layer": "https://python.langchain.com/docs/integrations/document_loaders/cube_semantic"}, "UnstructuredExcelLoader": {"Microsoft Excel": "https://python.langchain.com/docs/integrations/document_loaders/excel"}, "AmazonTextractPDFLoader": {"Amazon Textract ": "https://python.langchain.com/docs/integrations/document_loaders/pdf-amazonTextractPDFLoader"}, "Language": {"Source Code": "https://python.langchain.com/docs/integrations/document_loaders/source_code", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/code_understanding"}, "LanguageParser": {"Source Code": "https://python.langchain.com/docs/integrations/document_loaders/source_code", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/code_understanding"}, "SRTLoader": {"Subtitle": "https://python.langchain.com/docs/integrations/document_loaders/subtitle"}, "MastodonTootsLoader": {"Mastodon": "https://python.langchain.com/docs/integrations/document_loaders/mastodon"}, "AirbyteShopifyLoader": {"Airbyte Shopify": "https://python.langchain.com/docs/integrations/document_loaders/airbyte_shopify"}, "MergedDataLoader": {"MergeDocLoader": 
"https://python.langchain.com/docs/integrations/document_loaders/merge_doc_loader"}, "PySparkDataFrameLoader": {"PySpark DataFrame Loader": "https://python.langchain.com/docs/integrations/document_loaders/pyspark_dataframe"}, "AirbyteZendeskSupportLoader": {"Airbyte Zendesk Support": "https://python.langchain.com/docs/integrations/document_loaders/airbyte_zendesk_support"}, "CoNLLULoader": {"CoNLL-U": "https://python.langchain.com/docs/integrations/document_loaders/conll-u"}, "OBSDirectoryLoader": {"Huawei OBS Directory": "https://python.langchain.com/docs/integrations/document_loaders/huawei_obs_directory"}, "FaunaLoader": {"Fauna": "https://python.langchain.com/docs/integrations/document_loaders/fauna"}, "SitemapLoader": {"Sitemap": "https://python.langchain.com/docs/integrations/document_loaders/sitemap"}, "DocumentIntelligenceLoader": {"Azure Document Intelligence": "https://python.langchain.com/docs/integrations/document_loaders/azure_document_intelligence"}, "StochasticAI": {"StochasticAI": "https://python.langchain.com/docs/integrations/llms/stochasticai"}, "FireworksChat": {"Fireworks": "https://python.langchain.com/docs/integrations/llms/fireworks"}, "OctoAIEndpoint": {"OctoAI": "https://python.langchain.com/docs/integrations/llms/octoai"}, "Writer": {"Writer": "https://python.langchain.com/docs/integrations/llms/writer"}, "TextGen": {"TextGen": "https://python.langchain.com/docs/integrations/llms/textgen"}, "ForefrontAI": {"ForefrontAI": "https://python.langchain.com/docs/integrations/llms/forefrontai"}, "MosaicML": {"MosaicML": "https://python.langchain.com/docs/integrations/llms/mosaicml"}, "KoboldApiLLM": {"KoboldAI API": "https://python.langchain.com/docs/integrations/llms/koboldai"}, "CerebriumAI": {"CerebriumAI": "https://python.langchain.com/docs/integrations/llms/cerebriumai"}, "VertexAI": {"Google Vertex AI PaLM ": "https://python.langchain.com/docs/integrations/llms/google_vertex_ai_palm"}, "VertexAIModelGarden": {"Google Vertex AI PaLM ": "https://python.langchain.com/docs/integrations/llms/google_vertex_ai_palm"}, "Ollama": {"Ollama": "https://python.langchain.com/docs/integrations/llms/ollama", "Run LLMs locally": "https://python.langchain.com/docs/guides/local_llms"}, "OpaquePrompts": {"OpaquePrompts": "https://python.langchain.com/docs/integrations/llms/opaqueprompts"}, "RunnableMap": {"OpaquePrompts": "https://python.langchain.com/docs/integrations/llms/opaqueprompts", "interface.md": "https://python.langchain.com/docs/expression_language/interface", "First we add a step to load memory": "https://python.langchain.com/docs/expression_language/cookbook/retrieval", "sql_db.md": "https://python.langchain.com/docs/expression_language/cookbook/sql_db", "prompt_llm_parser.md": "https://python.langchain.com/docs/expression_language/cookbook/prompt_llm_parser", "Adding memory": "https://python.langchain.com/docs/expression_language/cookbook/memory", "multiple_chains.md": "https://python.langchain.com/docs/expression_language/cookbook/multiple_chains"}, "TitanTakeoff": {"Titan Takeoff": "https://python.langchain.com/docs/integrations/llms/titan_takeoff"}, "Databricks": {"Databricks": "https://python.langchain.com/docs/integrations/llms/databricks"}, "QianfanLLMEndpoint": {"Baidu Qianfan": "https://python.langchain.com/docs/integrations/llms/baidu_qianfan_endpoint"}, "VLLM": {"vLLM": "https://python.langchain.com/docs/integrations/llms/vllm"}, "VLLMOpenAI": {"vLLM": "https://python.langchain.com/docs/integrations/llms/vllm"}, "AzureMLOnlineEndpoint": {"Azure ML": 
"https://python.langchain.com/docs/integrations/llms/azure_ml"}, "ContentFormatterBase": {"Azure ML": "https://python.langchain.com/docs/integrations/llms/azure_ml"}, "DollyContentFormatter": {"Azure ML": "https://python.langchain.com/docs/integrations/llms/azure_ml"}, "load_llm": {"Azure ML": "https://python.langchain.com/docs/integrations/llms/azure_ml", "Serialization": "https://python.langchain.com/docs/modules/model_io/models/llms/llm_serialization"}, "AzureMLEndpointClient": {"Azure ML": "https://python.langchain.com/docs/integrations/llms/azure_ml"}, "MapReduceChain": {"Manifest": "https://python.langchain.com/docs/integrations/llms/manifest", "LLM Caching integrations": "https://python.langchain.com/docs/integrations/llms/llm_caching", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/summarization"}, "ModelLaboratory": {"Manifest": "https://python.langchain.com/docs/integrations/llms/manifest", "Model comparison": "https://python.langchain.com/docs/guides/model_laboratory"}, "Tongyi": {"Tongyi Qwen": "https://python.langchain.com/docs/integrations/llms/tongyi", "DashVector": "https://python.langchain.com/docs/modules/data_connection/retrievers/self_query/dashvector"}, "InMemoryCache": {"LLM Caching integrations": "https://python.langchain.com/docs/integrations/llms/llm_caching"}, "SQLiteCache": {"LLM Caching integrations": "https://python.langchain.com/docs/integrations/llms/llm_caching"}, "GPTCache": {"LLM Caching integrations": "https://python.langchain.com/docs/integrations/llms/llm_caching"}, "SQLAlchemyCache": {"LLM Caching integrations": "https://python.langchain.com/docs/integrations/llms/llm_caching"}, "GooseAI": {"GooseAI": "https://python.langchain.com/docs/integrations/llms/gooseai"}, "OpenLM": {"OpenLM": "https://python.langchain.com/docs/integrations/llms/openlm"}, "CTranslate2": {"CTranslate2": "https://python.langchain.com/docs/integrations/llms/ctranslate2"}, "HuggingFaceTextGenInference": {"Huggingface TextGen Inference": "https://python.langchain.com/docs/integrations/llms/huggingface_textgen_inference"}, "ChatGLM": {"ChatGLM": "https://python.langchain.com/docs/integrations/llms/chatglm"}, "Replicate": {"Replicate": "https://python.langchain.com/docs/integrations/llms/replicate"}, "DatetimeOutputParser": {"Fallbacks": "https://python.langchain.com/docs/guides/fallbacks", "Datetime parser": "https://python.langchain.com/docs/modules/model_io/output_parsers/datetime"}, "ConditionalPromptSelector": {"Run LLMs locally": "https://python.langchain.com/docs/guides/local_llms"}, "tracing_v2_enabled": {"LangSmith Walkthrough": "https://python.langchain.com/docs/guides/langsmith/walkthrough"}, "wait_for_all_tracers": {"LangSmith Walkthrough": "https://python.langchain.com/docs/guides/langsmith/walkthrough"}, "EvaluatorType": {"LangSmith Walkthrough": "https://python.langchain.com/docs/guides/langsmith/walkthrough", "Criteria Evaluation": "https://python.langchain.com/docs/guides/evaluation/string/criteria_eval_chain"}, "RunEvalConfig": {"LangSmith Walkthrough": "https://python.langchain.com/docs/guides/langsmith/walkthrough"}, "arun_on_dataset": {"LangSmith Walkthrough": "https://python.langchain.com/docs/guides/langsmith/walkthrough"}, "run_on_dataset": {"LangSmith Walkthrough": "https://python.langchain.com/docs/guides/langsmith/walkthrough"}, "load_chain": {"Hugging Face Prompt Injection Identification": "https://python.langchain.com/docs/guides/safety/hugging_face_prompt_injection", "Serialization": 
"https://python.langchain.com/docs/modules/chains/how_to/serialization", "Loading from LangChainHub": "https://python.langchain.com/docs/modules/chains/how_to/from_hub"}, "FakeListLLM": {"Amazon Comprehend Moderation Chain": "https://python.langchain.com/docs/guides/safety/amazon_comprehend_chain", "Fake LLM": "https://python.langchain.com/docs/modules/model_io/models/llms/fake_llm"}, "load_prompt": {"Amazon Comprehend Moderation Chain": "https://python.langchain.com/docs/guides/safety/amazon_comprehend_chain", "Serialization": "https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/prompt_serialization"}, "openai": {"OpenAI Adapter": "https://python.langchain.com/docs/guides/adapters/openai"}, "load_evaluator": {"Comparing Chain Outputs": "https://python.langchain.com/docs/guides/evaluation/examples/comparisons", "Agent Trajectory": "https://python.langchain.com/docs/guides/evaluation/trajectory/trajectory_eval", "Pairwise Embedding Distance ": "https://python.langchain.com/docs/guides/evaluation/comparison/pairwise_embedding_distance", "Pairwise String Comparison": "https://python.langchain.com/docs/guides/evaluation/comparison/pairwise_string", "Criteria Evaluation": "https://python.langchain.com/docs/guides/evaluation/string/criteria_eval_chain", "String Distance": "https://python.langchain.com/docs/guides/evaluation/string/string_distance", "Embedding Distance": "https://python.langchain.com/docs/guides/evaluation/string/embedding_distance"}, "load_dataset": {"Comparing Chain Outputs": "https://python.langchain.com/docs/guides/evaluation/examples/comparisons"}, "AgentAction": {"Custom Trajectory Evaluator": "https://python.langchain.com/docs/guides/evaluation/trajectory/custom", "Agents": "https://python.langchain.com/docs/use_cases/more/agents/agents", "Plug-and-Plai": "https://python.langchain.com/docs/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval_using_plugnplai", "Wikibase Agent": "https://python.langchain.com/docs/use_cases/more/agents/agents/wikibase_agent", "SalesGPT - Your Context-Aware AI Sales Assistant With Knowledge Base": "https://python.langchain.com/docs/use_cases/more/agents/agents/sales_agent_with_context", "Custom Agent with PlugIn Retrieval": "https://python.langchain.com/docs/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval", "Multiple callback handlers": "https://python.langchain.com/docs/modules/callbacks/multiple_callbacks", "Custom multi-action agent": "https://python.langchain.com/docs/modules/agents/how_to/custom_multi_action_agent", "Custom agent": "https://python.langchain.com/docs/modules/agents/how_to/custom_agent", "Custom agent with tool retrieval": "https://python.langchain.com/docs/modules/agents/how_to/custom_agent_with_tool_retrieval"}, "AgentTrajectoryEvaluator": {"Custom Trajectory Evaluator": "https://python.langchain.com/docs/guides/evaluation/trajectory/custom"}, "EmbeddingDistance": {"Pairwise Embedding Distance ": "https://python.langchain.com/docs/guides/evaluation/comparison/pairwise_embedding_distance", "Embedding Distance": "https://python.langchain.com/docs/guides/evaluation/string/embedding_distance"}, "PairwiseStringEvaluator": {"Custom Pairwise Evaluator": "https://python.langchain.com/docs/guides/evaluation/comparison/custom"}, "Criteria": {"Criteria Evaluation": "https://python.langchain.com/docs/guides/evaluation/string/criteria_eval_chain"}, "StringEvaluator": {"Custom String Evaluator": "https://python.langchain.com/docs/guides/evaluation/string/custom"}, "StringDistance": 
{"String Distance": "https://python.langchain.com/docs/guides/evaluation/string/string_distance"}, "WebResearchRetriever": {"Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/web_scraping", "WebResearchRetriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/web_research"}, "ConversationSummaryMemory": {"Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/chatbots", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/code_understanding", "Multiple Memory classes": "https://python.langchain.com/docs/modules/memory/multiple_memory"}, "ConversationSummaryBufferMemory": {"Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/chatbots", "Conversation Summary Buffer": "https://python.langchain.com/docs/modules/memory/types/summary_buffer"}, "MessagesPlaceholder": {"Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/chatbots", "Conversational Retrieval Agent": "https://python.langchain.com/docs/use_cases/question_answering/how_to/conversational_retrieval_agents", "Agents": "https://python.langchain.com/docs/use_cases/more/agents/agents", "Memory in LLMChain": "https://python.langchain.com/docs/modules/memory/adding_memory", "Add Memory to OpenAI Functions Agent": "https://python.langchain.com/docs/modules/agents/how_to/add_memory_openai_functions", "Types of `MessagePromptTemplate`": "https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/msg_prompt_templates", "Adding memory": "https://python.langchain.com/docs/expression_language/cookbook/memory"}, "StuffDocumentsChain": {"Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/summarization", "Structure answers with OpenAI functions": "https://python.langchain.com/docs/use_cases/question_answering/integrations/openai_functions_retrieval_qa", "Lost in the middle: The problem with long contexts": "https://python.langchain.com/docs/modules/data_connection/document_transformers/post_retrieval/long_context_reorder"}, "ReduceDocumentsChain": {"Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/summarization"}, "MapReduceDocumentsChain": {"Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/summarization"}, "create_extraction_chain_pydantic": {"Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/extraction"}, "PydanticOutputParser": {"Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/extraction", "MultiQueryRetriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/MultiQueryRetriever", "WebResearchRetriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/web_research", "Retry parser": "https://python.langchain.com/docs/modules/model_io/output_parsers/retry", "Pydantic (JSON) parser": "https://python.langchain.com/docs/modules/model_io/output_parsers/pydantic"}, "get_openapi_chain": {"Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/apis"}, "APIChain": {"Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/apis"}, "open_meteo_docs": {"Set env var OPENAI_API_KEY or load from a .env file:": 
"https://python.langchain.com/docs/use_cases/apis"}, "tmdb_docs": {"Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/apis"}, "podcast_docs": {"Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/apis"}, "LLMRequestsChain": {"Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/apis"}, "create_tagging_chain_pydantic": {"Set env var OPENAI_API_KEY or load from a .env file:": "https://python.langchain.com/docs/use_cases/tagging"}, "MultiQueryRetriever": {"Question Answering": "https://python.langchain.com/docs/use_cases/question_answering/question_answering", "MultiQueryRetriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/MultiQueryRetriever"}, "MarkdownHeaderTextSplitter": {"Perform context-aware text splitting": "https://python.langchain.com/docs/use_cases/question_answering/how_to/document-context-aware-QA", "MarkdownHeaderTextSplitter": "https://python.langchain.com/docs/modules/data_connection/document_transformers/text_splitters/markdown_header_metadata"}, "create_conversational_retrieval_agent": {"Conversational Retrieval Agent": "https://python.langchain.com/docs/use_cases/question_answering/how_to/conversational_retrieval_agents"}, "AgentTokenBufferMemory": {"Conversational Retrieval Agent": "https://python.langchain.com/docs/use_cases/question_answering/how_to/conversational_retrieval_agents"}, "create_sql_query_chain": {"Multiple Retrieval Sources": "https://python.langchain.com/docs/use_cases/question_answering/how_to/multiple_retrieval", "Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/qa_structured/sql", "SQL": "https://python.langchain.com/docs/use_cases/sql/sql"}, "create_citation_fuzzy_match_chain": {"Cite sources": "https://python.langchain.com/docs/use_cases/question_answering/how_to/qa_citations"}, "BaseRetriever": {"Retrieve as you generate with FLARE": "https://python.langchain.com/docs/use_cases/question_answering/how_to/flare"}, "AsyncCallbackManagerForRetrieverRun": {"Retrieve as you generate with FLARE": "https://python.langchain.com/docs/use_cases/question_answering/how_to/flare"}, "CallbackManagerForRetrieverRun": {"Retrieve as you generate with FLARE": "https://python.langchain.com/docs/use_cases/question_answering/how_to/flare"}, "FlareChain": {"Retrieve as you generate with FLARE": "https://python.langchain.com/docs/use_cases/question_answering/how_to/flare"}, "HypotheticalDocumentEmbedder": {"Improve document indexing with HyDE": "https://python.langchain.com/docs/use_cases/question_answering/how_to/hyde"}, "create_qa_with_sources_chain": {"Structure answers with OpenAI functions": "https://python.langchain.com/docs/use_cases/question_answering/integrations/openai_functions_retrieval_qa"}, "create_qa_with_structure_chain": {"Structure answers with OpenAI functions": "https://python.langchain.com/docs/use_cases/question_answering/integrations/openai_functions_retrieval_qa"}, "NeptuneGraph": {"Neptune Open Cypher QA Chain": "https://python.langchain.com/docs/use_cases/more/graph/neptune_cypher_qa"}, "NeptuneOpenCypherQAChain": {"Neptune Open Cypher QA Chain": "https://python.langchain.com/docs/use_cases/more/graph/neptune_cypher_qa"}, "NebulaGraphQAChain": {"NebulaGraphQAChain": "https://python.langchain.com/docs/use_cases/more/graph/graph_nebula_qa"}, "NebulaGraph": {"NebulaGraphQAChain": 
"https://python.langchain.com/docs/use_cases/more/graph/graph_nebula_qa"}, "MemgraphGraph": {"Memgraph QA chain": "https://python.langchain.com/docs/use_cases/more/graph/graph_memgraph_qa"}, "KuzuGraph": {"KuzuQAChain": "https://python.langchain.com/docs/use_cases/more/graph/graph_kuzu_qa"}, "KuzuQAChain": {"KuzuQAChain": "https://python.langchain.com/docs/use_cases/more/graph/graph_kuzu_qa"}, "HugeGraphQAChain": {"HugeGraph QA Chain": "https://python.langchain.com/docs/use_cases/more/graph/graph_hugegraph_qa"}, "HugeGraph": {"HugeGraph QA Chain": "https://python.langchain.com/docs/use_cases/more/graph/graph_hugegraph_qa"}, "GraphSparqlQAChain": {"GraphSparqlQAChain": "https://python.langchain.com/docs/use_cases/more/graph/graph_sparql_qa"}, "RdfGraph": {"GraphSparqlQAChain": "https://python.langchain.com/docs/use_cases/more/graph/graph_sparql_qa"}, "ArangoGraph": {"ArangoDB QA chain": "https://python.langchain.com/docs/use_cases/more/graph/graph_arangodb_qa"}, "ArangoGraphQAChain": {"ArangoDB QA chain": "https://python.langchain.com/docs/use_cases/more/graph/graph_arangodb_qa"}, "OntotextGraphDBGraph": {"Ontotext GraphDB QA chain": "https://python.langchain.com/docs/use_cases/more/graph/graph_ontotext_graphdb_qa"}, "OntotextGraphDBQAChain": {"Ontotext GraphDB QA chain": "https://python.langchain.com/docs/use_cases/more/graph/graph_ontotext_graphdb_qa"},"GraphIndexCreator": {"Graph QA": "https://python.langchain.com/docs/use_cases/more/graph/graph_qa"}, "GraphQAChain": {"Graph QA": "https://python.langchain.com/docs/use_cases/more/graph/graph_qa"}, "NetworkxEntityGraph": {"Graph QA": "https://python.langchain.com/docs/use_cases/more/graph/graph_qa"}, "FalkorDBGraph": {"FalkorDBQAChain": "https://python.langchain.com/docs/use_cases/more/graph/graph_falkordb_qa"}, "FalkorDBQAChain": {"FalkorDBQAChain": "https://python.langchain.com/docs/use_cases/more/graph/graph_falkordb_qa"}, "AgentFinish": {"Agents": "https://python.langchain.com/docs/use_cases/more/agents/agents", "Plug-and-Plai": "https://python.langchain.com/docs/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval_using_plugnplai", "Wikibase Agent": "https://python.langchain.com/docs/use_cases/more/agents/agents/wikibase_agent", "SalesGPT - Your Context-Aware AI Sales Assistant With Knowledge Base": "https://python.langchain.com/docs/use_cases/more/agents/agents/sales_agent_with_context", "Custom Agent with PlugIn Retrieval": "https://python.langchain.com/docs/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval", "Custom multi-action agent": "https://python.langchain.com/docs/modules/agents/how_to/custom_multi_action_agent", "Running Agent as an Iterator": "https://python.langchain.com/docs/modules/agents/how_to/agent_iter", "Custom agent": "https://python.langchain.com/docs/modules/agents/how_to/custom_agent", "Custom agent with tool retrieval": "https://python.langchain.com/docs/modules/agents/how_to/custom_agent_with_tool_retrieval"}, "BaseSingleActionAgent": {"Agents": "https://python.langchain.com/docs/use_cases/more/agents/agents", "Custom agent": "https://python.langchain.com/docs/modules/agents/how_to/custom_agent"}, "FileChatMessageHistory": {"AutoGPT": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/autogpt"}, "BaseLLM": {"BabyAGI User Guide": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/baby_agi", "BabyAGI with Tools": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/baby_agi_with_agent", "SalesGPT - Your Context-Aware 
AI Sales Assistant With Knowledge Base": "https://python.langchain.com/docs/use_cases/more/agents/agents/sales_agent_with_context"}, "VectorStore": {"BabyAGI User Guide": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/baby_agi", "BabyAGI with Tools": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/baby_agi_with_agent"}, "Chain": {"BabyAGI User Guide": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/baby_agi", "BabyAGI with Tools": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/baby_agi_with_agent", "SalesGPT - Your Context-Aware AI Sales Assistant With Knowledge Base": "https://python.langchain.com/docs/use_cases/more/agents/agents/sales_agent_with_context", "Custom chain": "https://python.langchain.com/docs/modules/chains/how_to/custom_chain"}, "BaseTool": {"!pip install bs4": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/marathon_times", "Defining Custom Tools": "https://python.langchain.com/docs/modules/agents/tools/custom_tools", "Combine agents and vector stores": "https://python.langchain.com/docs/modules/agents/how_to/agent_vectorstore", "Custom functions with OpenAI Functions Agent": "https://python.langchain.com/docs/modules/agents/how_to/custom-functions-with-openai-functions-agent"}, "BaseCombineDocumentsChain": {"!pip install bs4": "https://python.langchain.com/docs/use_cases/more/agents/autonomous_agents/marathon_times"}, "LLMSingleActionAgent": {"Plug-and-Plai": "https://python.langchain.com/docs/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval_using_plugnplai", "Wikibase Agent": "https://python.langchain.com/docs/use_cases/more/agents/agents/wikibase_agent", "SalesGPT - Your Context-Aware AI Sales Assistant With Knowledge Base": "https://python.langchain.com/docs/use_cases/more/agents/agents/sales_agent_with_context", "Custom Agent with PlugIn Retrieval": "https://python.langchain.com/docs/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval", "Custom agent with tool retrieval": "https://python.langchain.com/docs/modules/agents/how_to/custom_agent_with_tool_retrieval"}, "AgentOutputParser": {"Plug-and-Plai": "https://python.langchain.com/docs/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval_using_plugnplai", "Wikibase Agent": "https://python.langchain.com/docs/use_cases/more/agents/agents/wikibase_agent", "SalesGPT - Your Context-Aware AI Sales Assistant With Knowledge Base": "https://python.langchain.com/docs/use_cases/more/agents/agents/sales_agent_with_context", "Custom Agent with PlugIn Retrieval": "https://python.langchain.com/docs/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval", "Custom agent with tool retrieval": "https://python.langchain.com/docs/modules/agents/how_to/custom_agent_with_tool_retrieval"}, "StringPromptTemplate": {"Plug-and-Plai": "https://python.langchain.com/docs/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval_using_plugnplai", "Wikibase Agent": "https://python.langchain.com/docs/use_cases/more/agents/agents/wikibase_agent", "SalesGPT - Your Context-Aware AI Sales Assistant With Knowledge Base": "https://python.langchain.com/docs/use_cases/more/agents/agents/sales_agent_with_context", "Custom Agent with PlugIn Retrieval": "https://python.langchain.com/docs/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval", "Custom agent with tool retrieval": 
"https://python.langchain.com/docs/modules/agents/how_to/custom_agent_with_tool_retrieval", "Custom prompt template": "https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/custom_prompt_template", "Connecting to a Feature Store": "https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/connecting_to_a_feature_store"}, "AIPlugin": {"Plug-and-Plai": "https://python.langchain.com/docs/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval_using_plugnplai", "Custom Agent with PlugIn Retrieval": "https://python.langchain.com/docs/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval"}, "SteamshipImageGenerationTool": {"Multi-modal outputs: Image & Text": "https://python.langchain.com/docs/use_cases/more/agents/multi_modal/multi_modal_output_agent"}, "RegexParser": {"Multi-Agent Simulated Environment: Petting Zoo": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/petting_zoo", "Multi-agent decentralized speaker selection": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/multiagent_bidding", "Multi-agent authoritarian speaker selection": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/multiagent_authoritarian", "Simulated Environment: Gymnasium": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/gymnasium"}, "TimeWeightedVectorStoreRetriever": {"Generative Agents in LangChain": "https://python.langchain.com/docs/use_cases/more/agents/agent_simulations/characters"}, "LLMBashChain": {"Bash chain": "https://python.langchain.com/docs/use_cases/more/code_writing/llm_bash"}, "BashOutputParser": {"Bash chain": "https://python.langchain.com/docs/use_cases/more/code_writing/llm_bash"}, "BashProcess": {"Bash chain": "https://python.langchain.com/docs/use_cases/more/code_writing/llm_bash"}, "LLMSymbolicMathChain": {"LLM Symbolic Math ": "https://python.langchain.com/docs/use_cases/more/code_writing/llm_symbolic_math"}, "LLMSummarizationCheckerChain": {"Summarization checker chain": "https://python.langchain.com/docs/use_cases/more/self_check/llm_summarization_checker"}, "LLMCheckerChain": {"Self-checking chain": "https://python.langchain.com/docs/use_cases/more/self_check/llm_checker"}, "ElasticsearchDatabaseChain": {"Set env var OPENAI_API_KEY or load from a .env file": "https://python.langchain.com/docs/use_cases/qa_structured/sql", "Elasticsearch": "https://python.langchain.com/docs/use_cases/qa_structured/integrations/elasticsearch", "SQL": "https://python.langchain.com/docs/use_cases/sql/sql"}, "SQLRecordManager": {"Indexing": "https://python.langchain.com/docs/modules/data_connection/indexing"}, "index": {"Indexing": "https://python.langchain.com/docs/modules/data_connection/indexing"}, "BaseLoader": {"Indexing": "https://python.langchain.com/docs/modules/data_connection/indexing"}, "InMemoryStore": {"Caching": "https://python.langchain.com/docs/modules/data_connection/text_embedding/caching_embeddings", "MultiVector Retriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/multi_vector", "Parent Document Retriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/parent_document_retriever"}, "LocalFileStore": {"Caching": "https://python.langchain.com/docs/modules/data_connection/text_embedding/caching_embeddings"}, "RedisStore": {"Caching": "https://python.langchain.com/docs/modules/data_connection/text_embedding/caching_embeddings"}, "CacheBackedEmbeddings": {"Caching": 
"https://python.langchain.com/docs/modules/data_connection/text_embedding/caching_embeddings"}, "EnsembleRetriever": {"Ensemble Retriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/ensemble"}, "MultiVectorRetriever": {"MultiVector Retriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/multi_vector"}, "JsonKeyOutputFunctionsParser": {"MultiVector Retriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/multi_vector", "prompt_llm_parser.md": "https://python.langchain.com/docs/expression_language/cookbook/prompt_llm_parser"}, "ParentDocumentRetriever": {"Parent Document Retriever": "https://python.langchain.com/docs/modules/data_connection/retrievers/parent_document_retriever"}, "SentenceTransformersTokenTextSplitter": {"Split by tokens ": "https://python.langchain.com/docs/modules/data_connection/document_transformers/text_splitters/split_by_token"}, "NLTKTextSplitter": {"Split by tokens ": "https://python.langchain.com/docs/modules/data_connection/document_transformers/text_splitters/split_by_token"}, "ChatMessageHistory": {"Message Memory in Agent backed by a database": "https://python.langchain.com/docs/modules/memory/agent_with_memory_in_db"}, "BaseMemory": {"Custom Memory": "https://python.langchain.com/docs/modules/memory/custom_memory"}, "ConversationKGMemory": {"Conversation Knowledge Graph": "https://python.langchain.com/docs/modules/memory/types/kg"}, "ConversationTokenBufferMemory": {"Conversation Token Buffer": "https://python.langchain.com/docs/modules/memory/types/token_buffer"}, "tracing_enabled": {"Multiple callback handlers": "https://python.langchain.com/docs/modules/callbacks/multiple_callbacks"}, "FileCallbackHandler": {"Logging to file": "https://python.langchain.com/docs/modules/callbacks/filecallbackhandler"}, "AsyncCallbackHandler": {"Async callbacks": "https://python.langchain.com/docs/modules/callbacks/async_callbacks"}, "StructuredTool": {"Multi-Input Tools": "https://python.langchain.com/docs/modules/agents/tools/multi_input_tool", "Defining Custom Tools": "https://python.langchain.com/docs/modules/agents/tools/custom_tools"}, "AsyncCallbackManagerForToolRun": {"Defining Custom Tools": "https://python.langchain.com/docs/modules/agents/tools/custom_tools"}, "CallbackManagerForToolRun": {"Defining Custom Tools": "https://python.langchain.com/docs/modules/agents/tools/custom_tools"}, "ToolException": {"Defining Custom Tools": "https://python.langchain.com/docs/modules/agents/tools/custom_tools"}, "format_tool_to_openai_function": {"Tools as OpenAI Functions": "https://python.langchain.com/docs/modules/agents/tools/tools_as_openai_functions"}, "RequestsGetTool": {"Tool Input Schema": "https://python.langchain.com/docs/modules/agents/tools/tool_input_validation"}, "HumanApprovalCallbackHandler": {"Human-in-the-loop Tool Validation": "https://python.langchain.com/docs/modules/agents/tools/human_approval"}, "XMLAgent": {"XML Agent": "https://python.langchain.com/docs/modules/agents/agent_types/xml_agent", "Agents": "https://python.langchain.com/docs/expression_language/cookbook/agent"}, "DocstoreExplorer": {"ReAct document store": "https://python.langchain.com/docs/modules/agents/agent_types/react_docstore"}, "ReadOnlySharedMemory": {"Shared memory across agents and tools": "https://python.langchain.com/docs/modules/agents/how_to/sharedmemory_for_tools"}, "BaseMultiActionAgent": {"Custom multi-action agent": 
"https://python.langchain.com/docs/modules/agents/how_to/custom_multi_action_agent"}, "FinalStreamingStdOutCallbackHandler": {"Streaming final agent output": "https://python.langchain.com/docs/modules/agents/how_to/streaming_stdout_final_only"}, "LangChainTracer": {"Async API": "https://python.langchain.com/docs/modules/agents/how_to/async_agent"}, "HumanInputChatModel": {"Human input chat model": "https://python.langchain.com/docs/modules/model_io/models/chat/human_input_chat_model"}, "CallbackManagerForLLMRun": {"Custom LLM": "https://python.langchain.com/docs/modules/model_io/models/llms/custom_llm"}, "LLM": {"Custom LLM": "https://python.langchain.com/docs/modules/model_io/models/llms/custom_llm"}, "HumanInputLLM": {"Human input LLM": "https://python.langchain.com/docs/modules/model_io/models/llms/human_input_llm"}, "OutputFixingParser": {"Retry parser": "https://python.langchain.com/docs/modules/model_io/output_parsers/retry"}, "RetryOutputParser": {"Retry parser": "https://python.langchain.com/docs/modules/model_io/output_parsers/retry"}, "RetryWithErrorOutputParser": {"Retry parser": "https://python.langchain.com/docs/modules/model_io/output_parsers/retry"}, "EnumOutputParser": {"Enum parser": "https://python.langchain.com/docs/modules/model_io/output_parsers/enum"}, "MaxMarginalRelevanceExampleSelector": {"Select by maximal marginal relevance (MMR)": "https://python.langchain.com/docs/modules/model_io/prompts/example_selectors/mmr"}, "SemanticSimilarityExampleSelector": {"Select by maximal marginal relevance (MMR)": "https://python.langchain.com/docs/modules/model_io/prompts/example_selectors/mmr", "Few-shot examples for chat models": "https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/few_shot_examples_chat"}, "FewShotPromptTemplate": {"Select by maximal marginal relevance (MMR)": "https://python.langchain.com/docs/modules/model_io/prompts/example_selectors/mmr", "Select by n-gram overlap": "https://python.langchain.com/docs/modules/model_io/prompts/example_selectors/ngram_overlap"}, "BaseExampleSelector": {"Custom example selector": "https://python.langchain.com/docs/modules/model_io/prompts/example_selectors/custom_example_selector"}, "NGramOverlapExampleSelector": {"Select by n-gram overlap": "https://python.langchain.com/docs/modules/model_io/prompts/example_selectors/ngram_overlap"}, "FewShotChatMessagePromptTemplate": {"Few-shot examples for chat models": "https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/few_shot_examples_chat"}, "ChatMessagePromptTemplate": {"Types of `MessagePromptTemplate`": "https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/msg_prompt_templates"}, "MultiPromptChain": {"Router": "https://python.langchain.com/docs/modules/chains/foundational/router"}, "LLMRouterChain": {"Router": "https://python.langchain.com/docs/modules/chains/foundational/router"}, "RouterOutputParser": {"Router": "https://python.langchain.com/docs/modules/chains/foundational/router"}, "EmbeddingRouterChain": {"Router": "https://python.langchain.com/docs/modules/chains/foundational/router"}, "BaseLanguageModel": {"Custom chain": "https://python.langchain.com/docs/modules/chains/how_to/custom_chain"}, "AsyncCallbackManagerForChainRun": {"Custom chain": "https://python.langchain.com/docs/modules/chains/how_to/custom_chain"}, "CallbackManagerForChainRun": {"Custom chain": "https://python.langchain.com/docs/modules/chains/how_to/custom_chain"}, "BasePromptTemplate": {"Custom chain": 
"https://python.langchain.com/docs/modules/chains/how_to/custom_chain"}, "create_openai_fn_chain": {"Using OpenAI functions": "https://python.langchain.com/docs/modules/chains/how_to/openai_functions"}, "create_structured_output_chain": {"Using OpenAI functions": "https://python.langchain.com/docs/modules/chains/how_to/openai_functions"}, "RunnablePassthrough": {"First we add a step to load memory": "https://python.langchain.com/docs/expression_language/cookbook/retrieval", "prompt_llm_parser.md": "https://python.langchain.com/docs/expression_language/cookbook/prompt_llm_parser", "multiple_chains.md": "https://python.langchain.com/docs/expression_language/cookbook/multiple_chains"}, "format_document": {"First we add a step to load memory": "https://python.langchain.com/docs/expression_language/cookbook/retrieval"}, "RunnableLambda": {"sql_db.md": "https://python.langchain.com/docs/expression_language/cookbook/sql_db", "Run arbitrary functions": "https://python.langchain.com/docs/expression_language/how_to/functions"}, "JsonOutputFunctionsParser": {"prompt_llm_parser.md": "https://python.langchain.com/docs/expression_language/cookbook/prompt_llm_parser"}, "RunnableConfig": {"Run arbitrary functions": "https://python.langchain.com/docs/expression_language/how_to/functions"}, "GoogleSpeechToTextLoader": {"Google Cloud Speech-to-Text": "https://python.langchain.com/docs/integrations/document_loaders/google_speech_to_text"}, "GoogleTranslateTransformer": {"Google Cloud Translation": "https://python.langchain.com/docs/integrations/document_loaders/google_translate"}} diff --git a/docs/docs/integrations/providers/ontotext_graphdb.mdx b/docs/docs/integrations/providers/ontotext_graphdb.mdx new file mode 100644 index 00000000000..1b941e72d92 --- /dev/null +++ b/docs/docs/integrations/providers/ontotext_graphdb.mdx @@ -0,0 +1,21 @@ +# Ontotext GraphDB + +>[Ontotext GraphDB](https://graphdb.ontotext.com/) is a graph database and knowledge discovery tool compliant with RDF and SPARQL. + +## Dependencies + +Install the [rdflib](https://github.com/RDFLib/rdflib) package with +```bash +pip install rdflib==7.0.0 +``` + +## Graph QA Chain + +Connect your GraphDB Database with a chat model to get insights on your data. + +See the notebook example [here](/docs/use_cases/graph/graph_ontotext_graphdb_qa). + +```python +from langchain_community.graphs import OntotextGraphDBGraph +from langchain.chains import OntotextGraphDBQAChain +``` \ No newline at end of file diff --git a/docs/docs/use_cases/graph/graph_ontotext_graphdb_qa.ipynb b/docs/docs/use_cases/graph/graph_ontotext_graphdb_qa.ipynb new file mode 100644 index 00000000000..b1afd3320af --- /dev/null +++ b/docs/docs/use_cases/graph/graph_ontotext_graphdb_qa.ipynb @@ -0,0 +1,543 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "922a7a98-7d73-4a1a-8860-76a33451d1be", + "metadata": { + "id": "922a7a98-7d73-4a1a-8860-76a33451d1be" + }, + "source": [ + "# Ontotext GraphDB QA Chain\n", + "\n", + "This notebook shows how to use LLMs to provide natural language querying (NLQ to SPARQL, also called text2sparql) for [Ontotext GraphDB](https://graphdb.ontotext.com/). 
Ontotext GraphDB is a graph database and knowledge discovery tool compliant with [RDF](https://www.w3.org/RDF/) and [SPARQL](https://www.w3.org/TR/sparql11-query/).\n", + "\n", + "## GraphDB LLM Functionalities\n", + "\n", + "GraphDB supports some LLM integration functionalities as described in [https://github.com/w3c/sparql-dev/issues/193](https://github.com/w3c/sparql-dev/issues/193):\n", + "\n", + "[gpt-queries](https://graphdb.ontotext.com/documentation/10.5/gpt-queries.html)\n", + "\n", + "* magic predicates to ask an LLM for text, list or table using data from your knowledge graph (KG)\n", + "* query explanation\n", + "* result explanation, summarization, rephrasing, translation\n", + "\n", + "[retrieval-graphdb-connector](https://graphdb.ontotext.com/documentation/10.5/retrieval-graphdb-connector.html)\n", + "\n", + "* Indexing of KG entities in a vector database\n", + "* Supports any text embedding algorithm and vector database\n", + "* Uses the same powerful connector (indexing) language that GraphDB uses for Elastic, Solr, Lucene\n", + "* Automatic synchronization of changes in RDF data to the KG entity index\n", + "* Supports nested objects (no UI support in GraphDB version 10.5)\n", + "* Serializes KG entities to text like this (e.g. for a Wines dataset):\n", + "\n", + "```\n", + "Franvino:\n", + "- is a RedWine.\n", + "- made from grape Merlo.\n", + "- made from grape Cabernet Franc.\n", + "- has sugar dry.\n", + "- has year 2012.\n", + "```\n", + "\n", + "[talk-to-graph](https://graphdb.ontotext.com/documentation/10.5/talk-to-graph.html)\n", + "\n", + "* A simple chatbot using a defined KG entity index\n", + "\n", + "## Querying the GraphDB Database\n", + "\n", + "For this tutorial, we won't use the GraphDB LLM integration, but SPARQL generation from NLQ. We'll use the Star Wars API (SWAPI) ontology and dataset that you can examine [here](https://drive.google.com/file/d/1wQ2K4uZp4eq3wlJ6_F_TxkOolaiczdYp/view?usp=drive_link).\n", + "\n", + "You will need to have a running GraphDB instance. This tutorial shows how to run the database locally using the [GraphDB Docker image](https://hub.docker.com/r/ontotext/graphdb). It provides a docker compose set-up, which populates GraphDB with the Star Wars dataset. All necessary files, including this notebook, can be downloaded from GDrive.\n", + "\n", + "### Set-up\n", + "\n", + "* Install [Docker](https://docs.docker.com/get-docker/). This tutorial is created using Docker version `24.0.7` which bundles [Docker Compose](https://docs.docker.com/compose/). For earlier Docker versions you may need to install Docker Compose separately.\n", + "* Download all files from [GDrive](https://drive.google.com/drive/folders/18dN7WQxfGu26Z9C9HUU5jBwDuPnVTLbl) into a local folder on your machine.\n", + "* Start GraphDB with the following script executed from this folder\n", + " ```\n", + " docker build --tag graphdb .\n", + " docker compose up -d graphdb\n", + " ```\n", + " You need to wait a couple of seconds for the database to start on `http://localhost:7200/`. The Star Wars dataset `starwars-data.trig` is automatically loaded into the `langchain` repository. The local SPARQL endpoint `http://localhost:7200/repositories/langchain` can be used to run queries against. You can also open the GraphDB Workbench at `http://localhost:7200/sparql` in your favourite web browser, where you can make queries interactively.\n", + "* Working environment\n", + "\n", + "If you use `conda`, create and activate a new conda env (e.g. 
`conda create -n graph_ontotext_graphdb_qa python=3.9.18`).\n", + "Install the following libraries:\n", + "\n", + "```\n", + "pip install jupyter==1.0.0\n", + "pip install openai==1.6.1\n", + "pip install rdflib==7.0.0\n", + "pip install langchain-openai==0.0.2\n", + "pip install langchain\n", + "```\n", + "\n", + "Run Jupyter with\n", + "```\n", + "jupyter notebook\n", + "```" + ] + }, + { + "cell_type": "markdown", + "id": "e51b397c-2fdc-4b99-9fed-1ab2b6ef7547", + "metadata": { + "id": "e51b397c-2fdc-4b99-9fed-1ab2b6ef7547" + }, + "source": [ + "### Specifying the Ontology\n", + "\n", + "In order for the LLM to be able to generate SPARQL, it needs to know the knowledge graph schema (the ontology). It can be provided using one of two parameters on the `OntotextGraphDBGraph` class:\n", + "\n", + "* `query_ontology`: a `CONSTRUCT` query that is executed on the SPARQL endpoint and returns the KG schema statements. We recommend that you store the ontology in its own named graph, which will make it easier to get only the relevant statements (as in the example below). `DESCRIBE` queries are not supported, because `DESCRIBE` returns the Symmetric Concise Bounded Description (SCBD), i.e. also the incoming class links. In the case of large graphs with millions of instances, this is not efficient. Check https://github.com/eclipse-rdf4j/rdf4j/issues/4857\n", + "* `local_file`: a local RDF ontology file. Supported RDF formats are `Turtle`, `RDF/XML`, `JSON-LD`, `N-Triples`, `Notation-3`, `Trig`, `Trix`, `N-Quads`.\n", + "\n", + "In either case, the ontology dump should:\n", + "\n", + "* Include enough information about classes, properties, property attachment to classes (using rdfs:domain, schema:domainIncludes or OWL restrictions), and taxonomies (important individuals).\n", + "* Not include overly verbose and irrelevant definitions and examples that do not help SPARQL construction." + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "dc8792e0-acfb-4310-b5fa-8f649e448870", + "metadata": { + "id": "dc8792e0-acfb-4310-b5fa-8f649e448870" + }, + "outputs": [], + "source": [ + "from langchain_community.graphs import OntotextGraphDBGraph\n", + "\n", + "# feeding the schema using a user construct query\n", + "\n", + "graph = OntotextGraphDBGraph(\n", + " query_endpoint=\"http://localhost:7200/repositories/langchain\",\n", + " query_ontology=\"CONSTRUCT {?s ?p ?o} FROM WHERE {?s ?p ?o}\",\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a08b8d8c-af01-4401-8069-5f2cd022a6df", + "metadata": { + "id": "a08b8d8c-af01-4401-8069-5f2cd022a6df" + }, + "outputs": [], + "source": [ + "# feeding the schema using a local RDF file\n", + "\n", + "graph = OntotextGraphDBGraph(\n", + " query_endpoint=\"http://localhost:7200/repositories/langchain\",\n", + " local_file=\"/path/to/langchain_graphdb_tutorial/starwars-ontology.nt\", # change the path here\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "583b26ce-fb0d-4e9c-b5cd-9ec0e3be8922", + "metadata": { + "id": "583b26ce-fb0d-4e9c-b5cd-9ec0e3be8922" + }, + "source": [ + "Either way, the ontology (schema) is fed to the LLM as `Turtle` since `Turtle` with appropriate prefixes is most compact and easiest for the LLM to remember.\n", + "\n", + "The Star Wars ontology is a bit unusual in that it includes a lot of specific triples about classes, e.g. 
that the species `:Aleena` live on ``, they are a subclass of `:Reptile`, have certain typical characteristics (average height, average lifespan, skinColor), and specific individuals (characters) are representatives of that class:\n", + "\n", + "\n", + "```\n", + "@prefix : .\n", + "@prefix owl: .\n", + "@prefix rdfs: .\n", + "@prefix xsd: .\n", + "\n", + ":Aleena a owl:Class, :Species ;\n", + " rdfs:label \"Aleena\" ;\n", + " rdfs:isDefinedBy ;\n", + " rdfs:subClassOf :Reptile, :Sentient ;\n", + " :averageHeight 80.0 ;\n", + " :averageLifespan \"79\" ;\n", + " :character ;\n", + " :film ;\n", + " :language \"Aleena\" ;\n", + " :planet ;\n", + " :skinColor \"blue\", \"gray\" .\n", + "\n", + " ...\n", + "\n", + " ```\n" + ] + }, + { + "cell_type": "markdown", + "id": "6277d911-b0f6-4aeb-9aa5-96416b668468", + "metadata": { + "id": "6277d911-b0f6-4aeb-9aa5-96416b668468" + }, + "source": [ + "In order to keep this tutorial simple, we use un-secured GraphDB. If GraphDB is secured, you should set the environment variables 'GRAPHDB_USERNAME' and 'GRAPHDB_PASSWORD' before the initialization of `OntotextGraphDBGraph`.\n", + "\n", + "```python\n", + "os.environ[\"GRAPHDB_USERNAME\"] = \"graphdb-user\"\n", + "os.environ[\"GRAPHDB_PASSWORD\"] = \"graphdb-password\"\n", + "\n", + "graph = OntotextGraphDBGraph(\n", + " query_endpoint=...,\n", + " query_ontology=...\n", + ")\n", + "```\n" + ] + }, + { + "cell_type": "markdown", + "id": "446d8a00-c98f-43b8-9e84-77b244f7bb24", + "metadata": { + "id": "446d8a00-c98f-43b8-9e84-77b244f7bb24" + }, + "source": [ + "### Question Answering against the StarWars Dataset\n", + "\n", + "We can now use the `OntotextGraphDBQAChain` to ask some questions." + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "fab63d88-511d-4049-9bf0-ca8748f1fbff", + "metadata": { + "id": "fab63d88-511d-4049-9bf0-ca8748f1fbff" + }, + "outputs": [], + "source": [ + "import os\n", + "\n", + "from langchain.chains import OntotextGraphDBQAChain\n", + "from langchain_openai import ChatOpenAI\n", + "\n", + "# We'll be using an OpenAI model which requires an OpenAI API Key.\n", + "# However, other models are available as well:\n", + "# https://python.langchain.com/docs/integrations/chat/\n", + "\n", + "# Set the environment variable `OPENAI_API_KEY` to your OpenAI API key\n", + "os.environ[\"OPENAI_API_KEY\"] = \"sk-***\"\n", + "\n", + "# Any available OpenAI model can be used here.\n", + "# We use 'gpt-4-1106-preview' because of the bigger context window.\n", + "# The 'gpt-4-1106-preview' model_name will deprecate in the future and will change to 'gpt-4-turbo' or similar,\n", + "# so be sure to consult with the OpenAI API https://platform.openai.com/docs/models for the correct naming.\n", + "\n", + "chain = OntotextGraphDBQAChain.from_llm(\n", + " ChatOpenAI(temperature=0, model_name=\"gpt-4-1106-preview\"),\n", + " graph=graph,\n", + " verbose=True,\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "64de8463-35b1-4c65-91e4-387daf4dd7d4", + "metadata": {}, + "source": [ + "Let's ask a simple one." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "f1dc4bea-b0f1-48f7-99a6-351a31acac7b", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "\n", + "\u001b[1m> Entering new OntotextGraphDBQAChain chain...\u001b[0m\n", + "Generated SPARQL:\n", + "\u001b[32;1m\u001b[1;3mPREFIX : \n", + "PREFIX rdfs: \n", + "\n", + "SELECT ?climate\n", + "WHERE {\n", + " ?planet rdfs:label \"Tatooine\" ;\n", + " :climate ?climate .\n", + "}\u001b[0m\n", + "\n", + "\u001b[1m> Finished chain.\u001b[0m\n" + ] + }, + { + "data": { + "text/plain": [ + "'The climate on Tatooine is arid.'" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "chain.invoke({chain.input_key: \"What is the climate on Tatooine?\"})[chain.output_key]" + ] + }, + { + "cell_type": "markdown", + "id": "6d3a37f4-5c56-4b3e-b6ae-3eb030ffcc8f", + "metadata": {}, + "source": [ + "And a bit more complicated one." + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "4dde8b18-4329-4a86-abfb-26d3e77034b7", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "\n", + "\u001b[1m> Entering new OntotextGraphDBQAChain chain...\u001b[0m\n", + "Generated SPARQL:\n", + "\u001b[32;1m\u001b[1;3mPREFIX : \n", + "PREFIX owl: \n", + "PREFIX rdfs: \n", + "\n", + "SELECT ?climate\n", + "WHERE {\n", + " ?character rdfs:label \"Luke Skywalker\" .\n", + " ?character :homeworld ?planet .\n", + " ?planet :climate ?climate .\n", + "}\u001b[0m\n", + "\n", + "\u001b[1m> Finished chain.\u001b[0m\n" + ] + }, + { + "data": { + "text/plain": [ + "\"The climate on Luke Skywalker's home planet is arid.\"" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "chain.invoke({chain.input_key: \"What is the climate on Luke Skywalker's home planet?\"})[\n", + " chain.output_key\n", + "]" + ] + }, + { + "cell_type": "markdown", + "id": "51d3ce3e-9528-4a65-8f3e-2281de08cbf1", + "metadata": {}, + "source": [ + "We can also ask more complicated questions like" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "ab6f55f1-a3e0-4615-abd2-3cb26619c8d9", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "\n", + "\u001b[1m> Entering new OntotextGraphDBQAChain chain...\u001b[0m\n", + "Generated SPARQL:\n", + "\u001b[32;1m\u001b[1;3mPREFIX : \n", + "PREFIX owl: \n", + "PREFIX rdf: \n", + "PREFIX xsd: \n", + "\n", + "SELECT (AVG(?boxOffice) AS ?averageBoxOffice)\n", + "WHERE {\n", + " ?film a :Film .\n", + " ?film :boxOffice ?boxOfficeValue .\n", + " BIND(xsd:decimal(?boxOfficeValue) AS ?boxOffice)\n", + "}\n", + "\u001b[0m\n", + "\n", + "\u001b[1m> Finished chain.\u001b[0m\n" + ] + }, + { + "data": { + "text/plain": [ + "'The average box office revenue for all the Star Wars movies is approximately 754.1 million dollars.'" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "chain.invoke(\n", + " {\n", + " chain.input_key: \"What is the average box office revenue for all the Star Wars movies?\"\n", + " }\n", + ")[chain.output_key]" + ] + }, + { + "cell_type": "markdown", + "id": "11511345-8436-4634-92c6-36f2c0dd44db", + "metadata": { + "id": "11511345-8436-4634-92c6-36f2c0dd44db" + }, + "source": [ + "### Chain Modifiers\n", + "\n", + "The Ontotext GraphDB QA chain allows prompt refinement for further improvement of 
your QA chain and enhancing the overall user experience of your app.\n", + "\n", + "\n", + "#### \"SPARQL Generation\" Prompt\n", + "\n", + "The prompt is used for the SPARQL query generation based on the user question and the KG schema.\n", + "\n", + "- `sparql_generation_prompt`\n", + "\n", + " Default value:\n", + " ````python\n", + " GRAPHDB_SPARQL_GENERATION_TEMPLATE = \"\"\"\n", + " Write a SPARQL SELECT query for querying a graph database.\n", + " The ontology schema delimited by triple backticks in Turtle format is:\n", + " ```\n", + " {schema}\n", + " ```\n", + " Use only the classes and properties provided in the schema to construct the SPARQL query.\n", + " Do not use any classes or properties that are not explicitly provided in the SPARQL query.\n", + " Include all necessary prefixes.\n", + " Do not include any explanations or apologies in your responses.\n", + " Do not wrap the query in backticks.\n", + " Do not include any text except the SPARQL query generated.\n", + " The question delimited by triple backticks is:\n", + " ```\n", + " {prompt}\n", + " ```\n", + " \"\"\"\n", + " GRAPHDB_SPARQL_GENERATION_PROMPT = PromptTemplate(\n", + " input_variables=[\"schema\", \"prompt\"],\n", + " template=GRAPHDB_SPARQL_GENERATION_TEMPLATE,\n", + " )\n", + " ````\n", + "\n", + "#### \"SPARQL Fix\" Prompt\n", + "\n", + "Sometimes, the LLM may generate a SPARQL query with syntactic errors or missing prefixes, etc. The chain will try to amend this by prompting the LLM to correct it a certain number of times.\n", + "\n", + "- `sparql_fix_prompt`\n", + "\n", + " Default value:\n", + " ````python\n", + " GRAPHDB_SPARQL_FIX_TEMPLATE = \"\"\"\n", + " This following SPARQL query delimited by triple backticks\n", + " ```\n", + " {generated_sparql}\n", + " ```\n", + " is not valid.\n", + " The error delimited by triple backticks is\n", + " ```\n", + " {error_message}\n", + " ```\n", + " Give me a correct version of the SPARQL query.\n", + " Do not change the logic of the query.\n", + " Do not include any explanations or apologies in your responses.\n", + " Do not wrap the query in backticks.\n", + " Do not include any text except the SPARQL query generated.\n", + " The ontology schema delimited by triple backticks in Turtle format is:\n", + " ```\n", + " {schema}\n", + " ```\n", + " \"\"\"\n", + " \n", + " GRAPHDB_SPARQL_FIX_PROMPT = PromptTemplate(\n", + " input_variables=[\"error_message\", \"generated_sparql\", \"schema\"],\n", + " template=GRAPHDB_SPARQL_FIX_TEMPLATE,\n", + " )\n", + " ````\n", + "\n", + "- `max_fix_retries`\n", + " \n", + " Default value: `5`\n", + "\n", + "#### \"Answering\" Prompt\n", + "\n", + "The prompt is used for answering the question based on the results returned from the database and the initial user question. By default, the LLM is instructed to only use the information from the returned result(s). 
If the result set is empty, the LLM should inform that it can't answer the question.\n", + "\n", + "- `qa_prompt`\n", + " \n", + " Default value:\n", + " ````python\n", + " GRAPHDB_QA_TEMPLATE = \"\"\"Task: Generate a natural language response from the results of a SPARQL query.\n", + " You are an assistant that creates well-written and human understandable answers.\n", + " The information part contains the information provided, which you can use to construct an answer.\n", + " The information provided is authoritative, you must never doubt it or try to use your internal knowledge to correct it.\n", + " Make your response sound like the information is coming from an AI assistant, but don't add any information.\n", + " Don't use internal knowledge to answer the question, just say you don't know if no information is available.\n", + " Information:\n", + " {context}\n", + " \n", + " Question: {prompt}\n", + " Helpful Answer:\"\"\"\n", + " GRAPHDB_QA_PROMPT = PromptTemplate(\n", + " input_variables=[\"context\", \"prompt\"], template=GRAPHDB_QA_TEMPLATE\n", + " )\n", + " ````" + ] + }, + { + "cell_type": "markdown", + "id": "2ef8c073-003d-44ab-8a7b-cf45c50f6370", + "metadata": { + "id": "2ef8c073-003d-44ab-8a7b-cf45c50f6370" + }, + "source": [ + "Once you're finished playing with QA with GraphDB, you can shut down the Docker environment by running\n", + "``\n", + "docker compose down -v --remove-orphans\n", + "``\n", + "from the directory with the Docker compose file." + ] + } + ], + "metadata": { + "colab": { + "provenance": [], + "toc_visible": true + }, + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.1" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/libs/community/langchain_community/graphs/__init__.py b/libs/community/langchain_community/graphs/__init__.py index 4037f157214..bd15f6465d1 100644 --- a/libs/community/langchain_community/graphs/__init__.py +++ b/libs/community/langchain_community/graphs/__init__.py @@ -9,6 +9,7 @@ from langchain_community.graphs.nebula_graph import NebulaGraph from langchain_community.graphs.neo4j_graph import Neo4jGraph from langchain_community.graphs.neptune_graph import NeptuneGraph from langchain_community.graphs.networkx_graph import NetworkxEntityGraph +from langchain_community.graphs.ontotext_graphdb_graph import OntotextGraphDBGraph from langchain_community.graphs.rdf_graph import RdfGraph from langchain_community.graphs.tigergraph_graph import TigerGraph @@ -24,4 +25,5 @@ __all__ = [ "ArangoGraph", "FalkorDBGraph", "TigerGraph", + "OntotextGraphDBGraph", ] diff --git a/libs/community/langchain_community/graphs/ontotext_graphdb_graph.py b/libs/community/langchain_community/graphs/ontotext_graphdb_graph.py new file mode 100644 index 00000000000..c1072a97f81 --- /dev/null +++ b/libs/community/langchain_community/graphs/ontotext_graphdb_graph.py @@ -0,0 +1,213 @@ +from __future__ import annotations + +import os +from typing import ( + TYPE_CHECKING, + List, + Optional, + Union, +) + +if TYPE_CHECKING: + import rdflib + + +class OntotextGraphDBGraph: + """Ontotext GraphDB https://graphdb.ontotext.com/ wrapper for graph operations. 
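
    Example (a minimal, illustrative sketch; the endpoint and repository name
    follow the tutorial's docker compose set-up and will differ for other
    deployments):

        from langchain_community.graphs import OntotextGraphDBGraph

        graph = OntotextGraphDBGraph(
            # Endpoint of the tutorial's local repository; adjust as needed.
            query_endpoint="http://localhost:7200/repositories/langchain",
            # Collects every statement in the default graph; in practice,
            # restrict the query to the named graph that stores the ontology.
            query_ontology="CONSTRUCT {?s ?p ?o} WHERE {?s ?p ?o}",
        )

        # The collected schema is exposed as Turtle.
        print(graph.get_schema)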
+ + *Security note*: Make sure that the database connection uses credentials + that are narrowly-scoped to only include necessary permissions. + Failure to do so may result in data corruption or loss, since the calling + code may attempt commands that would result in deletion, mutation + of data if appropriately prompted or reading sensitive data if such + data is present in the database. + The best way to guard against such negative outcomes is to (as appropriate) + limit the permissions granted to the credentials used with this tool. + + See https://python.langchain.com/docs/security for more information. + """ + + def __init__( + self, + query_endpoint: str, + query_ontology: Optional[str] = None, + local_file: Optional[str] = None, + local_file_format: Optional[str] = None, + ) -> None: + """ + Set up the GraphDB wrapper + + :param query_endpoint: SPARQL endpoint for queries, read access + + If GraphDB is secured, + set the environment variables 'GRAPHDB_USERNAME' and 'GRAPHDB_PASSWORD'. + + :param query_ontology: a `CONSTRUCT` query that is executed + on the SPARQL endpoint and returns the KG schema statements + Example: + 'CONSTRUCT {?s ?p ?o} FROM WHERE {?s ?p ?o}' + Currently, DESCRIBE queries like + 'PREFIX onto: + PREFIX rdfs: + DESCRIBE ?term WHERE { + ?term rdfs:isDefinedBy onto: + }' + are not supported, because DESCRIBE returns + the Symmetric Concise Bounded Description (SCBD), + i.e. also the incoming class links. + In the case of large graphs with millions of instances, this is not efficient. + Check https://github.com/eclipse-rdf4j/rdf4j/issues/4857 + + :param local_file: a local RDF ontology file. + Supported RDF formats: + Turtle, RDF/XML, JSON-LD, N-Triples, Notation-3, Trig, Trix, N-Quads. + If the RDF format can't be determined from the file extension, + explicitly pass the RDF format via the `local_file_format` param. + + :param local_file_format: Used if the RDF format can't be determined + from the local file extension. + One of "json-ld", "xml", "n3", "turtle", "nt", "trig", "nquads", "trix" + + Either `query_ontology` or `local_file` should be passed. + """ + + if query_ontology and local_file: + raise ValueError("Both file and query provided. Only one is allowed.") + + if not query_ontology and not local_file: + raise ValueError("Neither file nor query provided. One is required.") + + try: + import rdflib + from rdflib.plugins.stores import sparqlstore + except ImportError: + raise ValueError( + "Could not import rdflib python package. " + "Please install it with `pip install rdflib`." + ) + + auth = self._get_auth() + store = sparqlstore.SPARQLStore(auth=auth) + store.open(query_endpoint) + + self.graph = rdflib.Graph(store, identifier=None, bind_namespaces="none") + self._check_connectivity() + + if local_file: + ontology_schema_graph = self._load_ontology_schema_from_file( + local_file, local_file_format + ) + else: + self._validate_user_query(query_ontology) + ontology_schema_graph = self._load_ontology_schema_with_query( + query_ontology + ) + self.schema = ontology_schema_graph.serialize(format="turtle") + + @staticmethod + def _get_auth() -> Union[tuple, None]: + """ + Returns the basic authentication configuration + """ + username = os.environ.get("GRAPHDB_USERNAME", None) + password = os.environ.get("GRAPHDB_PASSWORD", None) + + if username: + if not password: + raise ValueError( + "Environment variable 'GRAPHDB_USERNAME' is set, " + "but 'GRAPHDB_PASSWORD' is not set."
+ ) + else: + return username, password + return None + + def _check_connectivity(self) -> None: + """ + Executes a simple `ASK` query to check connectivity + """ + try: + self.graph.query("ASK { ?s ?p ?o }") + except ValueError: + raise ValueError( + "Could not query the provided endpoint. " + "Please, check, if the value of the provided " + "query_endpoint points to the right repository. " + "If GraphDB is secured, please, " + "make sure that the environment variables " + "'GRAPHDB_USERNAME' and 'GRAPHDB_PASSWORD' are set." + ) + + @staticmethod + def _load_ontology_schema_from_file(local_file: str, local_file_format: str = None): + """ + Parse the ontology schema statements from the provided file + """ + import rdflib + + if not os.path.exists(local_file): + raise FileNotFoundError(f"File {local_file} does not exist.") + if not os.access(local_file, os.R_OK): + raise PermissionError(f"Read permission for {local_file} is restricted") + graph = rdflib.ConjunctiveGraph() + try: + graph.parse(local_file, format=local_file_format) + except Exception as e: + raise ValueError(f"Invalid file format for {local_file} : ", e) + return graph + + @staticmethod + def _validate_user_query(query_ontology: str) -> None: + """ + Validate the query is a valid SPARQL CONSTRUCT query + """ + from pyparsing import ParseException + from rdflib.plugins.sparql import prepareQuery + + if not isinstance(query_ontology, str): + raise TypeError("Ontology query must be provided as string.") + try: + parsed_query = prepareQuery(query_ontology) + except ParseException as e: + raise ValueError("Ontology query is not a valid SPARQL query.", e) + + if parsed_query.algebra.name != "ConstructQuery": + raise ValueError( + "Invalid query type. Only CONSTRUCT queries are supported." + ) + + def _load_ontology_schema_with_query(self, query: str): + """ + Execute the query for collecting the ontology schema statements + """ + from rdflib.exceptions import ParserError + + try: + results = self.graph.query(query) + except ParserError as e: + raise ValueError(f"Generated SPARQL statement is invalid\n{e}") + + return results.graph + + @property + def get_schema(self) -> str: + """ + Returns the schema of the graph database in turtle format + """ + return self.schema + + def query( + self, + query: str, + ) -> List[rdflib.query.ResultRow]: + """ + Query the graph. 
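        Only rows from SELECT results are returned; results of other SPARQL
        query forms (e.g. ASK or CONSTRUCT) are filtered out by the
        `ResultRow` check below.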
+ """ + from rdflib.exceptions import ParserError + from rdflib.query import ResultRow + + try: + res = self.graph.query(query) + except ParserError as e: + raise ValueError(f"Generated SPARQL statement is invalid\n{e}") + return [r for r in res if isinstance(r, ResultRow)] diff --git a/libs/community/poetry.lock b/libs/community/poetry.lock index c8f92876020..ea439ba7d65 100644 --- a/libs/community/poetry.lock +++ b/libs/community/poetry.lock @@ -3433,7 +3433,6 @@ files = [ {file = "jq-1.6.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:227b178b22a7f91ae88525810441791b1ca1fc71c86f03190911793be15cec3d"}, {file = "jq-1.6.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:780eb6383fbae12afa819ef676fc93e1548ae4b076c004a393af26a04b460742"}, {file = "jq-1.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:08ded6467f4ef89fec35b2bf310f210f8cd13fbd9d80e521500889edf8d22441"}, - {file = "jq-1.6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49e44ed677713f4115bd5bf2dbae23baa4cd503be350e12a1c1f506b0687848f"}, {file = "jq-1.6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:984f33862af285ad3e41e23179ac4795f1701822473e1a26bf87ff023e5a89ea"}, {file = "jq-1.6.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f42264fafc6166efb5611b5d4cb01058887d050a6c19334f6a3f8a13bb369df5"}, {file = "jq-1.6.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a67154f150aaf76cc1294032ed588436eb002097dd4fd1e283824bf753a05080"}, @@ -6223,6 +6222,7 @@ files = [ {file = "pymongo-4.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8729dbf25eb32ad0dc0b9bd5e6a0d0b7e5c2dc8ec06ad171088e1896b522a74"}, {file = "pymongo-4.6.1-cp312-cp312-win32.whl", hash = "sha256:3177f783ae7e08aaf7b2802e0df4e4b13903520e8380915e6337cdc7a6ff01d8"}, {file = "pymongo-4.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:00c199e1c593e2c8b033136d7a08f0c376452bac8a896c923fcd6f419e07bdd2"}, + {file = "pymongo-4.6.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6dcc95f4bb9ed793714b43f4f23a7b0c57e4ef47414162297d6f650213512c19"}, {file = "pymongo-4.6.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:13552ca505366df74e3e2f0a4f27c363928f3dff0eef9f281eb81af7f29bc3c5"}, {file = "pymongo-4.6.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:77e0df59b1a4994ad30c6d746992ae887f9756a43fc25dec2db515d94cf0222d"}, {file = "pymongo-4.6.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:3a7f02a58a0c2912734105e05dedbee4f7507e6f1bd132ebad520be0b11d46fd"}, @@ -7093,6 +7093,27 @@ PyYAML = "*" Shapely = ">=1.7.1" six = ">=1.15.0" +[[package]] +name = "rdflib" +version = "7.0.0" +description = "RDFLib is a Python library for working with RDF, a simple yet powerful language for representing information." 
+optional = true +python-versions = ">=3.8.1,<4.0.0" +files = [ + {file = "rdflib-7.0.0-py3-none-any.whl", hash = "sha256:0438920912a642c866a513de6fe8a0001bd86ef975057d6962c79ce4771687cd"}, + {file = "rdflib-7.0.0.tar.gz", hash = "sha256:9995eb8569428059b8c1affd26b25eac510d64f5043d9ce8c84e0d0036e995ae"}, +] + +[package.dependencies] +isodate = ">=0.6.0,<0.7.0" +pyparsing = ">=2.1.0,<4" + +[package.extras] +berkeleydb = ["berkeleydb (>=18.1.0,<19.0.0)"] +html = ["html5lib (>=1.0,<2.0)"] +lxml = ["lxml (>=4.3.0,<5.0.0)"] +networkx = ["networkx (>=2.0.0,<3.0.0)"] + [[package]] name = "referencing" version = "0.31.1" @@ -9226,9 +9247,9 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [extras] cli = ["typer"] -extended-testing = ["aiosqlite", "aleph-alpha-client", "anthropic", "arxiv", "assemblyai", "atlassian-python-api", "azure-ai-documentintelligence", "beautifulsoup4", "bibtexparser", "cassio", "chardet", "cohere", "dashvector", "databricks-vectorsearch", "datasets", "dgml-utils", "elasticsearch", "esprima", "faiss-cpu", "feedparser", "fireworks-ai", "geopandas", "gitpython", "google-cloud-documentai", "gql", "gradientai", "hdbcli", "hologres-vector", "html2text", "javelin-sdk", "jinja2", "jq", "jsonschema", "lxml", "markdownify", "motor", "msal", "mwparserfromhell", "mwxml", "newspaper3k", "numexpr", "oci", "openai", "openapi-pydantic", "oracle-ads", "pandas", "pdfminer-six", "pgvector", "praw", "psychicapi", "py-trello", "pymupdf", "pypdf", "pypdfium2", "pyspark", "rank-bm25", "rapidfuzz", "rapidocr-onnxruntime", "requests-toolbelt", "rspace_client", "scikit-learn", "sqlite-vss", "streamlit", "sympy", "telethon", "timescale-vector", "tqdm", "upstash-redis", "xata", "xmltodict", "zhipuai"] +extended-testing = ["aiosqlite", "aleph-alpha-client", "anthropic", "arxiv", "assemblyai", "atlassian-python-api", "azure-ai-documentintelligence", "beautifulsoup4", "bibtexparser", "cassio", "chardet", "cohere", "dashvector", "databricks-vectorsearch", "datasets", "dgml-utils", "elasticsearch", "esprima", "faiss-cpu", "feedparser", "fireworks-ai", "geopandas", "gitpython", "google-cloud-documentai", "gql", "gradientai", "hdbcli", "hologres-vector", "html2text", "javelin-sdk", "jinja2", "jq", "jsonschema", "lxml", "markdownify", "motor", "msal", "mwparserfromhell", "mwxml", "newspaper3k", "numexpr", "oci", "openai", "openapi-pydantic", "oracle-ads", "pandas", "pdfminer-six", "pgvector", "praw", "psychicapi", "py-trello", "pymupdf", "pypdf", "pypdfium2", "pyspark", "rank-bm25", "rapidfuzz", "rapidocr-onnxruntime", "rdflib", "requests-toolbelt", "rspace_client", "scikit-learn", "sqlite-vss", "streamlit", "sympy", "telethon", "timescale-vector", "tqdm", "upstash-redis", "xata", "xmltodict", "zhipuai"] [metadata] lock-version = "2.0" python-versions = ">=3.8.1,<4.0" -content-hash = "064816bab088c1f6ff9902cb998291581b66a6d7762f965ff805b4e0b9b2e7e9" +content-hash = "42d012441d7b42d273e11708b7e12308fc56b169d4d56c4c2511e7469743a983" diff --git a/libs/community/pyproject.toml b/libs/community/pyproject.toml index 07c71617861..6691ca8b471 100644 --- a/libs/community/pyproject.toml +++ b/libs/community/pyproject.toml @@ -90,6 +90,7 @@ zhipuai = {version = "^1.0.7", optional = true} elasticsearch = {version = "^8.12.0", optional = true} hdbcli = {version = "^2.19.21", optional = true} oci = {version = "^2.119.1", optional = true} +rdflib = {version = "7.0.0", optional = true} [tool.poetry.group.test] optional = true @@ -254,7 +255,8 @@ extended_testing = [ "zhipuai", 
"elasticsearch", "hdbcli", - "oci" + "oci", + "rdflib", ] [tool.ruff] @@ -303,7 +305,7 @@ markers = [ asyncio_mode = "auto" [tool.codespell] -skip = '.git,*.pdf,*.svg,*.pdf,*.yaml,*.ipynb,poetry.lock,*.min.js,*.css,package-lock.json,example_data,_dist,examples' +skip = '.git,*.pdf,*.svg,*.pdf,*.yaml,*.ipynb,poetry.lock,*.min.js,*.css,package-lock.json,example_data,_dist,examples,*.trig' # Ignore latin etc ignore-regex = '.*(Stati Uniti|Tense=Pres).*' # whats is a typo but used frequently in queries so kept as is diff --git a/libs/community/tests/integration_tests/graphs/docker-compose-ontotext-graphdb/Dockerfile b/libs/community/tests/integration_tests/graphs/docker-compose-ontotext-graphdb/Dockerfile new file mode 100644 index 00000000000..ed94886573c --- /dev/null +++ b/libs/community/tests/integration_tests/graphs/docker-compose-ontotext-graphdb/Dockerfile @@ -0,0 +1,6 @@ +FROM ontotext/graphdb:10.5.1 +RUN mkdir -p /opt/graphdb/dist/data/repositories/langchain +COPY config.ttl /opt/graphdb/dist/data/repositories/langchain/ +COPY starwars-data.trig / +COPY graphdb_create.sh /run.sh +ENTRYPOINT bash /run.sh \ No newline at end of file diff --git a/libs/community/tests/integration_tests/graphs/docker-compose-ontotext-graphdb/config.ttl b/libs/community/tests/integration_tests/graphs/docker-compose-ontotext-graphdb/config.ttl new file mode 100644 index 00000000000..dcbdeeebe12 --- /dev/null +++ b/libs/community/tests/integration_tests/graphs/docker-compose-ontotext-graphdb/config.ttl @@ -0,0 +1,46 @@ +@prefix rdfs: . +@prefix rep: . +@prefix sr: . +@prefix sail: . +@prefix graphdb: . + +[] a rep:Repository ; + rep:repositoryID "langchain" ; + rdfs:label "" ; + rep:repositoryImpl [ + rep:repositoryType "graphdb:SailRepository" ; + sr:sailImpl [ + sail:sailType "graphdb:Sail" ; + + graphdb:read-only "false" ; + + # Inference and Validation + graphdb:ruleset "empty" ; + graphdb:disable-sameAs "true" ; + graphdb:check-for-inconsistencies "false" ; + + # Indexing + graphdb:entity-id-size "32" ; + graphdb:enable-context-index "false" ; + graphdb:enablePredicateList "true" ; + graphdb:enable-fts-index "false" ; + graphdb:fts-indexes ("default" "iri") ; + graphdb:fts-string-literals-index "default" ; + graphdb:fts-iris-index "none" ; + + # Queries and Updates + graphdb:query-timeout "0" ; + graphdb:throw-QueryEvaluationException-on-timeout "false" ; + graphdb:query-limit-results "0" ; + + # Settable in the file but otherwise hidden in the UI and in the RDF4J console + graphdb:base-URL "http://example.org/owlim#" ; + graphdb:defaultNS "" ; + graphdb:imports "" ; + graphdb:repository-type "file-repository" ; + graphdb:storage-folder "storage" ; + graphdb:entity-index-size "10000000" ; + graphdb:in-memory-literal-properties "true" ; + graphdb:enable-literal-index "true" ; + ] + ]. 
diff --git a/libs/community/tests/integration_tests/graphs/docker-compose-ontotext-graphdb/docker-compose.yaml b/libs/community/tests/integration_tests/graphs/docker-compose-ontotext-graphdb/docker-compose.yaml new file mode 100644 index 00000000000..80c15421f95 --- /dev/null +++ b/libs/community/tests/integration_tests/graphs/docker-compose-ontotext-graphdb/docker-compose.yaml @@ -0,0 +1,9 @@ +version: '3.7' + +services: + + graphdb: + image: graphdb + container_name: graphdb + ports: + - "7200:7200" diff --git a/libs/community/tests/integration_tests/graphs/docker-compose-ontotext-graphdb/graphdb_create.sh b/libs/community/tests/integration_tests/graphs/docker-compose-ontotext-graphdb/graphdb_create.sh new file mode 100644 index 00000000000..dadbdfc1c4a --- /dev/null +++ b/libs/community/tests/integration_tests/graphs/docker-compose-ontotext-graphdb/graphdb_create.sh @@ -0,0 +1,33 @@ +#! /bin/bash +REPOSITORY_ID="langchain" +GRAPHDB_URI="http://localhost:7200/" + +echo -e "\nUsing GraphDB: ${GRAPHDB_URI}" + +function startGraphDB { + echo -e "\nStarting GraphDB..." + exec /opt/graphdb/dist/bin/graphdb +} + +function waitGraphDBStart { + echo -e "\nWaiting GraphDB to start..." + for _ in $(seq 1 5); do + CHECK_RES=$(curl --silent --write-out '%{http_code}' --output /dev/null ${GRAPHDB_URI}/rest/repositories) + if [ "${CHECK_RES}" = '200' ]; then + echo -e "\nUp and running" + break + fi + sleep 30s + echo "CHECK_RES: ${CHECK_RES}" + done +} + +function loadData { + echo -e "\nImporting starwars-data.trig" + curl -X POST -H "Content-Type: application/x-trig" -T /starwars-data.trig ${GRAPHDB_URI}/repositories/${REPOSITORY_ID}/statements +} + +startGraphDB & +waitGraphDBStart +loadData +wait diff --git a/libs/community/tests/integration_tests/graphs/docker-compose-ontotext-graphdb/start.sh b/libs/community/tests/integration_tests/graphs/docker-compose-ontotext-graphdb/start.sh new file mode 100755 index 00000000000..fe3856ad338 --- /dev/null +++ b/libs/community/tests/integration_tests/graphs/docker-compose-ontotext-graphdb/start.sh @@ -0,0 +1,5 @@ +set -ex + +docker compose down -v --remove-orphans +docker build --tag graphdb . +docker compose up -d graphdb diff --git a/libs/community/tests/integration_tests/graphs/docker-compose-ontotext-graphdb/starwars-data.trig b/libs/community/tests/integration_tests/graphs/docker-compose-ontotext-graphdb/starwars-data.trig new file mode 100644 index 00000000000..49276faee49 --- /dev/null +++ b/libs/community/tests/integration_tests/graphs/docker-compose-ontotext-graphdb/starwars-data.trig @@ -0,0 +1,43 @@ +@base . +@prefix voc: . +@prefix owl: . +@prefix rdfs: . + +{ + + + a voc:Besalisk , voc:Character ; + rdfs:label "Dexter Jettster" ; + voc:eyeColor "yellow" ; + voc:gender "male" ; + voc:height 198.0 ; + voc:mass 102.0 ; + voc:skinColor "brown" . + +} + + { + + voc:Character a owl:Class . + voc:Species a owl:Class . + + voc:Besalisk a voc:Species; + rdfs:label "Besalisk"; + voc:averageHeight 178.0; + voc:averageLifespan "75"; + voc:character ; + voc:language "besalisk"; + voc:skinColor "brown"; + voc:eyeColor "yellow" . + + voc:averageHeight a owl:DatatypeProperty . + voc:averageLifespan a owl:DatatypeProperty . + voc:character a owl:ObjectProperty . + voc:language a owl:DatatypeProperty . + voc:skinColor a owl:DatatypeProperty . + voc:eyeColor a owl:DatatypeProperty . + voc:gender a owl:DatatypeProperty . + voc:height a owl:DatatypeProperty . + voc:mass a owl:DatatypeProperty . 
+ +} diff --git a/libs/community/tests/integration_tests/graphs/test_ontotext_graphdb_graph.py b/libs/community/tests/integration_tests/graphs/test_ontotext_graphdb_graph.py new file mode 100644 index 00000000000..bba6c1ebb0b --- /dev/null +++ b/libs/community/tests/integration_tests/graphs/test_ontotext_graphdb_graph.py @@ -0,0 +1,181 @@ +from pathlib import Path + +import pytest + +from langchain_community.graphs import OntotextGraphDBGraph + +""" +cd libs/community/tests/integration_tests/graphs/docker-compose-ontotext-graphdb +./start.sh +""" + + +def test_query() -> None: + graph = OntotextGraphDBGraph( + query_endpoint="http://localhost:7200/repositories/langchain", + query_ontology="CONSTRUCT {?s ?p ?o}" + "FROM WHERE {?s ?p ?o}", + ) + + query_results = graph.query( + "PREFIX voc: " + "PREFIX rdfs: " + "SELECT ?eyeColor " + "WHERE {" + ' ?besalisk rdfs:label "Dexter Jettster" ; ' + " voc:eyeColor ?eyeColor ." + "}" + ) + assert len(query_results) == 1 + assert len(query_results[0]) == 1 + assert str(query_results[0][0]) == "yellow" + + +def test_get_schema_with_query() -> None: + graph = OntotextGraphDBGraph( + query_endpoint="http://localhost:7200/repositories/langchain", + query_ontology="CONSTRUCT {?s ?p ?o}" + "FROM WHERE {?s ?p ?o}", + ) + + from rdflib import Graph + + assert len(Graph().parse(data=graph.get_schema, format="turtle")) == 19 + + +@pytest.mark.parametrize( + "rdf_format, file_extension", + [ + ("json-ld", "json"), + ("json-ld", "jsonld"), + ("json-ld", "json-ld"), + ("xml", "rdf"), + ("xml", "xml"), + ("xml", "owl"), + ("pretty-xml", "xml"), + ("n3", "n3"), + ("turtle", "ttl"), + ("nt", "nt"), + ("trig", "trig"), + ("nquads", "nq"), + ("nquads", "nquads"), + ("trix", "trix"), + ], +) +def test_get_schema_from_file( + tmp_path: Path, rdf_format: str, file_extension: str +) -> None: + expected_number_of_ontology_statements = 19 + + graph = OntotextGraphDBGraph( + query_endpoint="http://localhost:7200/repositories/langchain", + query_ontology="CONSTRUCT {?s ?p ?o}" + "FROM WHERE {?s ?p ?o}", + ) + + from rdflib import ConjunctiveGraph, Graph + + assert ( + len(Graph().parse(data=graph.get_schema, format="turtle")) + == expected_number_of_ontology_statements + ) + + # serialize the ontology schema loaded with the query in a local file + # in various rdf formats and check that this results + # in the same number of statements + conjunctive_graph = ConjunctiveGraph() + ontology_context = conjunctive_graph.get_context("https://swapi.co/ontology/") + ontology_context.parse(data=graph.get_schema, format="turtle") + + assert len(ontology_context) == expected_number_of_ontology_statements + assert len(conjunctive_graph) == expected_number_of_ontology_statements + + local_file = tmp_path / ("starwars-ontology." 
+ file_extension) + conjunctive_graph.serialize(local_file, format=rdf_format) + + graph = OntotextGraphDBGraph( + query_endpoint="http://localhost:7200/repositories/langchain", + local_file=str(local_file), + ) + assert ( + len(Graph().parse(data=graph.get_schema, format="turtle")) + == expected_number_of_ontology_statements + ) + + +@pytest.mark.parametrize( + "rdf_format", ["json-ld", "xml", "n3", "turtle", "nt", "trig", "nquads", "trix"] +) +def test_get_schema_from_file_with_explicit_rdf_format( + tmp_path: Path, rdf_format: str +) -> None: + expected_number_of_ontology_statements = 19 + + graph = OntotextGraphDBGraph( + query_endpoint="http://localhost:7200/repositories/langchain", + query_ontology="CONSTRUCT {?s ?p ?o}" + "FROM WHERE {?s ?p ?o}", + ) + + from rdflib import ConjunctiveGraph, Graph + + assert ( + len(Graph().parse(data=graph.get_schema, format="turtle")) + == expected_number_of_ontology_statements + ) + + # serialize the ontology schema loaded with the query in a local file + # in various rdf formats and check that this results + # in the same number of statements + conjunctive_graph = ConjunctiveGraph() + ontology_context = conjunctive_graph.get_context("https://swapi.co/ontology/") + ontology_context.parse(data=graph.get_schema, format="turtle") + + assert len(ontology_context) == expected_number_of_ontology_statements + assert len(conjunctive_graph) == expected_number_of_ontology_statements + + local_file = tmp_path / "starwars-ontology.txt" + conjunctive_graph.serialize(local_file, format=rdf_format) + + graph = OntotextGraphDBGraph( + query_endpoint="http://localhost:7200/repositories/langchain", + local_file=str(local_file), + local_file_format=rdf_format, + ) + assert ( + len(Graph().parse(data=graph.get_schema, format="turtle")) + == expected_number_of_ontology_statements + ) + + +def test_get_schema_from_file_with_wrong_extension(tmp_path: Path) -> None: + expected_number_of_ontology_statements = 19 + + graph = OntotextGraphDBGraph( + query_endpoint="http://localhost:7200/repositories/langchain", + query_ontology="CONSTRUCT {?s ?p ?o}" + "FROM WHERE {?s ?p ?o}", + ) + + from rdflib import ConjunctiveGraph, Graph + + assert ( + len(Graph().parse(data=graph.get_schema, format="turtle")) + == expected_number_of_ontology_statements + ) + + conjunctive_graph = ConjunctiveGraph() + ontology_context = conjunctive_graph.get_context("https://swapi.co/ontology/") + ontology_context.parse(data=graph.get_schema, format="turtle") + + assert len(ontology_context) == expected_number_of_ontology_statements + assert len(conjunctive_graph) == expected_number_of_ontology_statements + + local_file = tmp_path / "starwars-ontology.trig" + conjunctive_graph.serialize(local_file, format="nquads") + + with pytest.raises(ValueError): + OntotextGraphDBGraph( + query_endpoint="http://localhost:7200/repositories/langchain", + local_file=str(local_file), + ) diff --git a/libs/community/tests/unit_tests/graphs/test_imports.py b/libs/community/tests/unit_tests/graphs/test_imports.py index fb98e973d07..653d7d540ba 100644 --- a/libs/community/tests/unit_tests/graphs/test_imports.py +++ b/libs/community/tests/unit_tests/graphs/test_imports.py @@ -12,6 +12,7 @@ EXPECTED_ALL = [ "ArangoGraph", "FalkorDBGraph", "TigerGraph", + "OntotextGraphDBGraph", ] diff --git a/libs/community/tests/unit_tests/graphs/test_ontotext_graphdb_graph.py b/libs/community/tests/unit_tests/graphs/test_ontotext_graphdb_graph.py new file mode 100644 index 00000000000..8b025fed36f --- /dev/null +++ 
b/libs/community/tests/unit_tests/graphs/test_ontotext_graphdb_graph.py @@ -0,0 +1,176 @@ +import os +import tempfile +import unittest + +import pytest + + +class TestOntotextGraphDBGraph(unittest.TestCase): + def test_import(self) -> None: + from langchain_community.graphs import OntotextGraphDBGraph # noqa: F401 + + @pytest.mark.requires("rdflib") + def test_validate_user_query_wrong_type(self) -> None: + from langchain_community.graphs import OntotextGraphDBGraph + + with self.assertRaises(TypeError) as e: + OntotextGraphDBGraph._validate_user_query( + [ + "PREFIX starwars: " + "PREFIX rdfs: " + "DESCRIBE starwars: ?term " + "WHERE {?term rdfs:isDefinedBy starwars: }" + ] + ) + self.assertEqual("Ontology query must be provided as string.", str(e.exception)) + + @pytest.mark.requires("rdflib") + def test_validate_user_query_invalid_sparql_syntax(self) -> None: + from langchain_community.graphs import OntotextGraphDBGraph + + with self.assertRaises(ValueError) as e: + OntotextGraphDBGraph._validate_user_query( + "CONSTRUCT {?s ?p ?o} FROM WHERE {?s ?p ?o" + ) + self.assertEqual( + "('Ontology query is not a valid SPARQL query.', " + "Expected ConstructQuery, " + "found end of text (at char 70), (line:1, col:71))", + str(e.exception), + ) + + @pytest.mark.requires("rdflib") + def test_validate_user_query_invalid_query_type_select(self) -> None: + from langchain_community.graphs import OntotextGraphDBGraph + + with self.assertRaises(ValueError) as e: + OntotextGraphDBGraph._validate_user_query("SELECT * { ?s ?p ?o }") + self.assertEqual( + "Invalid query type. Only CONSTRUCT queries are supported.", + str(e.exception), + ) + + @pytest.mark.requires("rdflib") + def test_validate_user_query_invalid_query_type_ask(self) -> None: + from langchain_community.graphs import OntotextGraphDBGraph + + with self.assertRaises(ValueError) as e: + OntotextGraphDBGraph._validate_user_query("ASK { ?s ?p ?o }") + self.assertEqual( + "Invalid query type. Only CONSTRUCT queries are supported.", + str(e.exception), + ) + + @pytest.mark.requires("rdflib") + def test_validate_user_query_invalid_query_type_describe(self) -> None: + from langchain_community.graphs import OntotextGraphDBGraph + + with self.assertRaises(ValueError) as e: + OntotextGraphDBGraph._validate_user_query( + "PREFIX swapi: " + "PREFIX rdfs: " + "DESCRIBE ?term WHERE { ?term rdfs:isDefinedBy swapi: }" + ) + self.assertEqual( + "Invalid query type. Only CONSTRUCT queries are supported.", + str(e.exception), + ) + + @pytest.mark.requires("rdflib") + def test_validate_user_query_construct(self) -> None: + from langchain_community.graphs import OntotextGraphDBGraph + + OntotextGraphDBGraph._validate_user_query( + "CONSTRUCT {?s ?p ?o} FROM WHERE {?s ?p ?o}" + ) + + @pytest.mark.requires("rdflib") + def test_check_connectivity(self) -> None: + from langchain_community.graphs import OntotextGraphDBGraph + + with self.assertRaises(ValueError) as e: + OntotextGraphDBGraph( + query_endpoint="http://localhost:7200/repositories/non-existing-repository", + query_ontology="PREFIX swapi: " + "PREFIX rdfs: " + "DESCRIBE ?term WHERE {?term rdfs:isDefinedBy swapi: }", + ) + self.assertEqual( + "Could not query the provided endpoint. " + "Please, check, if the value of the provided " + "query_endpoint points to the right repository. 
" + "If GraphDB is secured, please, make sure that the environment variables " + "'GRAPHDB_USERNAME' and 'GRAPHDB_PASSWORD' are set.", + str(e.exception), + ) + + @pytest.mark.requires("rdflib") + def test_local_file_does_not_exist(self) -> None: + from langchain_community.graphs import OntotextGraphDBGraph + + non_existing_file = os.path.join("non", "existing", "path", "to", "file.ttl") + with self.assertRaises(FileNotFoundError) as e: + OntotextGraphDBGraph._load_ontology_schema_from_file(non_existing_file) + self.assertEqual(f"File {non_existing_file} does not exist.", str(e.exception)) + + @pytest.mark.requires("rdflib") + def test_local_file_no_access(self) -> None: + from langchain_community.graphs import OntotextGraphDBGraph + + with tempfile.NamedTemporaryFile() as tmp_file: + tmp_file_name = tmp_file.name + + # Set file permissions to write and execute only + os.chmod(tmp_file_name, 0o300) + + with self.assertRaises(PermissionError) as e: + OntotextGraphDBGraph._load_ontology_schema_from_file(tmp_file_name) + + self.assertEqual( + f"Read permission for {tmp_file_name} is restricted", str(e.exception) + ) + + @pytest.mark.requires("rdflib") + def test_local_file_bad_syntax(self) -> None: + from langchain_community.graphs import OntotextGraphDBGraph + + with tempfile.TemporaryDirectory() as tempdir: + tmp_file_path = os.path.join(tempdir, "starwars-ontology.trig") + with open(tmp_file_path, "w") as tmp_file: + tmp_file.write("invalid trig") + + with self.assertRaises(ValueError) as e: + OntotextGraphDBGraph._load_ontology_schema_from_file(tmp_file_path) + self.assertEqual( + f"('Invalid file format for {tmp_file_path} : '" + ", BadSyntax('', 0, 'invalid trig', 0, " + "'expected directive or statement'))", + str(e.exception), + ) + + @pytest.mark.requires("rdflib") + def test_both_query_and_local_file_provided(self) -> None: + from langchain_community.graphs import OntotextGraphDBGraph + + with self.assertRaises(ValueError) as e: + OntotextGraphDBGraph( + query_endpoint="http://localhost:7200/repositories/non-existing-repository", + query_ontology="CONSTRUCT {?s ?p ?o}" + "FROM WHERE {?s ?p ?o}", + local_file="starwars-ontology-wrong.trig", + ) + self.assertEqual( + "Both file and query provided. Only one is allowed.", str(e.exception) + ) + + @pytest.mark.requires("rdflib") + def test_nor_query_nor_local_file_provided(self) -> None: + from langchain_community.graphs import OntotextGraphDBGraph + + with self.assertRaises(ValueError) as e: + OntotextGraphDBGraph( + query_endpoint="http://localhost:7200/repositories/non-existing-repository", + ) + self.assertEqual( + "Neither file nor query provided. 
One is required.", str(e.exception) + ) diff --git a/libs/langchain/langchain/chains/__init__.py b/libs/langchain/langchain/chains/__init__.py index 10a135e67b3..2b7ba6ac256 100644 --- a/libs/langchain/langchain/chains/__init__.py +++ b/libs/langchain/langchain/chains/__init__.py @@ -41,6 +41,7 @@ from langchain.chains.graph_qa.hugegraph import HugeGraphQAChain from langchain.chains.graph_qa.kuzu import KuzuQAChain from langchain.chains.graph_qa.nebulagraph import NebulaGraphQAChain from langchain.chains.graph_qa.neptune_cypher import NeptuneOpenCypherQAChain +from langchain.chains.graph_qa.ontotext_graphdb import OntotextGraphDBQAChain from langchain.chains.graph_qa.sparql import GraphSparqlQAChain from langchain.chains.history_aware_retriever import create_history_aware_retriever from langchain.chains.hyde.base import HypotheticalDocumentEmbedder @@ -96,6 +97,7 @@ __all__ = [ "GraphCypherQAChain", "GraphQAChain", "GraphSparqlQAChain", + "OntotextGraphDBQAChain", "HugeGraphQAChain", "HypotheticalDocumentEmbedder", "KuzuQAChain", diff --git a/libs/langchain/langchain/chains/graph_qa/ontotext_graphdb.py b/libs/langchain/langchain/chains/graph_qa/ontotext_graphdb.py new file mode 100644 index 00000000000..4a4d89b6753 --- /dev/null +++ b/libs/langchain/langchain/chains/graph_qa/ontotext_graphdb.py @@ -0,0 +1,182 @@ +"""Question answering over a graph.""" +from __future__ import annotations + +from typing import Any, Dict, List, Optional + +from langchain_community.graphs import OntotextGraphDBGraph +from langchain_core.callbacks.manager import CallbackManager +from langchain_core.language_models import BaseLanguageModel +from langchain_core.prompts.base import BasePromptTemplate +from langchain_core.pydantic_v1 import Field + +from langchain.callbacks.manager import CallbackManagerForChainRun +from langchain.chains.base import Chain +from langchain.chains.graph_qa.prompts import ( + GRAPHDB_QA_PROMPT, + GRAPHDB_SPARQL_FIX_PROMPT, + GRAPHDB_SPARQL_GENERATION_PROMPT, +) +from langchain.chains.llm import LLMChain + + +class OntotextGraphDBQAChain(Chain): + """Question-answering against Ontotext GraphDB + https://graphdb.ontotext.com/ by generating SPARQL queries. + + *Security note*: Make sure that the database connection uses credentials + that are narrowly-scoped to only include necessary permissions. + Failure to do so may result in data corruption or loss, since the calling + code may attempt commands that would result in deletion, mutation + of data if appropriately prompted or reading sensitive data if such + data is present in the database. + The best way to guard against such negative outcomes is to (as appropriate) + limit the permissions granted to the credentials used with this tool. + + See https://python.langchain.com/docs/security for more information. 
+ """ + + graph: OntotextGraphDBGraph = Field(exclude=True) + sparql_generation_chain: LLMChain + sparql_fix_chain: LLMChain + max_fix_retries: int + qa_chain: LLMChain + input_key: str = "query" #: :meta private: + output_key: str = "result" #: :meta private: + + @property + def input_keys(self) -> List[str]: + return [self.input_key] + + @property + def output_keys(self) -> List[str]: + _output_keys = [self.output_key] + return _output_keys + + @classmethod + def from_llm( + cls, + llm: BaseLanguageModel, + *, + sparql_generation_prompt: BasePromptTemplate = GRAPHDB_SPARQL_GENERATION_PROMPT, + sparql_fix_prompt: BasePromptTemplate = GRAPHDB_SPARQL_FIX_PROMPT, + max_fix_retries: int = 5, + qa_prompt: BasePromptTemplate = GRAPHDB_QA_PROMPT, + **kwargs: Any, + ) -> OntotextGraphDBQAChain: + """Initialize from LLM.""" + sparql_generation_chain = LLMChain(llm=llm, prompt=sparql_generation_prompt) + sparql_fix_chain = LLMChain(llm=llm, prompt=sparql_fix_prompt) + max_fix_retries = max_fix_retries + qa_chain = LLMChain(llm=llm, prompt=qa_prompt) + return cls( + qa_chain=qa_chain, + sparql_generation_chain=sparql_generation_chain, + sparql_fix_chain=sparql_fix_chain, + max_fix_retries=max_fix_retries, + **kwargs, + ) + + def _call( + self, + inputs: Dict[str, Any], + run_manager: Optional[CallbackManagerForChainRun] = None, + ) -> Dict[str, str]: + """ + Generate a SPARQL query, use it to retrieve a response from GraphDB and answer + the question. + """ + _run_manager = run_manager or CallbackManagerForChainRun.get_noop_manager() + callbacks = _run_manager.get_child() + prompt = inputs[self.input_key] + ontology_schema = self.graph.get_schema + + sparql_generation_chain_result = self.sparql_generation_chain.invoke( + {"prompt": prompt, "schema": ontology_schema}, callbacks=callbacks + ) + generated_sparql = sparql_generation_chain_result[ + self.sparql_generation_chain.output_key + ] + + generated_sparql = self._get_valid_sparql_query( + _run_manager, callbacks, generated_sparql, ontology_schema + ) + query_results = self.graph.query(generated_sparql) + + qa_chain_result = self.qa_chain.invoke( + {"prompt": prompt, "context": query_results}, callbacks=callbacks + ) + result = qa_chain_result[self.qa_chain.output_key] + return {self.output_key: result} + + def _get_valid_sparql_query( + self, + _run_manager: CallbackManagerForChainRun, + callbacks: CallbackManager, + generated_sparql: str, + ontology_schema: str, + ) -> str: + try: + return self._prepare_sparql_query(_run_manager, generated_sparql) + except Exception as e: + retries = 0 + error_message = str(e) + self._log_invalid_sparql_query( + _run_manager, generated_sparql, error_message + ) + + while retries < self.max_fix_retries: + try: + sparql_fix_chain_result = self.sparql_fix_chain.invoke( + { + "error_message": error_message, + "generated_sparql": generated_sparql, + "schema": ontology_schema, + }, + callbacks=callbacks, + ) + generated_sparql = sparql_fix_chain_result[ + self.sparql_fix_chain.output_key + ] + return self._prepare_sparql_query(_run_manager, generated_sparql) + except Exception as e: + retries += 1 + parse_exception = str(e) + self._log_invalid_sparql_query( + _run_manager, generated_sparql, parse_exception + ) + + raise ValueError("The generated SPARQL query is invalid.") + + def _prepare_sparql_query( + self, _run_manager: CallbackManagerForChainRun, generated_sparql: str + ) -> str: + from rdflib.plugins.sparql import prepareQuery + + prepareQuery(generated_sparql) + self._log_valid_sparql_query(_run_manager, 
generated_sparql) + return generated_sparql + + def _log_valid_sparql_query( + self, _run_manager: CallbackManagerForChainRun, generated_query: str + ) -> None: + _run_manager.on_text("Generated SPARQL:", end="\n", verbose=self.verbose) + _run_manager.on_text( + generated_query, color="green", end="\n", verbose=self.verbose + ) + + def _log_invalid_sparql_query( + self, + _run_manager: CallbackManagerForChainRun, + generated_query: str, + error_message: str, + ) -> None: + _run_manager.on_text("Invalid SPARQL query: ", end="\n", verbose=self.verbose) + _run_manager.on_text( + generated_query, color="red", end="\n", verbose=self.verbose + ) + _run_manager.on_text( + "SPARQL Query Parse Error: ", end="\n", verbose=self.verbose + ) + _run_manager.on_text( + error_message, color="red", end="\n\n", verbose=self.verbose + ) diff --git a/libs/langchain/langchain/chains/graph_qa/prompts.py b/libs/langchain/langchain/chains/graph_qa/prompts.py index 6ca1e70266c..5d4652453b6 100644 --- a/libs/langchain/langchain/chains/graph_qa/prompts.py +++ b/libs/langchain/langchain/chains/graph_qa/prompts.py @@ -197,6 +197,68 @@ SPARQL_QA_PROMPT = PromptTemplate( input_variables=["context", "prompt"], template=SPARQL_QA_TEMPLATE ) +GRAPHDB_SPARQL_GENERATION_TEMPLATE = """ +Write a SPARQL SELECT query for querying a graph database. +The ontology schema delimited by triple backticks in Turtle format is: +``` +{schema} +``` +Use only the classes and properties provided in the schema to construct the SPARQL query. +Do not use any classes or properties that are not explicitly provided in the SPARQL query. +Include all necessary prefixes. +Do not include any explanations or apologies in your responses. +Do not wrap the query in backticks. +Do not include any text except the SPARQL query generated. +The question delimited by triple backticks is: +``` +{prompt} +``` +""" +GRAPHDB_SPARQL_GENERATION_PROMPT = PromptTemplate( + input_variables=["schema", "prompt"], + template=GRAPHDB_SPARQL_GENERATION_TEMPLATE, +) + +GRAPHDB_SPARQL_FIX_TEMPLATE = """ +This following SPARQL query delimited by triple backticks +``` +{generated_sparql} +``` +is not valid. +The error delimited by triple backticks is +``` +{error_message} +``` +Give me a correct version of the SPARQL query. +Do not change the logic of the query. +Do not include any explanations or apologies in your responses. +Do not wrap the query in backticks. +Do not include any text except the SPARQL query generated. +The ontology schema delimited by triple backticks in Turtle format is: +``` +{schema} +``` +""" + +GRAPHDB_SPARQL_FIX_PROMPT = PromptTemplate( + input_variables=["error_message", "generated_sparql", "schema"], + template=GRAPHDB_SPARQL_FIX_TEMPLATE, +) + +GRAPHDB_QA_TEMPLATE = """Task: Generate a natural language response from the results of a SPARQL query. +You are an assistant that creates well-written and human understandable answers. +The information part contains the information provided, which you can use to construct an answer. +The information provided is authoritative, you must never doubt it or try to use your internal knowledge to correct it. +Make your response sound like the information is coming from an AI assistant, but don't add any information. +Don't use internal knowledge to answer the question, just say you don't know if no information is available. 
+Information: +{context} + +Question: {prompt} +Helpful Answer:""" +GRAPHDB_QA_PROMPT = PromptTemplate( + input_variables=["context", "prompt"], template=GRAPHDB_QA_TEMPLATE +) AQL_GENERATION_TEMPLATE = """Task: Generate an ArangoDB Query Language (AQL) query from a User Input. diff --git a/libs/langchain/poetry.lock b/libs/langchain/poetry.lock index 927e0ead701..987a173f836 100644 --- a/libs/langchain/poetry.lock +++ b/libs/langchain/poetry.lock @@ -3049,7 +3049,6 @@ files = [ {file = "jq-1.6.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:227b178b22a7f91ae88525810441791b1ca1fc71c86f03190911793be15cec3d"}, {file = "jq-1.6.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:780eb6383fbae12afa819ef676fc93e1548ae4b076c004a393af26a04b460742"}, {file = "jq-1.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:08ded6467f4ef89fec35b2bf310f210f8cd13fbd9d80e521500889edf8d22441"}, - {file = "jq-1.6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49e44ed677713f4115bd5bf2dbae23baa4cd503be350e12a1c1f506b0687848f"}, {file = "jq-1.6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:984f33862af285ad3e41e23179ac4795f1701822473e1a26bf87ff023e5a89ea"}, {file = "jq-1.6.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f42264fafc6166efb5611b5d4cb01058887d050a6c19334f6a3f8a13bb369df5"}, {file = "jq-1.6.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a67154f150aaf76cc1294032ed588436eb002097dd4fd1e283824bf753a05080"}, @@ -3447,7 +3446,7 @@ files = [ [[package]] name = "langchain-community" -version = "0.0.15" +version = "0.0.16" description = "Community contributed LangChain integrations." optional = false python-versions = ">=3.8.1,<4.0" @@ -3457,7 +3456,7 @@ develop = true [package.dependencies] aiohttp = "^3.8.3" dataclasses-json = ">= 0.5.7, < 0.7" -langchain-core = ">=0.1.14,<0.2" +langchain-core = ">=0.1.16,<0.2" langsmith = ">=0.0.83,<0.1" numpy = "^1" PyYAML = ">=5.3" @@ -3467,7 +3466,7 @@ tenacity = "^8.1.0" [package.extras] cli = ["typer (>=0.9.0,<0.10.0)"] -extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "azure-ai-documentintelligence (>=1.0.0b1,<2.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cohere (>=4,<5)", "dashvector (>=1.0.1,<2.0.0)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "elasticsearch (>=8.12.0,<9.0.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "gradientai (>=1.4.0,<2.0.0)", "hdbcli (>=2.19.21,<3.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "lxml (>=4.9.2,<5.0.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "oci (>=2.119.1,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "oracle-ads (>=2.9.1,<3.0.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six 
(>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "upstash-redis (>=0.15.0,<0.16.0)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)", "zhipuai (>=1.0.7,<2.0.0)"] +extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "azure-ai-documentintelligence (>=1.0.0b1,<2.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cohere (>=4,<5)", "dashvector (>=1.0.1,<2.0.0)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "elasticsearch (>=8.12.0,<9.0.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "gradientai (>=1.4.0,<2.0.0)", "hdbcli (>=2.19.21,<3.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "lxml (>=4.9.2,<5.0.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "oci (>=2.119.1,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "oracle-ads (>=2.9.1,<3.0.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "upstash-redis (>=0.15.0,<0.16.0)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)", "zhipuai (>=1.0.7,<2.0.0)"] [package.source] type = "directory" @@ -5807,6 +5806,7 @@ files = [ {file = "pymongo-4.5.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6422b6763b016f2ef2beedded0e546d6aa6ba87910f9244d86e0ac7690f75c96"}, {file = "pymongo-4.5.0-cp312-cp312-win32.whl", hash = "sha256:77cfff95c1fafd09e940b3fdcb7b65f11442662fad611d0e69b4dd5d17a81c60"}, {file = "pymongo-4.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:e57d859b972c75ee44ea2ef4758f12821243e99de814030f69a3decb2aa86807"}, + {file = 
"pymongo-4.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8443f3a8ab2d929efa761c6ebce39a6c1dca1c9ac186ebf11b62c8fe1aef53f4"}, {file = "pymongo-4.5.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:2b0176f9233a5927084c79ff80b51bd70bfd57e4f3d564f50f80238e797f0c8a"}, {file = "pymongo-4.5.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:89b3f2da57a27913d15d2a07d58482f33d0a5b28abd20b8e643ab4d625e36257"}, {file = "pymongo-4.5.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:5caee7bd08c3d36ec54617832b44985bd70c4cbd77c5b313de6f7fce0bb34f93"}, @@ -6698,6 +6698,27 @@ PyYAML = "*" Shapely = ">=1.7.1" six = ">=1.15.0" +[[package]] +name = "rdflib" +version = "7.0.0" +description = "RDFLib is a Python library for working with RDF, a simple yet powerful language for representing information." +optional = true +python-versions = ">=3.8.1,<4.0.0" +files = [ + {file = "rdflib-7.0.0-py3-none-any.whl", hash = "sha256:0438920912a642c866a513de6fe8a0001bd86ef975057d6962c79ce4771687cd"}, + {file = "rdflib-7.0.0.tar.gz", hash = "sha256:9995eb8569428059b8c1affd26b25eac510d64f5043d9ce8c84e0d0036e995ae"}, +] + +[package.dependencies] +isodate = ">=0.6.0,<0.7.0" +pyparsing = ">=2.1.0,<4" + +[package.extras] +berkeleydb = ["berkeleydb (>=18.1.0,<19.0.0)"] +html = ["html5lib (>=1.0,<2.0)"] +lxml = ["lxml (>=4.3.0,<5.0.0)"] +networkx = ["networkx (>=2.0.0,<3.0.0)"] + [[package]] name = "redis" version = "4.6.0" @@ -9118,7 +9139,7 @@ cli = ["typer"] cohere = ["cohere"] docarray = ["docarray"] embeddings = ["sentence-transformers"] -extended-testing = ["aiosqlite", "aleph-alpha-client", "anthropic", "arxiv", "assemblyai", "atlassian-python-api", "beautifulsoup4", "bibtexparser", "cassio", "chardet", "cohere", "couchbase", "dashvector", "databricks-vectorsearch", "datasets", "dgml-utils", "esprima", "faiss-cpu", "feedparser", "fireworks-ai", "geopandas", "gitpython", "google-cloud-documentai", "gql", "hologres-vector", "html2text", "javelin-sdk", "jinja2", "jq", "jsonschema", "langchain-openai", "lxml", "markdownify", "motor", "msal", "mwparserfromhell", "mwxml", "newspaper3k", "numexpr", "openai", "openai", "openapi-pydantic", "pandas", "pdfminer-six", "pgvector", "praw", "psychicapi", "py-trello", "pymupdf", "pypdf", "pypdfium2", "pyspark", "rank-bm25", "rapidfuzz", "rapidocr-onnxruntime", "requests-toolbelt", "rspace_client", "scikit-learn", "sqlite-vss", "streamlit", "sympy", "telethon", "timescale-vector", "tqdm", "upstash-redis", "xata", "xmltodict"] +extended-testing = ["aiosqlite", "aleph-alpha-client", "anthropic", "arxiv", "assemblyai", "atlassian-python-api", "beautifulsoup4", "bibtexparser", "cassio", "chardet", "cohere", "couchbase", "dashvector", "databricks-vectorsearch", "datasets", "dgml-utils", "esprima", "faiss-cpu", "feedparser", "fireworks-ai", "geopandas", "gitpython", "google-cloud-documentai", "gql", "hologres-vector", "html2text", "javelin-sdk", "jinja2", "jq", "jsonschema", "langchain-openai", "lxml", "markdownify", "motor", "msal", "mwparserfromhell", "mwxml", "newspaper3k", "numexpr", "openai", "openai", "openapi-pydantic", "pandas", "pdfminer-six", "pgvector", "praw", "psychicapi", "py-trello", "pymupdf", "pypdf", "pypdfium2", "pyspark", "rank-bm25", "rapidfuzz", "rapidocr-onnxruntime", "rdflib", "requests-toolbelt", "rspace_client", "scikit-learn", "sqlite-vss", "streamlit", "sympy", "telethon", "timescale-vector", "tqdm", "upstash-redis", "xata", "xmltodict"] javascript = ["esprima"] llms = ["clarifai", "cohere", "huggingface_hub", "manifest-ml", "nlpcloud", "openai", 
"openlm", "torch", "transformers"] openai = ["openai", "tiktoken"] @@ -9128,4 +9149,4 @@ text-helpers = ["chardet"] [metadata] lock-version = "2.0" python-versions = ">=3.8.1,<4.0" -content-hash = "3cabbf56e60340c9e95892a50c281ca9e0859a5bee76a9f45fd54f6a047e6673" +content-hash = "1f0d8707a249814b4b96af0d775e5f1484f332af232a8b3e855c263efbb19fc2" diff --git a/libs/langchain/pyproject.toml b/libs/langchain/pyproject.toml index d7264b1a304..bf6f30f3c28 100644 --- a/libs/langchain/pyproject.toml +++ b/libs/langchain/pyproject.toml @@ -111,6 +111,7 @@ couchbase = {version = "^4.1.9", optional = true} dgml-utils = {version = "^0.3.0", optional = true} datasets = {version = "^2.15.0", optional = true} langchain-openai = {version = ">=0.0.2,<0.1", optional = true} +rdflib = {version = "7.0.0", optional = true} [tool.poetry.group.test] optional = true @@ -296,6 +297,7 @@ extended_testing = [ "dgml-utils", "cohere", "langchain-openai", + "rdflib", ] [tool.ruff] @@ -343,7 +345,7 @@ markers = [ asyncio_mode = "auto" [tool.codespell] -skip = '.git,*.pdf,*.svg,*.pdf,*.yaml,*.ipynb,poetry.lock,*.min.js,*.css,package-lock.json,example_data,_dist,examples' +skip = '.git,*.pdf,*.svg,*.pdf,*.yaml,*.ipynb,poetry.lock,*.min.js,*.css,package-lock.json,example_data,_dist,examples,*.trig' # Ignore latin etc ignore-regex = '.*(Stati Uniti|Tense=Pres).*' # whats is a typo but used frequently in queries so kept as is diff --git a/libs/langchain/tests/integration_tests/chains/docker-compose-ontotext-graphdb/Dockerfile b/libs/langchain/tests/integration_tests/chains/docker-compose-ontotext-graphdb/Dockerfile new file mode 100644 index 00000000000..29d0b239a8d --- /dev/null +++ b/libs/langchain/tests/integration_tests/chains/docker-compose-ontotext-graphdb/Dockerfile @@ -0,0 +1,6 @@ +FROM ontotext/graphdb:10.5.1 +RUN mkdir -p /opt/graphdb/dist/data/repositories/starwars +COPY config.ttl /opt/graphdb/dist/data/repositories/starwars/ +COPY starwars-data.trig / +COPY graphdb_create.sh /run.sh +ENTRYPOINT bash /run.sh \ No newline at end of file diff --git a/libs/langchain/tests/integration_tests/chains/docker-compose-ontotext-graphdb/config.ttl b/libs/langchain/tests/integration_tests/chains/docker-compose-ontotext-graphdb/config.ttl new file mode 100644 index 00000000000..799a892ed1b --- /dev/null +++ b/libs/langchain/tests/integration_tests/chains/docker-compose-ontotext-graphdb/config.ttl @@ -0,0 +1,46 @@ +@prefix rdfs: . +@prefix rep: . +@prefix sr: . +@prefix sail: . +@prefix graphdb: . 
+ +[] a rep:Repository ; + rep:repositoryID "starwars" ; + rdfs:label "" ; + rep:repositoryImpl [ + rep:repositoryType "graphdb:SailRepository" ; + sr:sailImpl [ + sail:sailType "graphdb:Sail" ; + + graphdb:read-only "false" ; + + # Inference and Validation + graphdb:ruleset "empty" ; + graphdb:disable-sameAs "true" ; + graphdb:check-for-inconsistencies "false" ; + + # Indexing + graphdb:entity-id-size "32" ; + graphdb:enable-context-index "false" ; + graphdb:enablePredicateList "true" ; + graphdb:enable-fts-index "false" ; + graphdb:fts-indexes ("default" "iri") ; + graphdb:fts-string-literals-index "default" ; + graphdb:fts-iris-index "none" ; + + # Queries and Updates + graphdb:query-timeout "0" ; + graphdb:throw-QueryEvaluationException-on-timeout "false" ; + graphdb:query-limit-results "0" ; + + # Settable in the file but otherwise hidden in the UI and in the RDF4J console + graphdb:base-URL "http://example.org/owlim#" ; + graphdb:defaultNS "" ; + graphdb:imports "" ; + graphdb:repository-type "file-repository" ; + graphdb:storage-folder "storage" ; + graphdb:entity-index-size "10000000" ; + graphdb:in-memory-literal-properties "true" ; + graphdb:enable-literal-index "true" ; + ] + ]. diff --git a/libs/langchain/tests/integration_tests/chains/docker-compose-ontotext-graphdb/docker-compose.yaml b/libs/langchain/tests/integration_tests/chains/docker-compose-ontotext-graphdb/docker-compose.yaml new file mode 100644 index 00000000000..80c15421f95 --- /dev/null +++ b/libs/langchain/tests/integration_tests/chains/docker-compose-ontotext-graphdb/docker-compose.yaml @@ -0,0 +1,9 @@ +version: '3.7' + +services: + + graphdb: + image: graphdb + container_name: graphdb + ports: + - "7200:7200" diff --git a/libs/langchain/tests/integration_tests/chains/docker-compose-ontotext-graphdb/graphdb_create.sh b/libs/langchain/tests/integration_tests/chains/docker-compose-ontotext-graphdb/graphdb_create.sh new file mode 100644 index 00000000000..9e01c9ba48c --- /dev/null +++ b/libs/langchain/tests/integration_tests/chains/docker-compose-ontotext-graphdb/graphdb_create.sh @@ -0,0 +1,33 @@ +#! /bin/bash +REPOSITORY_ID="starwars" +GRAPHDB_URI="http://localhost:7200/" + +echo -e "\nUsing GraphDB: ${GRAPHDB_URI}" + +function startGraphDB { + echo -e "\nStarting GraphDB..." + exec /opt/graphdb/dist/bin/graphdb +} + +function waitGraphDBStart { + echo -e "\nWaiting GraphDB to start..." + for _ in $(seq 1 5); do + CHECK_RES=$(curl --silent --write-out '%{http_code}' --output /dev/null ${GRAPHDB_URI}/rest/repositories) + if [ "${CHECK_RES}" = '200' ]; then + echo -e "\nUp and running" + break + fi + sleep 30s + echo "CHECK_RES: ${CHECK_RES}" + done +} + +function loadData { + echo -e "\nImporting starwars-data.trig" + curl -X POST -H "Content-Type: application/x-trig" -T /starwars-data.trig ${GRAPHDB_URI}/repositories/${REPOSITORY_ID}/statements +} + +startGraphDB & +waitGraphDBStart +loadData +wait diff --git a/libs/langchain/tests/integration_tests/chains/docker-compose-ontotext-graphdb/start.sh b/libs/langchain/tests/integration_tests/chains/docker-compose-ontotext-graphdb/start.sh new file mode 100755 index 00000000000..fe3856ad338 --- /dev/null +++ b/libs/langchain/tests/integration_tests/chains/docker-compose-ontotext-graphdb/start.sh @@ -0,0 +1,5 @@ +set -ex + +docker compose down -v --remove-orphans +docker build --tag graphdb . 
+docker compose up -d graphdb diff --git a/libs/langchain/tests/integration_tests/chains/docker-compose-ontotext-graphdb/starwars-data.trig b/libs/langchain/tests/integration_tests/chains/docker-compose-ontotext-graphdb/starwars-data.trig new file mode 100644 index 00000000000..7ddc022a146 --- /dev/null +++ b/libs/langchain/tests/integration_tests/chains/docker-compose-ontotext-graphdb/starwars-data.trig @@ -0,0 +1,160 @@ +@base . +@prefix voc: . +@prefix owl: . +@prefix rdfs: . + +{ + + + a voc:Character , voc:Human ; + rdfs:label "Anakin Skywalker", "Darth Vader" ; + voc:birthYear "41.9BBY" ; + voc:eyeColor "blue" ; + voc:gender "male" ; + voc:hairColor "blond" ; + voc:height 188.0 ; + voc:homeworld ; + voc:mass 84.0 ; + voc:skinColor "fair" ; + voc:cybernetics "Cybernetic right arm" . + + + a voc:Character , voc:Human ; + rdfs:label "Luke Skywalker" ; + voc:birthYear "19BBY" ; + voc:eyeColor "blue" ; + voc:gender "male" ; + voc:hairColor "blond" ; + voc:height 172.0 ; + voc:homeworld ; + voc:mass 77.0 ; + voc:skinColor "fair" . + + + a voc:Character , voc:Human ; + rdfs:label "Padmé Amidala" ; + voc:birthYear "46BBY" ; + voc:eyeColor "brown" ; + voc:gender "female" ; + voc:hairColor "brown" ; + voc:height 165.0 ; + voc:homeworld ; + voc:mass 45.0 ; + voc:skinColor "light" . + + + a voc:Planet ; + rdfs:label "Tatooine" ; + voc:climate "arid" ; + voc:diameter 10465 ; + voc:gravity "1 standard" ; + voc:orbitalPeriod 304 ; + voc:population 200000 ; + voc:resident , ; + voc:rotationPeriod 23 ; + voc:surfaceWater 1 ; + voc:terrain "desert" . + + + a voc:Planet ; + rdfs:label "Naboo" ; + voc:climate "temperate" ; + voc:diameter 12120 ; + voc:gravity "1 standard" ; + voc:orbitalPeriod 312 ; + voc:population 4500000000 ; + voc:resident ; + voc:rotationPeriod 26 ; + voc:surfaceWater 12 ; + voc:terrain "grassy hills, swamps, forests, mountains" . + + + a voc:Planet ; + rdfs:label "Kashyyyk" ; + voc:climate "tropical" ; + voc:diameter 12765 ; + voc:gravity "1 standard" ; + voc:orbitalPeriod 381 ; + voc:population 45000000 ; + voc:resident , ; + voc:rotationPeriod 26 ; + voc:surfaceWater 60 ; + voc:terrain "jungle, forests, lakes, rivers" . + + + a voc:Character , voc:Wookiee ; + rdfs:label "Chewbacca" ; + voc:birthYear "200BBY" ; + voc:eyeColor "blue" ; + voc:gender "male" ; + voc:hairColor "brown" ; + voc:height 228.0 ; + voc:homeworld ; + voc:mass 112.0 . + + + a voc:Character , voc:Wookiee ; + rdfs:label "Tarfful" ; + voc:eyeColor "blue" ; + voc:gender "male" ; + voc:hairColor "brown" ; + voc:height 234.0 ; + voc:homeworld ; + voc:mass 136.0 ; + voc:skinColor "brown" . +} + + { + + voc:Character a owl:Class . + voc:Species a owl:Class . + + voc:Human a voc:Species; + rdfs:label "Human"; + voc:averageHeight 180.0; + voc:averageLifespan "120"; + voc:character , , + ; + voc:language "Galactic Basic"; + voc:skinColor "black", "caucasian", "asian", "hispanic"; + voc:eyeColor "blue", "brown", "hazel", "green", "grey", "amber"; + voc:hairColor "brown", "red", "black", "blonde" . + + voc:Planet a owl:Class . + + voc:Wookiee a voc:Species; + rdfs:label "Wookiee"; + voc:averageHeight 210.0; + voc:averageLifespan "400"; + voc:character , ; + voc:language "Shyriiwook"; + voc:planet ; + voc:skinColor "gray"; + voc:eyeColor "blue", "yellow", "brown", "red", "green", "golden"; + voc:hairColor "brown", "black" . + + voc:birthYear a owl:DatatypeProperty . + voc:eyeColor a owl:DatatypeProperty . + voc:gender a owl:DatatypeProperty . + voc:hairColor a owl:DatatypeProperty . + voc:height a owl:DatatypeProperty . 
+ voc:homeworld a owl:ObjectProperty . + voc:mass a owl:DatatypeProperty . + voc:skinColor a owl:DatatypeProperty . + voc:cybernetics a owl:DatatypeProperty . + voc:climate a owl:DatatypeProperty . + voc:diameter a owl:DatatypeProperty . + voc:gravity a owl:DatatypeProperty . + voc:orbitalPeriod a owl:DatatypeProperty . + voc:population a owl:DatatypeProperty . + voc:resident a owl:ObjectProperty . + voc:rotationPeriod a owl:DatatypeProperty . + voc:surfaceWater a owl:DatatypeProperty . + voc:terrain a owl:DatatypeProperty . + voc:averageHeight a owl:DatatypeProperty . + voc:averageLifespan a owl:DatatypeProperty . + voc:character a owl:ObjectProperty . + voc:language a owl:DatatypeProperty . + voc:planet a owl:ObjectProperty . + +} diff --git a/libs/langchain/tests/integration_tests/chains/test_ontotext_graphdb_qa.py b/libs/langchain/tests/integration_tests/chains/test_ontotext_graphdb_qa.py new file mode 100644 index 00000000000..bab407ec1c3 --- /dev/null +++ b/libs/langchain/tests/integration_tests/chains/test_ontotext_graphdb_qa.py @@ -0,0 +1,323 @@ +from unittest.mock import MagicMock, Mock + +import pytest +from langchain_community.graphs import OntotextGraphDBGraph + +from langchain.chains import LLMChain, OntotextGraphDBQAChain + +""" +cd libs/langchain/tests/integration_tests/chains/docker-compose-ontotext-graphdb +./start.sh +""" + + +@pytest.mark.requires("langchain_openai", "rdflib") +@pytest.mark.parametrize("max_fix_retries", [-2, -1, 0, 1, 2]) +def test_valid_sparql(max_fix_retries: int) -> None: + from langchain_openai import ChatOpenAI + + question = "What is Luke Skywalker's home planet?" + answer = "Tatooine" + + graph = OntotextGraphDBGraph( + query_endpoint="http://localhost:7200/repositories/starwars", + query_ontology="CONSTRUCT {?s ?p ?o} " + "FROM WHERE {?s ?p ?o}", + ) + chain = OntotextGraphDBQAChain.from_llm( + Mock(ChatOpenAI), + graph=graph, + max_fix_retries=max_fix_retries, + ) + chain.sparql_generation_chain = Mock(LLMChain) + chain.sparql_fix_chain = Mock(LLMChain) + chain.qa_chain = Mock(LLMChain) + + chain.sparql_generation_chain.output_key = "text" + chain.sparql_generation_chain.invoke = MagicMock( + return_value={ + "text": "SELECT * {?s ?p ?o} LIMIT 1", + "prompt": question, + "schema": "", + } + ) + chain.sparql_fix_chain.output_key = "text" + chain.sparql_fix_chain.invoke = MagicMock() + chain.qa_chain.output_key = "text" + chain.qa_chain.invoke = MagicMock( + return_value={ + "text": answer, + "prompt": question, + "context": [], + } + ) + + result = chain.invoke({chain.input_key: question}) + + assert chain.sparql_generation_chain.invoke.call_count == 1 + assert chain.sparql_fix_chain.invoke.call_count == 0 + assert chain.qa_chain.invoke.call_count == 1 + assert result == {chain.output_key: answer, chain.input_key: question} + + +@pytest.mark.requires("langchain_openai", "rdflib") +@pytest.mark.parametrize("max_fix_retries", [-2, -1, 0]) +def test_invalid_sparql_non_positive_max_fix_retries( + max_fix_retries: int, +) -> None: + from langchain_openai import ChatOpenAI + + question = "What is Luke Skywalker's home planet?" 
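
For orientation, the kind of SPARQL the chain is expected to generate for this question can also be run directly through the graph wrapper. A hand-written sketch follows (not produced by the chain); the `voc:` namespace IRI is an assumption, since the prefix IRIs are not legible in this rendering of the patch.

```python
from langchain_community.graphs import OntotextGraphDBGraph

graph = OntotextGraphDBGraph(
    query_endpoint="http://localhost:7200/repositories/starwars",
    query_ontology="CONSTRUCT {?s ?p ?o} "
    "FROM <https://swapi.co/ontology/> WHERE {?s ?p ?o}",
)

# Hand-written query for "What is Luke Skywalker's home planet?";
# the voc: namespace IRI below is assumed, not taken from the patch.
sparql = """
PREFIX voc: <https://swapi.co/vocabulary/>
PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
SELECT ?planetLabel WHERE {
    ?character rdfs:label "Luke Skywalker" ;
               voc:homeworld ?planet .
    ?planet rdfs:label ?planetLabel .
}
"""
for row in graph.query(sparql):
    print(row.planetLabel)  # expected: "Tatooine"
```
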
+
+    graph = OntotextGraphDBGraph(
+        query_endpoint="http://localhost:7200/repositories/starwars",
+        query_ontology="CONSTRUCT {?s ?p ?o} "
+        "FROM <https://swapi.co/ontology/> WHERE {?s ?p ?o}",
+    )
+    chain = OntotextGraphDBQAChain.from_llm(
+        Mock(ChatOpenAI),
+        graph=graph,
+        max_fix_retries=max_fix_retries,
+    )
+    chain.sparql_generation_chain = Mock(LLMChain)
+    chain.sparql_fix_chain = Mock(LLMChain)
+    chain.qa_chain = Mock(LLMChain)
+
+    chain.sparql_generation_chain.output_key = "text"
+    chain.sparql_generation_chain.invoke = MagicMock(
+        return_value={
+            "text": "```sparql SELECT * {?s ?p ?o} LIMIT 1```",
+            "prompt": question,
+            "schema": "",
+        }
+    )
+    chain.sparql_fix_chain.output_key = "text"
+    chain.sparql_fix_chain.invoke = MagicMock()
+    chain.qa_chain.output_key = "text"
+    chain.qa_chain.invoke = MagicMock()
+
+    with pytest.raises(ValueError) as e:
+        chain.invoke({chain.input_key: question})
+
+    assert str(e.value) == "The generated SPARQL query is invalid."
+
+    assert chain.sparql_generation_chain.invoke.call_count == 1
+    assert chain.sparql_fix_chain.invoke.call_count == 0
+    assert chain.qa_chain.invoke.call_count == 0
+
+
+@pytest.mark.requires("langchain_openai", "rdflib")
+@pytest.mark.parametrize("max_fix_retries", [1, 2, 3])
+def test_valid_sparql_after_first_retry(max_fix_retries: int) -> None:
+    from langchain_openai import ChatOpenAI
+
+    question = "What is Luke Skywalker's home planet?"
+    answer = "Tatooine"
+    generated_invalid_sparql = "```sparql SELECT * {?s ?p ?o} LIMIT 1```"
+
+    graph = OntotextGraphDBGraph(
+        query_endpoint="http://localhost:7200/repositories/starwars",
+        query_ontology="CONSTRUCT {?s ?p ?o} "
+        "FROM <https://swapi.co/ontology/> WHERE {?s ?p ?o}",
+    )
+    chain = OntotextGraphDBQAChain.from_llm(
+        Mock(ChatOpenAI),
+        graph=graph,
+        max_fix_retries=max_fix_retries,
+    )
+    chain.sparql_generation_chain = Mock(LLMChain)
+    chain.sparql_fix_chain = Mock(LLMChain)
+    chain.qa_chain = Mock(LLMChain)
+
+    chain.sparql_generation_chain.output_key = "text"
+    chain.sparql_generation_chain.invoke = MagicMock(
+        return_value={
+            "text": generated_invalid_sparql,
+            "prompt": question,
+            "schema": "",
+        }
+    )
+    chain.sparql_fix_chain.output_key = "text"
+    chain.sparql_fix_chain.invoke = MagicMock(
+        return_value={
+            "text": "SELECT * {?s ?p ?o} LIMIT 1",
+            "error_message": "pyparsing.exceptions.ParseException: "
+            "Expected {SelectQuery | ConstructQuery | DescribeQuery | AskQuery}, "
+            "found '`' (at char 0), (line:1, col:1)",
+            "generated_sparql": generated_invalid_sparql,
+            "schema": "",
+        }
+    )
+    chain.qa_chain.output_key = "text"
+    chain.qa_chain.invoke = MagicMock(
+        return_value={
+            "text": answer,
+            "prompt": question,
+            "context": [],
+        }
+    )
+
+    result = chain.invoke({chain.input_key: question})
+
+    assert chain.sparql_generation_chain.invoke.call_count == 1
+    assert chain.sparql_fix_chain.invoke.call_count == 1
+    assert chain.qa_chain.invoke.call_count == 1
+    assert result == {chain.output_key: answer, chain.input_key: question}
+
+
+@pytest.mark.requires("langchain_openai", "rdflib")
+@pytest.mark.parametrize("max_fix_retries", [1, 2, 3])
+def test_invalid_sparql_after_all_retries(max_fix_retries: int) -> None:
+    from langchain_openai import ChatOpenAI
+
+    question = "What is Luke Skywalker's home planet?"
+    generated_invalid_sparql = "```sparql SELECT * {?s ?p ?o} LIMIT 1```"
+
+    graph = OntotextGraphDBGraph(
+        query_endpoint="http://localhost:7200/repositories/starwars",
+        query_ontology="CONSTRUCT {?s ?p ?o} "
+        "FROM <https://swapi.co/ontology/> WHERE {?s ?p ?o}",
+    )
+    chain = OntotextGraphDBQAChain.from_llm(
+        Mock(ChatOpenAI),
+        graph=graph,
+        max_fix_retries=max_fix_retries,
+    )
+    chain.sparql_generation_chain = Mock(LLMChain)
+    chain.sparql_fix_chain = Mock(LLMChain)
+    chain.qa_chain = Mock(LLMChain)
+
+    chain.sparql_generation_chain.output_key = "text"
+    chain.sparql_generation_chain.invoke = MagicMock(
+        return_value={
+            "text": generated_invalid_sparql,
+            "prompt": question,
+            "schema": "",
+        }
+    )
+    chain.sparql_fix_chain.output_key = "text"
+    chain.sparql_fix_chain.invoke = MagicMock(
+        return_value={
+            "text": generated_invalid_sparql,
+            "error_message": "pyparsing.exceptions.ParseException: "
+            "Expected {SelectQuery | ConstructQuery | DescribeQuery | AskQuery}, "
+            "found '`' (at char 0), (line:1, col:1)",
+            "generated_sparql": generated_invalid_sparql,
+            "schema": "",
+        }
+    )
+    chain.qa_chain.output_key = "text"
+    chain.qa_chain.invoke = MagicMock()
+
+    with pytest.raises(ValueError) as e:
+        chain.invoke({chain.input_key: question})
+
+    assert str(e.value) == "The generated SPARQL query is invalid."
+
+    assert chain.sparql_generation_chain.invoke.call_count == 1
+    assert chain.sparql_fix_chain.invoke.call_count == max_fix_retries
+    assert chain.qa_chain.invoke.call_count == 0
+
+
+@pytest.mark.requires("langchain_openai", "rdflib")
+@pytest.mark.parametrize(
+    "max_fix_retries,number_of_invalid_responses",
+    [(1, 0), (2, 0), (2, 1), (10, 6)],
+)
+def test_valid_sparql_after_some_retries(
+    max_fix_retries: int, number_of_invalid_responses: int
+) -> None:
+    from langchain_openai import ChatOpenAI
+
+    question = "What is Luke Skywalker's home planet?"
+ answer = "Tatooine" + generated_invalid_sparql = "```sparql SELECT * {?s ?p ?o} LIMIT 1```" + generated_valid_sparql_query = "SELECT * {?s ?p ?o} LIMIT 1" + + graph = OntotextGraphDBGraph( + query_endpoint="http://localhost:7200/repositories/starwars", + query_ontology="CONSTRUCT {?s ?p ?o} " + "FROM WHERE {?s ?p ?o}", + ) + chain = OntotextGraphDBQAChain.from_llm( + Mock(ChatOpenAI), + graph=graph, + max_fix_retries=max_fix_retries, + ) + chain.sparql_generation_chain = Mock(LLMChain) + chain.sparql_fix_chain = Mock(LLMChain) + chain.qa_chain = Mock(LLMChain) + + chain.sparql_generation_chain.output_key = "text" + chain.sparql_generation_chain.invoke = MagicMock( + return_value={ + "text": generated_invalid_sparql, + "prompt": question, + "schema": "", + } + ) + chain.sparql_fix_chain.output_key = "text" + chain.sparql_fix_chain.invoke = Mock() + chain.sparql_fix_chain.invoke.side_effect = [ + { + "text": generated_invalid_sparql, + "error_message": "pyparsing.exceptions.ParseException: " + "Expected {SelectQuery | ConstructQuery | DescribeQuery | AskQuery}, " + "found '`' (at char 0), (line:1, col:1)", + "generated_sparql": generated_invalid_sparql, + "schema": "", + } + ] * number_of_invalid_responses + [ + { + "text": generated_valid_sparql_query, + "error_message": "pyparsing.exceptions.ParseException: " + "Expected {SelectQuery | ConstructQuery | DescribeQuery | AskQuery}, " + "found '`' (at char 0), (line:1, col:1)", + "generated_sparql": generated_invalid_sparql, + "schema": "", + } + ] + chain.qa_chain.output_key = "text" + chain.qa_chain.invoke = MagicMock( + return_value={ + "text": answer, + "prompt": question, + "context": [], + } + ) + + result = chain.invoke({chain.input_key: question}) + + assert chain.sparql_generation_chain.invoke.call_count == 1 + assert chain.sparql_fix_chain.invoke.call_count == number_of_invalid_responses + 1 + assert chain.qa_chain.invoke.call_count == 1 + assert result == {chain.output_key: answer, chain.input_key: question} + + +@pytest.mark.requires("langchain_openai", "rdflib") +@pytest.mark.parametrize( + "model_name,question", + [ + ("gpt-3.5-turbo-1106", "What is the average height of the Wookiees?"), + ("gpt-3.5-turbo-1106", "What is the climate on Tatooine?"), + ("gpt-3.5-turbo-1106", "What is Luke Skywalker's home planet?"), + ("gpt-4-1106-preview", "What is the average height of the Wookiees?"), + ("gpt-4-1106-preview", "What is the climate on Tatooine?"), + ("gpt-4-1106-preview", "What is Luke Skywalker's home planet?"), + ], +) +def test_chain(model_name: str, question: str) -> None: + from langchain_openai import ChatOpenAI + + graph = OntotextGraphDBGraph( + query_endpoint="http://localhost:7200/repositories/starwars", + query_ontology="CONSTRUCT {?s ?p ?o} " + "FROM WHERE {?s ?p ?o}", + ) + chain = OntotextGraphDBQAChain.from_llm( + ChatOpenAI(temperature=0, model_name=model_name), graph=graph, verbose=True + ) + try: + chain.invoke({chain.input_key: question}) + except ValueError: + pass diff --git a/libs/langchain/tests/unit_tests/chains/test_imports.py b/libs/langchain/tests/unit_tests/chains/test_imports.py index 0ac3cf19ef4..8317dd62ea9 100644 --- a/libs/langchain/tests/unit_tests/chains/test_imports.py +++ b/libs/langchain/tests/unit_tests/chains/test_imports.py @@ -14,6 +14,7 @@ EXPECTED_ALL = [ "GraphCypherQAChain", "GraphQAChain", "GraphSparqlQAChain", + "OntotextGraphDBQAChain", "HugeGraphQAChain", "HypotheticalDocumentEmbedder", "KuzuQAChain", diff --git a/libs/langchain/tests/unit_tests/chains/test_ontotext_graphdb_qa.py 
b/libs/langchain/tests/unit_tests/chains/test_ontotext_graphdb_qa.py new file mode 100644 index 00000000000..46917abdab6 --- /dev/null +++ b/libs/langchain/tests/unit_tests/chains/test_ontotext_graphdb_qa.py @@ -0,0 +1,2 @@ +def test_import() -> None: + from langchain.chains import OntotextGraphDBQAChain # noqa: F401 diff --git a/pyproject.toml b/pyproject.toml index 1220209cb7f..c16f623a37a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -53,7 +53,7 @@ langchain-openai = { path = "libs/partners/openai", develop = true } [tool.poetry.group.typing.dependencies] [tool.codespell] -skip = '.git,*.pdf,*.svg,*.pdf,*.yaml,*.ipynb,poetry.lock,*.min.js,*.css,package-lock.json,example_data,_dist,examples,templates' +skip = '.git,*.pdf,*.svg,*.pdf,*.yaml,*.ipynb,poetry.lock,*.min.js,*.css,package-lock.json,example_data,_dist,examples,templates,*.trig' # Ignore latin etc ignore-regex = '.*(Stati Uniti|Tense=Pres).*' # whats is a typo but used frequently in queries so kept as is
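For reference, the integration tests above always go through the LLM-backed chain; the repository loaded by ./start.sh can also be queried directly through OntotextGraphDBGraph, which is the same object the chain uses to execute the generated SPARQL. The following is a minimal sketch (not part of the patch), assuming GraphDB is running on localhost:7200 with the "starwars" repository and that the fixture data uses the https://swapi.co/... namespaces referenced by this integration's documentation.

# Sketch only - not included in this patch. Assumes ./start.sh has already
# loaded starwars-data.trig into the "starwars" repository, and that the data
# uses the swapi.co vocabulary namespace (an assumption, not verified here).
from langchain_community.graphs import OntotextGraphDBGraph

graph = OntotextGraphDBGraph(
    query_endpoint="http://localhost:7200/repositories/starwars",
    query_ontology="CONSTRUCT {?s ?p ?o} "
    "FROM <https://swapi.co/ontology/> WHERE {?s ?p ?o}",
)

# Answer the tests' question ("What is Luke Skywalker's home planet?")
# with a hand-written SPARQL query, bypassing the LLM entirely.
sparql = """
PREFIX voc: <https://swapi.co/vocabulary/>
PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
SELECT ?homeworldLabel WHERE {
    ?character rdfs:label "Luke Skywalker" ;
               voc:homeworld ?homeworld .
    ?homeworld rdfs:label ?homeworldLabel .
}
"""
for row in graph.query(sparql):
    print(row)  # expected binding: "Tatooine"

If this prints the expected label, the repository is populated correctly and any remaining test failures point at the SPARQL generation or QA steps rather than at the Docker fixture.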