mv popular and additional chains to use cases (#8242)

This commit is contained in:
Bagatur
2023-07-27 12:55:13 -07:00
committed by GitHub
parent ff98fad2d9
commit 68763bd25f
78 changed files with 959 additions and 1014 deletions

View File

@@ -0,0 +1,6 @@
# Preventing harmful outputs
One of the key concerns with using LLMs is that they may generate harmful or unethical text. This is an area of active research in the field. Here we present some built-in chains inspired by this research, which are intended to make the outputs of LLMs safer.
- [Moderation chain](/docs/use_cases/safety/moderation): Explicitly check if any output text is harmful and flag it.
- [Constitutional chain](/docs/use_cases/safety/constitutional_chain): Prompt the model with a set of principles that should guide its behavior (see the sketch below).
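
A minimal sketch of using both chains together; the model settings and the principle text here are illustrative, not part of the original page:

```python
from langchain.chains import ConstitutionalChain, LLMChain, OpenAIModerationChain
from langchain.chains.constitutional_ai.models import ConstitutionalPrinciple
from langchain.llms import OpenAI
from langchain.prompts import PromptTemplate

llm = OpenAI(temperature=0)

# Flag potentially harmful text with the OpenAI moderation endpoint.
moderation_chain = OpenAIModerationChain()
print(moderation_chain.run("Some text to check"))

# Wrap an existing chain with a guiding principle.
qa_chain = LLMChain(llm=llm, prompt=PromptTemplate.from_template("{question}"))
ethical_principle = ConstitutionalPrinciple(
    name="Ethical Principle",
    critique_request="The model should only talk about ethical and legal things.",
    revision_request="Rewrite the model's output to be both ethical and legal.",
)
constitutional_chain = ConstitutionalChain.from_llm(
    chain=qa_chain,
    constitutional_principles=[ethical_principle],
    llm=llm,
    verbose=True,
)
print(constitutional_chain.run(question="How can I get away with speeding?"))
```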

View File

@@ -1,8 +0,0 @@
---
sidebar_position: 4
---
# Additional
import DocCardList from "@theme/DocCardList";
<DocCardList />

View File

@@ -1,7 +0,0 @@
# Dynamically selecting from multiple prompts
This notebook demonstrates how to use the `RouterChain` paradigm to create a chain that dynamically selects the prompt to use for a given input. Specifically, we show how to use the `MultiPromptChain` to create a question-answering chain that selects the prompt most relevant to a given question and then answers the question using that prompt.
import Example from "@snippets/modules/chains/additional/multi_prompt_router.mdx"
<Example/>
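
As a rough sketch of the underlying call (the prompt names and templates below are invented for illustration):

```python
from langchain.chains.router import MultiPromptChain
from langchain.llms import OpenAI

llm = OpenAI(temperature=0)

# Each entry describes a candidate prompt; the router picks the best match per question.
prompt_infos = [
    {
        "name": "physics",
        "description": "Good for answering questions about physics",
        "prompt_template": "You are a physics professor. Answer this question:\n{input}",
    },
    {
        "name": "math",
        "description": "Good for answering math questions",
        "prompt_template": "You are a mathematician. Answer this question:\n{input}",
    },
]

chain = MultiPromptChain.from_prompts(llm, prompt_infos, verbose=True)
print(chain.run("What is black body radiation?"))
```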

View File

@@ -1,8 +0,0 @@
---
sidebar_position: 3
---
# Popular
import DocCardList from "@theme/DocCardList";
<DocCardList />

View File

@@ -0,0 +1 @@
label: 'How to'

View File

@@ -2,7 +2,7 @@
sidebar_position: 2
---
# Conversational Retrieval QA
# Store and reference chat history
The ConversationalRetrievalQA chain builds on RetrievalQAChain to provide a chat history component.
It first combines the chat history (either explicitly passed in or retrieved from the provided memory) and the question into a standalone question, then looks up relevant documents from the retriever, and finally passes those documents and the question to a question answering chain to return a response.
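
A minimal sketch in Python, assuming a vector store has already been populated (the FAISS store, embeddings, and sample text are placeholders):

```python
from langchain.chains import ConversationalRetrievalChain
from langchain.chat_models import ChatOpenAI
from langchain.embeddings import OpenAIEmbeddings
from langchain.memory import ConversationBufferMemory
from langchain.vectorstores import FAISS

# Assume `texts` is a list of strings you have already loaded and split.
texts = ["LangChain is a framework for developing applications powered by LLMs."]
vectorstore = FAISS.from_texts(texts, OpenAIEmbeddings())

# The memory holds the chat history that gets condensed into a standalone question.
memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)

qa = ConversationalRetrievalChain.from_llm(
    ChatOpenAI(temperature=0),
    retriever=vectorstore.as_retriever(),
    memory=memory,
)
result = qa({"question": "What is LangChain?"})
print(result["answer"])
```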

View File

@@ -1,4 +1,4 @@
# Dynamically selecting from multiple retrievers
# Dynamically select from multiple retrievers
This notebook demonstrates how to use the `RouterChain` paradigm to create a chain that dynamically selects which retrieval system to use. Specifically, we show how to use the `MultiRetrievalQAChain` to create a question-answering chain that selects the retrieval QA chain most relevant to a given question and then answers the question using it.
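
A rough sketch, assuming two small vector stores have already been built (the store contents and names below are placeholders):

```python
from langchain.chains.router import MultiRetrievalQAChain
from langchain.embeddings import OpenAIEmbeddings
from langchain.llms import OpenAI
from langchain.vectorstores import FAISS

embeddings = OpenAIEmbeddings()
physics_store = FAISS.from_texts(["Notes on classical mechanics."], embeddings)
history_store = FAISS.from_texts(["Notes on the Roman Empire."], embeddings)

# Each entry describes a retriever; the router picks the most relevant one per question.
retriever_infos = [
    {
        "name": "physics notes",
        "description": "Good for answering questions about physics",
        "retriever": physics_store.as_retriever(),
    },
    {
        "name": "history notes",
        "description": "Good for answering questions about history",
        "retriever": history_store.as_retriever(),
    },
]

chain = MultiRetrievalQAChain.from_retrievers(OpenAI(), retriever_infos, verbose=True)
print(chain.run("Who founded Rome?"))
```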

View File

@@ -1,4 +1,4 @@
# Document QA
# QA over in-memory documents
Here we walk through how to use LangChain for question answering over a list of documents. Under the hood we'll be using our [Document chains](/docs/modules/chains/document/).
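
As a minimal sketch (the documents here are inlined placeholders rather than loaded from files):

```python
from langchain.chains.question_answering import load_qa_chain
from langchain.docstore.document import Document
from langchain.llms import OpenAI

# A small in-memory list of documents to answer over.
docs = [
    Document(page_content="The president said Ketanji Brown Jackson is a great judge."),
    Document(page_content="The economy grew last quarter."),
]

# "stuff" simply stuffs all documents into a single prompt.
chain = load_qa_chain(OpenAI(temperature=0), chain_type="stuff")
print(chain.run(input_documents=docs, question="What did the president say about the judge?"))
```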

View File

@@ -1,7 +1,7 @@
---
sidebar_position: 1
---
# Retrieval QA
# QA using a Retriever
This example showcases question answering over an index.
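
A minimal sketch, assuming an index built from a couple of example texts (the texts and question are placeholders):

```python
from langchain.chains import RetrievalQA
from langchain.embeddings import OpenAIEmbeddings
from langchain.llms import OpenAI
from langchain.vectorstores import FAISS

# Build a toy index; in practice you would load and split your own documents.
texts = ["Harrison worked at Kensho.", "Ankush worked at Facebook."]
index = FAISS.from_texts(texts, OpenAIEmbeddings())

qa = RetrievalQA.from_chain_type(
    llm=OpenAI(temperature=0),
    chain_type="stuff",
    retriever=index.as_retriever(),
)
print(qa.run("Where did Harrison work?"))
```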

View File

@@ -1610,59 +1610,59 @@
},
{
"source": "/en/latest/modules/chains/examples/flare.html",
"destination": "/docs/modules/chains/additional/flare"
"destination": "/docs/use_cases/question_answering/how_to/flare"
},
{
"source": "/en/latest/modules/chains/examples/graph_cypher_qa.html",
"destination": "/docs/modules/chains/additional/graph_cypher_qa"
"destination": "/docs/use_cases/graph/graph_cypher_qa"
},
{
"source": "/en/latest/modules/chains/examples/graph_nebula_qa.html",
"destination": "/docs/modules/chains/additional/graph_nebula_qa"
"destination": "/docs/use_cases/graph/graph_nebula_qa"
},
{
"source": "/en/latest/modules/chains/index_examples/graph_qa.html",
"destination": "/docs/modules/chains/additional/graph_qa"
"destination": "/docs/use_cases/graph/graph_qa"
},
{
"source": "/en/latest/modules/chains/index_examples/hyde.html",
"destination": "/docs/modules/chains/additional/hyde"
"destination": "/docs/use_cases/question_answering/how_to/hyde"
},
{
"source": "/en/latest/modules/chains/examples/llm_bash.html",
"destination": "/docs/modules/chains/additional/llm_bash"
"destination": "/docs/use_cases/code_writing/llm_bash"
},
{
"source": "/en/latest/modules/chains/examples/llm_checker.html",
"destination": "/docs/modules/chains/additional/llm_checker"
"destination": "/docs/use_cases/self_check/llm_checker"
},
{
"source": "/en/latest/modules/chains/examples/llm_math.html",
"destination": "/docs/modules/chains/additional/llm_math"
"destination": "/docs/use_cases/code_writing/llm_math"
},
{
"source": "/en/latest/modules/chains/examples/llm_requests.html",
"destination": "/docs/modules/chains/additional/llm_requests"
"destination": "/docs/use_cases/apis/llm_requests"
},
{
"source": "/en/latest/modules/chains/examples/llm_summarization_checker.html",
"destination": "/docs/modules/chains/additional/llm_summarization_checker"
"destination": "/docs/use_cases/self_check/llm_summarization_checker"
},
{
"source": "/en/latest/modules/chains/examples/openapi.html",
"destination": "/docs/modules/chains/additional/openapi"
"destination": "/docs/use_cases/apis/openapi"
},
{
"source": "/en/latest/modules/chains/examples/pal.html",
"destination": "/docs/modules/chains/additional/pal"
"destination": "/docs/use_cases/code_writing/pal"
},
{
"source": "/en/latest/modules/chains/examples/tagging.html",
"destination": "/docs/modules/chains/additional/tagging"
"destination": "/docs/use_cases/tagging"
},
{
"source": "/en/latest/modules/chains/index_examples/vector_db_text_generation.html",
"destination": "/docs/modules/chains/additional/vector_db_text_generation"
"destination": "/docs/use_cases/question_answering/how_to/vector_db_text_generation"
},
{
"source": "/en/latest/modules/chains/generic/router.html",
@@ -3771,6 +3771,170 @@
{
"source": "/en/latest/:path*",
"destination": "/docs/:path*"
},
{
"source": "/docs/modules/chains/additional/constitutional_chain",
"destination": "/docs/guides/safety/constitutional_chain"
},
{
"source": "/docs/modules/chains/additional/moderation",
"destination": "/docs/guides/safety/moderation"
},
{
"source": "/docs/modules/chains/popular/api",
"destination": "/docs/use_cases/apis/api"
},
{
"source": "/docs/modules/chains/additional/analyze_document",
"destination": "/docs/use_cases/question_answering/how_to/analyze_document"
},
{
"source": "/docs/modules/chains/popular/chat_vector_db",
"destination": "/docs/use_cases/question_answering/how_to/chat_vector_db"
},
{
"source": "/docs/modules/chains/additional/multi_retrieval_qa_router",
"destination": "/docs/use_cases/question_answering/how_to/multi_retrieval_qa_router"
},
{
"source": "/docs/modules/chains/additional/question_answering",
"destination": "/docs/use_cases/question_answering/how_to/question_answering"
},
{
"source": "/docs/modules/chains/popular/vector_db_qa",
"destination": "/docs/use_cases/question_answering/how_to/vector_db_qa"
},
{
"source": "/docs/modules/chains/popular/summarize",
"destination": "/docs/use_cases/summarization/summarize"
},
{
"source": "/docs/modules/chains/popular/sqlite",
"destination": "/docs/use_cases/tabular/sqlite"
},
{
"source": "/docs/modules/chains/popular/openai_functions",
"destination": "/docs/modules/chains/how_to/openai_functions"
},
{
"source": "/docs/modules/chains/additional/llm_requests",
"destination": "/docs/use_cases/apis/llm_requests"
},
{
"source": "/docs/modules/chains/additional/openai_openapi",
"destination": "/docs/use_cases/apis/openai_openapi"
},
{
"source": "/docs/modules/chains/additional/openapi",
"destination": "/docs/use_cases/apis/openapi"
},
{
"source": "/docs/modules/chains/additional/openapi_openai",
"destination": "/docs/use_cases/apis/openapi_openai"
},
{
"source": "/docs/modules/chains/additional/cpal",
"destination": "/docs/use_cases/code_writing/cpal"
},
{
"source": "/docs/modules/chains/additional/llm_bash",
"destination": "/docs/use_cases/code_writing/llm_bash"
},
{
"source": "/docs/modules/chains/additional/llm_math",
"destination": "/docs/use_cases/code_writing/llm_math"
},
{
"source": "/docs/modules/chains/additional/llm_symbolic_math",
"destination": "/docs/use_cases/code_writing/llm_symbolic_math"
},
{
"source": "/docs/modules/chains/additional/pal",
"destination": "/docs/use_cases/code_writing/pal"
},
{
"source": "/docs/modules/chains/additional/graph_arangodb_qa",
"destination": "/docs/use_cases/graph/graph_arangodb_qa"
},
{
"source": "/docs/modules/chains/additional/graph_cypher_qa",
"destination": "/docs/use_cases/graph/graph_cypher_qa"
},
{
"source": "/docs/modules/chains/additional/graph_hugegraph_qa",
"destination": "/docs/use_cases/graph/graph_hugegraph_qa"
},
{
"source": "/docs/modules/chains/additional/graph_kuzu_qa",
"destination": "/docs/use_cases/graph/graph_kuzu_qa"
},
{
"source": "/docs/modules/chains/additional/graph_nebula_qa",
"destination": "/docs/use_cases/graph/graph_nebula_qa"
},
{
"source": "/docs/modules/chains/additional/graph_qa",
"destination": "/docs/use_cases/graph/graph_qa"
},
{
"source": "/docs/modules/chains/additional/graph_sparql_qa",
"destination": "/docs/use_cases/graph/graph_sparql_qa"
},
{
"source": "/docs/modules/chains/additional/neptune_cypher_qa",
"destination": "/docs/use_cases/graph/neptune_cypher_qa"
},
{
"source": "/docs/modules/chains/additional/tot",
"destination": "/docs/use_cases/graph/tot"
},
{
"source": "/docs/use_cases/question_answering//document-context-aware-QA",
"destination": "/docs/use_cases/question_answering/how_to/document-context-aware-QA"
},
{
"source": "/docs/modules/chains/additional/flare",
"destination": "/docs/use_cases/question_answering/how_to/flare"
},
{
"source": "/docs/modules/chains/additional/hyde",
"destination": "/docs/use_cases/question_answering/how_to/hyde"
},
{
"source": "/docs/use_cases/question_answering//local_retrieval_qa",
"destination": "/docs/use_cases/question_answering/how_to/local_retrieval_qa"
},
{
"source": "/docs/modules/chains/additional/qa_citations",
"destination": "/docs/use_cases/question_answering/how_to/qa_citations"
},
{
"source": "/docs/modules/chains/additional/vector_db_text_generation",
"destination": "/docs/use_cases/question_answering/how_to/vector_db_text_generation"
},
{
"source": "/docs/modules/chains/additional/openai_functions_retrieval_qa",
"destination": "/docs/use_cases/question_answering/integrations/openai_functions_retrieval_qa"
},
{
"source": "/docs/use_cases/question_answering//semantic-search-over-chat",
"destination": "/docs/use_cases/question_answering/integrations/semantic-search-over-chat"
},
{
"source": "/docs/modules/chains/additional/llm_checker",
"destination": "/docs/use_cases/self_check/llm_checker"
},
{
"source": "/docs/modules/chains/additional/llm_summarization_checker",
"destination": "/docs/use_cases/self_check/llm_summarization_checker"
},
{
"source": "/docs/modules/chains/additional/elasticsearch_database",
"destination": "/docs/use_cases/tabular/elasticsearch_database"
},
{
"source": "/docs/modules/chains/additional/tagging",
"destination": "/docs/use_cases/tagging"
}
]
}