Document RunnableWithFallbacks (#13088)
Add documentation to RunnableWithFallbacks
parent 8313c218da
commit 869df62736
@@ -34,13 +34,61 @@ if TYPE_CHECKING:
 class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
-    """
-    A Runnable that can fallback to other Runnables if it fails.
-    """
+    """A Runnable that can fallback to other Runnables if it fails.
+
+    External APIs (e.g., APIs for a language model) may at times experience
+    degraded performance or even downtime.
+
+    In these cases, it can be useful to have a fallback runnable that can be
+    used in place of the original runnable (e.g., fallback to another LLM provider).
+
+    Fallbacks can be defined at the level of a single runnable, or at the level
+    of a chain of runnables. Fallbacks are tried in order until one succeeds or
+    all fail.
+
+    While you can instantiate a ``RunnableWithFallbacks`` directly, it is usually
+    more convenient to use the ``with_fallbacks`` method on a runnable.
+
+    Example:
+
+        .. code-block:: python
+
+            from langchain.chat_models.openai import ChatOpenAI
+            from langchain.chat_models.anthropic import ChatAnthropic
+
+            model = ChatAnthropic().with_fallbacks([ChatOpenAI()])
+            # Will usually use ChatAnthropic, but fallback to ChatOpenAI
+            # if ChatAnthropic fails.
+            model.invoke('hello')
+
+            # And you can also use fallbacks at the level of a chain.
+            # Here if both LLM providers fail, we'll fallback to a good hardcoded
+            # response.
+
+            from langchain.prompts import PromptTemplate
+            from langchain.schema.output_parser import StrOutputParser
+            from langchain.schema.runnable import RunnableLambda
+
+            def when_all_is_lost(inputs):
+                return ("Looks like our LLM providers are down. "
+                        "Here's a nice 🦜️ emoji for you instead.")
+
+            chain_with_fallback = (
+                PromptTemplate.from_template('Tell me a joke about {topic}')
+                | model
+                | StrOutputParser()
+            ).with_fallbacks([RunnableLambda(when_all_is_lost)])
+    """
 
     runnable: Runnable[Input, Output]
+    """The runnable to run first."""
     fallbacks: Sequence[Runnable[Input, Output]]
+    """A sequence of fallbacks to try."""
     exceptions_to_handle: Tuple[Type[BaseException], ...] = (Exception,)
+    """The exceptions on which fallbacks should be tried.
+
+    Any exception that is not a subclass of these exceptions will be raised immediately.
+    """
 
     class Config:
         arbitrary_types_allowed = True
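
The docstring's promise that fallbacks are "tried in order until one succeeds or all fail" boils down to a simple try/except loop. Below is a minimal sketch of that control flow, not the library's actual implementation (which also threads callbacks and config through each attempt); the helper name ``invoke_with_fallbacks`` is hypothetical:

    from typing import Any, Optional, Sequence, Tuple, Type

    from langchain.schema.runnable import Runnable

    def invoke_with_fallbacks(
        runnables: Sequence[Runnable],  # the primary runnable first, then its fallbacks
        input: Any,
        exceptions_to_handle: Tuple[Type[BaseException], ...] = (Exception,),
    ) -> Any:
        """Try each runnable in order, returning the first successful output."""
        first_error: Optional[BaseException] = None
        for runnable in runnables:
            try:
                return runnable.invoke(input)
            except exceptions_to_handle as e:
                # Handled failure: remember the first error and move on
                # to the next runnable in line.
                if first_error is None:
                    first_error = e
        # Every runnable failed, so surface the first handled error. Note that
        # an exception outside exceptions_to_handle is never caught above and
        # therefore propagates immediately, as the docstring describes.
        if first_error is None:
            raise ValueError("No runnables were provided.")
        raise first_error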
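Because ``exceptions_to_handle`` defaults to ``(Exception,)``, any failure in the primary runnable triggers a fallback. It can be narrowed so that only specific errors fall back while anything else (say, a bad API key) is raised immediately. A sketch, assuming ``with_fallbacks`` accepts the same ``exceptions_to_handle`` keyword, with the builtin ``TimeoutError`` as a stand-in for whatever exception type a provider actually raises:

    from langchain.chat_models.anthropic import ChatAnthropic
    from langchain.chat_models.openai import ChatOpenAI

    # Only fall back on timeouts; every other exception type is not a
    # subclass of TimeoutError and is therefore raised immediately.
    model = ChatAnthropic().with_fallbacks(
        [ChatOpenAI()],
        exceptions_to_handle=(TimeoutError,),
    )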
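
Finally, since ``RunnableWithFallbacks`` is a pydantic model whose fields are exactly those shown in the diff, the wrapper can also be constructed directly by keyword rather than via ``with_fallbacks``; the effect is equivalent, just more verbose. A sketch, assuming the schema-era import path:

    from langchain.chat_models.anthropic import ChatAnthropic
    from langchain.chat_models.openai import ChatOpenAI
    from langchain.schema.runnable import RunnableWithFallbacks

    # Equivalent to ChatAnthropic().with_fallbacks([ChatOpenAI()]).
    model = RunnableWithFallbacks(
        runnable=ChatAnthropic(),
        fallbacks=[ChatOpenAI()],
    )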