mirror of
https://github.com/hwchase17/langchain.git
synced 2025-06-19 13:23:35 +00:00
core: Add docs for RunnableConfigurableFields (#19849)
- [x] **docs**: core: Add docs for `RunnableConfigurableFields` - **Description:** Added in-code docs for `RunnableConfigurableFields` with an example - **Issue:** #18803 - **Dependencies:** NA - **Twitter handle:** NA --------- Co-authored-by: Chester Curme <chester.curme@gmail.com>
This commit is contained in:
parent
c28efb878c
commit
d5c412b0a9
@ -220,7 +220,65 @@ class DynamicRunnable(RunnableSerializable[Input, Output]):
|
||||
|
||||
|
||||
class RunnableConfigurableFields(DynamicRunnable[Input, Output]):
|
||||
"""Runnable that can be dynamically configured."""
|
||||
"""Runnable that can be dynamically configured.
|
||||
|
||||
A RunnableConfigurableFields should be instantiated using the
`configurable_fields` method of a Runnable.
|
||||
|
||||
Here is an example of using a RunnableConfigurableFields with LLMs:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
from langchain_core.prompts import PromptTemplate
|
||||
from langchain_core.runnables import ConfigurableField
|
||||
from langchain_openai import ChatOpenAI
|
||||
|
||||
model = ChatOpenAI(temperature=0).configurable_fields(
|
||||
temperature=ConfigurableField(
|
||||
id="temperature",
|
||||
name="LLM Temperature",
|
||||
description="The temperature of the LLM",
|
||||
)
|
||||
)
|
||||
# This creates a RunnableConfigurableFields for a chat model.
|
||||
|
||||
# When invoking the created RunnableSequence, you can pass in the
# value for your ConfigurableField's id, which in this case
# will change the temperature
|
||||
|
||||
prompt = PromptTemplate.from_template("Pick a random number above {x}")
|
||||
chain = prompt | model
|
||||
|
||||
chain.invoke({"x": 0})
|
||||
chain.invoke({"x": 0}, config={"configurable": {"temperature": 0.9}})
|
||||
|
||||
|
||||
Here is an example of using a RunnableConfigurableFields with HubRunnables:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
from langchain_core.prompts import PromptTemplate
|
||||
from langchain_core.runnables import ConfigurableField
|
||||
from langchain_openai import ChatOpenAI
|
||||
from langchain.runnables.hub import HubRunnable
|
||||
|
||||
prompt = HubRunnable("rlm/rag-prompt").configurable_fields(
|
||||
owner_repo_commit=ConfigurableField(
|
||||
id="hub_commit",
|
||||
name="Hub Commit",
|
||||
description="The Hub commit to pull from",
|
||||
)
|
||||
)
|
||||
|
||||
prompt.invoke({"question": "foo", "context": "bar"})
|
||||
|
||||
# Invoking prompt with a config specifying a different hub commit
|
||||
|
||||
prompt.invoke(
|
||||
{"question": "foo", "context": "bar"},
|
||||
config={"configurable": {"hub_commit": "rlm/rag-prompt-llama"}},
|
||||
)
|
||||
"""
|
||||
|
||||
fields: Dict[str, AnyConfigurableField]
|
||||
|
||||
|
Loading…
Reference in New Issue
Block a user