Mirror of https://github.com/hwchase17/langchain.git (synced 2025-06-25 16:13:25 +00:00)
core[patch]: enable RunnableWithMessageHistory without config (#23775)
Feedback that `RunnableWithMessageHistory` is unwieldy compared to `ConversationChain` and similar legacy abstractions is common. Legacy chains using memory typically had no explicit notion of threads or separate sessions; to use `RunnableWithMessageHistory`, users are forced to introduce this concept into their code, which can feel like unnecessary boilerplate. Here we enable `RunnableWithMessageHistory` to run without a config if the `get_session_history` callable takes no arguments. This enables minimal implementations like the following:

```python
from langchain_core.chat_history import InMemoryChatMessageHistory
from langchain_core.runnables.history import RunnableWithMessageHistory
from langchain_openai import ChatOpenAI

llm = ChatOpenAI(model="gpt-3.5-turbo-0125")
memory = InMemoryChatMessageHistory()
chain = RunnableWithMessageHistory(llm, lambda: memory)

chain.invoke("Hi I'm Bob")  # Hello Bob!
chain.invoke("What is my name?")  # Your name is Bob.
```
Parent: 5ade0187d0
Commit: 0f7569ddbc
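For contrast, here is a sketch of the config-driven usage that this change makes optional. `session_id` is the library's default configurable key for `RunnableWithMessageHistory`; the `store` dict and the `get_session_history` helper below are illustrative stand-ins, not part of the library:

```python
from typing import Dict

from langchain_core.chat_history import InMemoryChatMessageHistory
from langchain_core.runnables.history import RunnableWithMessageHistory
from langchain_openai import ChatOpenAI

llm = ChatOpenAI(model="gpt-3.5-turbo-0125")

# Illustrative in-memory store keyed by session id.
store: Dict[str, InMemoryChatMessageHistory] = {}


def get_session_history(session_id: str) -> InMemoryChatMessageHistory:
    # One history per session/thread; created lazily on first use.
    if session_id not in store:
        store[session_id] = InMemoryChatMessageHistory()
    return store[session_id]


chain = RunnableWithMessageHistory(llm, get_session_history)

# Every call must identify its session via config["configurable"]["session_id"].
chain.invoke("Hi I'm Bob", config={"configurable": {"session_id": "thread-1"}})
chain.invoke("What is my name?", config={"configurable": {"session_id": "thread-1"}})
```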
```diff
@@ -537,8 +537,9 @@ class RunnableWithMessageHistory(RunnableBindingBase):
         configurable = config.get("configurable", {})
 
         missing_keys = set(expected_keys) - set(configurable.keys())
+        parameter_names = _get_parameter_names(self.get_session_history)
 
-        if missing_keys:
+        if missing_keys and parameter_names:
             example_input = {self.input_messages_key: "foo"}
             example_configurable = {
                 missing_key: "[your-value-here]" for missing_key in missing_keys
@@ -551,11 +552,16 @@ class RunnableWithMessageHistory(RunnableBindingBase):
                 f"e.g., chain.invoke({example_input}, {example_config})"
             )
 
-        parameter_names = _get_parameter_names(self.get_session_history)
-
         if len(expected_keys) == 1:
-            # If arity = 1, then invoke function by positional arguments
-            message_history = self.get_session_history(configurable[expected_keys[0]])
+            if parameter_names:
+                # If arity = 1, then invoke function by positional arguments
+                message_history = self.get_session_history(
+                    configurable[expected_keys[0]]
+                )
+            else:
+                if not config:
+                    config["configurable"] = {}
+                message_history = self.get_session_history()
         else:
             # otherwise verify that names of keys patch and invoke by named arguments
             if set(expected_keys) != set(parameter_names):
```
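To make the arity check above concrete, here is a rough stand-in for the private `_get_parameter_names` helper based on `inspect.signature`; the real helper may differ in detail, but the dispatch idea is the same: an empty parameter list routes to the new no-config branch, while a single parameter routes to the positional `configurable[expected_keys[0]]` call.

```python
import inspect
from typing import Callable, List


def _parameter_names(callable_: Callable) -> List[str]:
    """Illustrative stand-in for _get_parameter_names: list a callable's parameter names."""
    return list(inspect.signature(callable_).parameters)


# Zero-argument factory -> the new path: get_session_history() is called with no arguments.
def shared_history_factory() -> str:
    return "shared history"


# One-argument factory -> the existing path: called positionally with configurable["session_id"].
def per_session_factory(session_id: str) -> str:
    return f"history for {session_id}"


assert _parameter_names(shared_history_factory) == []
assert _parameter_names(per_session_factory) == ["session_id"]
```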
```diff
@@ -694,3 +694,24 @@ async def test_using_custom_config_specs_async() -> None:
             ]
         ),
     }
+
+
+def test_ignore_session_id() -> None:
+    """Test without config."""
+
+    def _fake_llm(input: List[BaseMessage]) -> List[BaseMessage]:
+        return [
+            AIMessage(
+                content="you said: "
+                + "\n".join(
+                    str(m.content) for m in input if isinstance(m, HumanMessage)
+                )
+            )
+        ]
+
+    runnable = RunnableLambda(_fake_llm)
+    history = InMemoryChatMessageHistory()
+    with_message_history = RunnableWithMessageHistory(runnable, lambda: history)  # type: ignore
+    _ = with_message_history.invoke("hello")
+    _ = with_message_history.invoke("hello again")
+    assert len(history.messages) == 4
```
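For readers who want to run the new test outside the suite, here is a self-contained version with the imports the excerpt omits; the module paths are the standard `langchain_core` ones and are assumptions about what the test file itself imports:

```python
from typing import List

from langchain_core.chat_history import InMemoryChatMessageHistory
from langchain_core.messages import AIMessage, BaseMessage, HumanMessage
from langchain_core.runnables import RunnableLambda
from langchain_core.runnables.history import RunnableWithMessageHistory


def _fake_llm(input: List[BaseMessage]) -> List[BaseMessage]:
    # Echo back everything the human has said so far.
    return [
        AIMessage(
            content="you said: "
            + "\n".join(str(m.content) for m in input if isinstance(m, HumanMessage))
        )
    ]


runnable = RunnableLambda(_fake_llm)
history = InMemoryChatMessageHistory()
# Zero-argument factory: no config or session_id is required anymore.
with_message_history = RunnableWithMessageHistory(runnable, lambda: history)  # type: ignore

with_message_history.invoke("hello")
with_message_history.invoke("hello again")
# Two human turns plus two AI replies end up in the shared history.
assert len(history.messages) == 4
```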