core:Add optional max_messages to MessagePlaceholder (#16098)

- **Description:** Add optional max_messages to MessagePlaceholder
- **Issue:**
[16096](https://github.com/langchain-ai/langchain/issues/16096)
- **Dependencies:** None
- **Twitter handle:** @davedecaprio

Sometimes it is better to limit the history in the prompt itself rather
than in the memory. This is needed when different prompts in the
chain should see different history lengths.

---------

Co-authored-by: Harrison Chase <hw.chase.17@gmail.com>
This commit is contained in:
David DeCaprio
2024-06-19 18:39:51 -05:00
committed by GitHub
parent 7193634ae6
commit a4bcb45f65
2 changed files with 42 additions and 2 deletions

View File

@@ -655,6 +655,21 @@ def test_messages_placeholder() -> None:
]
def test_messages_placeholder_with_max() -> None:
    """MessagesPlaceholder with n_messages set keeps only the trailing messages."""
    convo = [AIMessage(content=str(i)) for i in (1, 2, 3)]

    # Without a limit the placeholder passes the history through unchanged.
    unlimited = MessagesPlaceholder("history")
    assert unlimited.format_messages(history=convo) == convo

    # With n_messages=2 only the last two messages survive.
    capped = MessagesPlaceholder("history", n_messages=2)
    assert capped.format_messages(history=convo) == convo[-2:]
def test_chat_prompt_message_placeholder_partial() -> None:
prompt = ChatPromptTemplate.from_messages([MessagesPlaceholder("history")])
prompt = prompt.partial(history=[("system", "foo")])