diff --git a/langchain/memory/summary_buffer.py b/langchain/memory/summary_buffer.py
index ac0d9287345..5e4c5b93ec6 100644
--- a/langchain/memory/summary_buffer.py
+++ b/langchain/memory/summary_buffer.py
@@ -57,7 +57,10 @@ class ConversationSummaryBufferMemory(BaseChatMemory, SummarizerMixin):
     def save_context(self, inputs: Dict[str, Any], outputs: Dict[str, str]) -> None:
         """Save context from this conversation to buffer."""
         super().save_context(inputs, outputs)
-        # Prune buffer if it exceeds max token limit
+        self.prune()
+
+    def prune(self) -> None:
+        """Prune buffer if it exceeds max token limit"""
         buffer = self.chat_memory.messages
         curr_buffer_length = self.llm.get_num_tokens_from_messages(buffer)
         if curr_buffer_length > self.max_token_limit: