From d5fd0bca35f615963940ea2d6b13b05a10101d39 Mon Sep 17 00:00:00 2001
From: Mason Daugherty
Date: Tue, 12 Aug 2025 15:16:26 -0400
Subject: [PATCH] docs(anthropic): add documentation for extended context windows in Claude Sonnet 4 (#32517)

---
 .../langchain_anthropic/chat_models.py | 35 +++++++++++++++++++
 1 file changed, 35 insertions(+)

diff --git a/libs/partners/anthropic/langchain_anthropic/chat_models.py b/libs/partners/anthropic/langchain_anthropic/chat_models.py
index e32c1279261..28e459993eb 100644
--- a/libs/partners/anthropic/langchain_anthropic/chat_models.py
+++ b/libs/partners/anthropic/langchain_anthropic/chat_models.py
@@ -995,6 +995,41 @@ class ChatAnthropic(BaseChatModel):
         See `Claude documentation `__
         for detail.
 
+    Extended context windows (beta):
+        Claude Sonnet 4 supports a 1-million-token context window, available in
+        beta for organizations in usage tier 4 and those with custom rate limits.
+
+        .. code-block:: python
+
+            from langchain_anthropic import ChatAnthropic
+            from langchain_core.messages import HumanMessage
+
+            llm = ChatAnthropic(
+                model="claude-sonnet-4-20250514",
+                betas=["context-1m-2025-08-07"],  # Enable the 1M context beta
+            )
+
+            long_document = \"\"\"
+            This is a very long document that would benefit from the extended 1M
+            context window...
+            [imagine this continues for hundreds of thousands of tokens]
+            \"\"\"
+
+            messages = [
+                HumanMessage(f\"\"\"
+                Please analyze this document and provide a summary:
+
+                {long_document}
+
+                What are the key themes and main conclusions?
+                \"\"\")
+            ]
+
+            response = llm.invoke(messages)
+
+        See `Claude documentation `__
+        for detail.
+
     Token-efficient tool use (beta):
         See LangChain `docs `__
         for more detail.
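
The docstring example above stops at ``llm.invoke(messages)``. As a hedged follow-up sketch, not taken from the patch itself, one way to confirm that a long prompt was actually accepted under the extended window is to inspect the standard ``usage_metadata`` field that LangChain chat models attach to responses; the model name and beta flag below simply reuse the values shown in the patch.

.. code-block:: python

    # Hedged sketch (not part of the patch above): check the standard
    # ``usage_metadata`` field on the response to see how many input tokens
    # a long prompt consumed when the 1M-context beta is enabled.
    from langchain_anthropic import ChatAnthropic
    from langchain_core.messages import HumanMessage

    llm = ChatAnthropic(
        model="claude-sonnet-4-20250514",
        betas=["context-1m-2025-08-07"],
    )

    response = llm.invoke(
        [HumanMessage("Summarize the key findings of the attached report.")]
    )

    # ``usage_metadata``, when populated, carries ``input_tokens``,
    # ``output_tokens``, and ``total_tokens`` counts.
    if response.usage_metadata is not None:
        print("Input tokens:", response.usage_metadata["input_tokens"])
        print("Output tokens:", response.usage_metadata["output_tokens"])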