diff --git a/libs/partners/anthropic/langchain_anthropic/chat_models.py b/libs/partners/anthropic/langchain_anthropic/chat_models.py
index e32c1279261..28e459993eb 100644
--- a/libs/partners/anthropic/langchain_anthropic/chat_models.py
+++ b/libs/partners/anthropic/langchain_anthropic/chat_models.py
@@ -995,6 +995,41 @@ class ChatAnthropic(BaseChatModel):
         See `Claude documentation `__
         for detail.
 
+    Extended context windows (beta):
+        Claude Sonnet 4 supports a 1-million-token context window, available in beta
+        for organizations in usage tier 4 and organizations with custom rate limits.
+
+        .. code-block:: python
+
+            from langchain_anthropic import ChatAnthropic
+            from langchain_core.messages import HumanMessage
+
+            llm = ChatAnthropic(
+                model="claude-sonnet-4-20250514",
+                betas=["context-1m-2025-08-07"],  # Enable 1M context beta
+            )
+
+            long_document = \"\"\"
+            This is a very long document that would benefit from the extended 1M
+            context window...
+            [imagine this continues for hundreds of thousands of tokens]
+            \"\"\"
+
+            messages = [
+                HumanMessage(f\"\"\"
+                Please analyze this document and provide a summary:
+
+                {long_document}
+
+                What are the key themes and main conclusions?
+                \"\"\")
+            ]
+
+            response = llm.invoke(messages)
+
+        See `Claude documentation `__
+        for detail.
+
     Token-efficient tool use (beta):
         See LangChain `docs `__
         for more detail.
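
Note for reviewers: this hunk only documents the `betas` option; the plumbing that forwards it to the API is not shown here. As a point of reference, the sketch below shows the raw Anthropic SDK call that `betas=["context-1m-2025-08-07"]` is assumed to correspond to (the SDK's `client.beta.messages.create` surface, which sends the values as `anthropic-beta` headers). Treat the mapping as an assumption, not part of this diff.

    # Hypothetical illustration, not part of this diff: the beta-enabled SDK call
    # that a ChatAnthropic instance configured with betas=[...] is assumed to make.
    import anthropic

    client = anthropic.Anthropic()  # reads ANTHROPIC_API_KEY from the environment

    response = client.beta.messages.create(
        model="claude-sonnet-4-20250514",
        max_tokens=1024,
        betas=["context-1m-2025-08-07"],  # opt into the 1M-token context window beta
        messages=[{"role": "user", "content": "Summarize this document: ..."}],
    )
    print(response.content[0].text)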