docs: document Anthropic cache TTL count details (#31708)

ccurme 2025-06-23 16:16:42 -04:00 committed by GitHub
parent e6191d58e7
commit ee83993b91
6 changed files with 68 additions and 0 deletions

View File

@@ -568,6 +568,26 @@
" ```\n",
" and specifying `\"cache_control\": {\"type\": \"ephemeral\", \"ttl\": \"1h\"}`.\n",
"\n",
" Details of cached token counts will be included on the `InputTokenDetails` of response's `usage_metadata`:\n",
"\n",
" ```python\n",
" response = llm.invoke(messages)\n",
" response.usage_metadata\n",
" ```\n",
" ```\n",
" {\n",
" \"input_tokens\": 1500,\n",
" \"output_tokens\": 200,\n",
" \"total_tokens\": 1700,\n",
" \"input_token_details\": {\n",
" \"cache_read\": 0,\n",
" \"cache_creation\": 1000,\n",
" \"ephemeral_1h_input_tokens\": 750,\n",
" \"ephemeral_5m_input_tokens\": 250,\n",
" }\n",
" }\n",
" ```\n",
"\n",
":::"
]
},
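For orientation, here is a minimal end-to-end sketch of the pattern the notebook documents: enable the `extended-cache-ttl-2025-04-11` beta, mark a content block with a 1-hour `cache_control`, and read the cached-token breakdown back from `usage_metadata`. The `betas` constructor argument, the model name, and the prompt are assumptions for illustration, not part of this diff.

```python
from langchain_anthropic import ChatAnthropic

# Sketch only: `betas` and the model name are assumed here for illustration.
llm = ChatAnthropic(
    model="claude-3-7-sonnet-20250219",
    betas=["extended-cache-ttl-2025-04-11"],  # opt in to the 1-hour cache TTL beta
)

messages = [
    {
        "role": "system",
        "content": [
            {
                "type": "text",
                "text": "<long, reusable system prompt worth caching>",
                "cache_control": {"type": "ephemeral", "ttl": "1h"},
            }
        ],
    },
    {"role": "user", "content": "Summarize the context above."},
]

response = llm.invoke(messages)
# Cached token counts land under `input_token_details`, e.g.:
# {"cache_read": 0, "cache_creation": 1000,
#  "ephemeral_1h_input_tokens": 750, "ephemeral_5m_input_tokens": 250}
print(response.usage_metadata["input_token_details"])
```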

View File

@@ -55,6 +55,8 @@ class InputTokenDetails(TypedDict, total=False):
}
.. versionadded:: 0.3.9
May also hold extra provider-specific keys.
"""
audio: int
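To make "extra provider-specific keys" concrete: `InputTokenDetails` is a `TypedDict` declared with `total=False`, so at runtime it is a plain dict and providers may attach additional counts (such as Anthropic's per-TTL cache keys) alongside the declared ones. The sketch below simply mirrors the documented example values; static type checkers may flag the undeclared extras.

```python
from langchain_core.messages.ai import InputTokenDetails, UsageMetadata

# Standard keys plus Anthropic's per-TTL cache-creation breakdown.
details: InputTokenDetails = {
    "cache_read": 0,
    "cache_creation": 1000,
    # Provider-specific extras (a type checker may flag these; they are fine at runtime):
    "ephemeral_1h_input_tokens": 750,
    "ephemeral_5m_input_tokens": 250,
}

usage: UsageMetadata = {
    "input_tokens": 1500,
    "output_tokens": 200,
    "total_tokens": 1700,
    "input_token_details": details,
}
```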

View File

@@ -702,6 +702,8 @@
}
.. versionadded:: 0.3.9
May also hold extra provider-specific keys.
''',
'properties': dict({
'audio': dict({
@@ -2132,6 +2134,8 @@
}
.. versionadded:: 0.3.9
May also hold extra provider-specific keys.
''',
'properties': dict({
'audio': dict({

View File

@@ -1105,6 +1105,8 @@
}
.. versionadded:: 0.3.9
May also hold extra provider-specific keys.
''',
'properties': dict({
'audio': dict({

View File

@@ -2650,6 +2650,8 @@
}
.. versionadded:: 0.3.9
May also hold extra provider-specific keys.
''',
'properties': dict({
'audio': dict({
@@ -4124,6 +4126,8 @@
}
.. versionadded:: 0.3.9
May also hold extra provider-specific keys.
''',
'properties': dict({
'audio': dict({
@@ -5629,6 +5633,8 @@
}
.. versionadded:: 0.3.9
May also hold extra provider-specific keys.
''',
'properties': dict({
'audio': dict({
@@ -7009,6 +7015,8 @@
}
.. versionadded:: 0.3.9
May also hold extra provider-specific keys.
''',
'properties': dict({
'audio': dict({
@@ -8525,6 +8533,8 @@
}
.. versionadded:: 0.3.9
May also hold extra provider-specific keys.
''',
'properties': dict({
'audio': dict({
@@ -9950,6 +9960,8 @@
}
.. versionadded:: 0.3.9
May also hold extra provider-specific keys.
''',
'properties': dict({
'audio': dict({
@@ -11374,6 +11386,8 @@
}
.. versionadded:: 0.3.9
May also hold extra provider-specific keys.
''',
'properties': dict({
'audio': dict({
@@ -12840,6 +12854,8 @@
}
.. versionadded:: 0.3.9
May also hold extra provider-specific keys.
''',
'properties': dict({
'audio': dict({

View File

@@ -955,6 +955,8 @@ class ChatAnthropic(BaseChatModel):
.. dropdown:: Extended caching
.. versionadded:: 0.3.15
The cache lifetime is 5 minutes by default. If this is too short, you can
apply one-hour caching by enabling the ``"extended-cache-ttl-2025-04-11"``
beta header:
@@ -968,6 +970,28 @@ class ChatAnthropic(BaseChatModel):
and specifying ``"cache_control": {"type": "ephemeral", "ttl": "1h"}``.
Details of cached token counts will be included on the ``InputTokenDetails``
of the response's ``usage_metadata``:
.. code-block:: python
response = llm.invoke(messages)
response.usage_metadata
.. code-block:: python
{
"input_tokens": 1500,
"output_tokens": 200,
"total_tokens": 1700,
"input_token_details": {
"cache_read": 0,
"cache_creation": 1000,
"ephemeral_1h_input_tokens": 750,
"ephemeral_5m_input_tokens": 250,
}
}
See `Claude documentation <https://docs.anthropic.com/en/docs/build-with-claude/prompt-caching#1-hour-cache-duration-beta>`_
for details.
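As a sanity check on how the new keys relate to each other (the example figures above suggest the per-TTL counts break down `cache_creation`: 750 + 250 == 1000), a small sketch that aggregates them from a response; the helper name is made up for illustration.

```python
from langchain_core.messages import AIMessage


def summarize_cache_usage(response: AIMessage) -> dict:
    """Summarize cached-token counts from a response's `usage_metadata`.

    Assumes the `ephemeral_1h_input_tokens` / `ephemeral_5m_input_tokens`
    keys break down `cache_creation` by TTL, as the documented example
    values suggest.
    """
    details = (response.usage_metadata or {}).get("input_token_details", {})
    return {
        "cache_hits": details.get("cache_read", 0),
        "cache_writes_total": details.get("cache_creation", 0),
        "cache_writes_1h": details.get("ephemeral_1h_input_tokens", 0),
        "cache_writes_5m": details.get("ephemeral_5m_input_tokens", 0),
    }


# e.g. summarize_cache_usage(llm.invoke(messages)) ->
# {"cache_hits": 0, "cache_writes_total": 1000,
#  "cache_writes_1h": 750, "cache_writes_5m": 250}
```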