anthropic: support built-in tools, improve docs (#30274)

- Support features from recent update:
https://www.anthropic.com/news/token-saving-updates (mostly adding
support for built-in tools in `bind_tools`)
- Add documentation around prompt caching, token-efficient tool use, and
built-in tools.
This commit is contained in:
ccurme
2025-03-14 12:18:50 -04:00
committed by GitHub
parent f27e2d7ce7
commit 226f29bc96
7 changed files with 629 additions and 30 deletions

View File

@@ -1,4 +1,14 @@
from langchain_anthropic.chat_models import ChatAnthropic, ChatAnthropicMessages
from langchain_anthropic.chat_models import (
ChatAnthropic,
ChatAnthropicMessages,
convert_to_anthropic_tool,
)
from langchain_anthropic.llms import Anthropic, AnthropicLLM
__all__ = ["ChatAnthropicMessages", "ChatAnthropic", "Anthropic", "AnthropicLLM"]
__all__ = [
"ChatAnthropicMessages",
"ChatAnthropic",
"convert_to_anthropic_tool",
"Anthropic",
"AnthropicLLM",
]

View File

@@ -93,6 +93,22 @@ class AnthropicTool(TypedDict):
cache_control: NotRequired[Dict[str, str]]
def _is_builtin_tool(tool: Any) -> bool:
if not isinstance(tool, dict):
return False
tool_type = tool.get("type")
if not tool_type or not isinstance(tool_type, str):
return False
_builtin_tool_prefixes = [
"text_editor_",
"computer_",
"bash_",
]
return any(tool_type.startswith(prefix) for prefix in _builtin_tool_prefixes)
def _format_image(image_url: str) -> Dict:
"""
Formats an image of format data:image/jpeg;base64,{b64_string}
@@ -669,6 +685,109 @@ class ChatAnthropic(BaseChatModel):
These can be disabled by setting ``stream_usage=False`` in the stream method,
or by setting ``stream_usage=False`` when initializing ChatAnthropic.
Prompt caching:
See LangChain `docs <https://python.langchain.com/docs/integrations/chat/anthropic/>`_
for more detail.
.. code-block:: python
from langchain_anthropic import ChatAnthropic
llm = ChatAnthropic(model="claude-3-7-sonnet-20250219")
messages = [
{
"role": "system",
"content": [
{
"type": "text",
"text": "Below is some long context:",
},
{
"type": "text",
"text": f"{long_text}",
"cache_control": {"type": "ephemeral"},
},
],
},
{
"role": "user",
"content": "What's that about?",
},
]
response = llm.invoke(messages)
response.usage_metadata["input_token_details"]
.. code-block:: python
{'cache_read': 0, 'cache_creation': 1458}
Token-efficient tool use (beta):
See LangChain `docs <https://python.langchain.com/docs/integrations/chat/anthropic/>`_
for more detail.
.. code-block:: python
from langchain_anthropic import ChatAnthropic
from langchain_core.tools import tool
llm = ChatAnthropic(
model="claude-3-7-sonnet-20250219",
temperature=0,
model_kwargs={
"extra_headers": {
"anthropic-beta": "token-efficient-tools-2025-02-19"
}
}
)
@tool
def get_weather(location: str) -> str:
\"\"\"Get the weather at a location.\"\"\"
return "It's sunny."
llm_with_tools = llm.bind_tools([get_weather])
response = llm_with_tools.invoke(
"What's the weather in San Francisco?"
)
print(response.tool_calls)
print(f'Total tokens: {response.usage_metadata["total_tokens"]}')
.. code-block:: none
[{'name': 'get_weather', 'args': {'location': 'San Francisco'}, 'id': 'toolu_01HLjQMSb1nWmgevQUtEyz17', 'type': 'tool_call'}]
Total tokens: 408
Built-in tools:
See LangChain `docs <https://python.langchain.com/docs/integrations/chat/anthropic/>`_
for more detail.
.. code-block:: python
from langchain_anthropic import ChatAnthropic
llm = ChatAnthropic(model="claude-3-7-sonnet-20250219")
tool = {"type": "text_editor_20250124", "name": "str_replace_editor"}
llm_with_tools = llm.bind_tools([tool])
response = llm_with_tools.invoke(
"There's a syntax error in my primes.py file. Can you help me fix it?"
)
print(response.text())
response.tool_calls
.. code-block:: none
I'd be happy to help you fix the syntax error in your primes.py file. First, let's look at the current content of the file to identify the error.
[{'name': 'str_replace_editor',
'args': {'command': 'view', 'path': '/repo/primes.py'},
'id': 'toolu_01VdNgt1YV7kGfj9LFLm6HyQ',
'type': 'tool_call'}]
Response metadata
.. code-block:: python
@@ -1156,7 +1275,6 @@ class ChatAnthropic(BaseChatModel):
llm = ChatAnthropic(
model="claude-3-5-sonnet-20240620",
temperature=0,
extra_headers={"anthropic-beta": "prompt-caching-2024-07-31"}
)
llm_with_tools = llm.bind_tools([GetWeather, cached_price_tool])
llm_with_tools.invoke("what is the weather like in San Francisco",)
@@ -1174,7 +1292,10 @@ class ChatAnthropic(BaseChatModel):
AIMessage(content=[{'text': 'To get the current weather in San Francisco, I can use the GetWeather function. Let me check that for you.', 'type': 'text'}, {'id': 'toolu_01HtVtY1qhMFdPprx42qU2eA', 'input': {'location': 'San Francisco, CA'}, 'name': 'GetWeather', 'type': 'tool_use'}], response_metadata={'id': 'msg_016RfWHrRvW6DAGCdwB6Ac64', 'model': 'claude-3-5-sonnet-20240620', 'stop_reason': 'tool_use', 'stop_sequence': None, 'usage': {'input_tokens': 171, 'output_tokens': 82, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 1470}}, id='run-88b1f825-dcb7-4277-ac27-53df55d22001-0', tool_calls=[{'name': 'GetWeather', 'args': {'location': 'San Francisco, CA'}, 'id': 'toolu_01HtVtY1qhMFdPprx42qU2eA', 'type': 'tool_call'}], usage_metadata={'input_tokens': 171, 'output_tokens': 82, 'total_tokens': 253})
""" # noqa: E501
formatted_tools = [convert_to_anthropic_tool(tool) for tool in tools]
formatted_tools = [
tool if _is_builtin_tool(tool) else convert_to_anthropic_tool(tool)
for tool in tools
]
if not tool_choice:
pass
elif isinstance(tool_choice, dict):

View File

@@ -7,8 +7,8 @@ authors = []
license = { text = "MIT" }
requires-python = "<4.0,>=3.9"
dependencies = [
"anthropic<1,>=0.47.0",
"langchain-core<1.0.0,>=0.3.41",
"anthropic<1,>=0.49.0",
"langchain-core<1.0.0,>=0.3.44",
"pydantic<3.0.0,>=2.7.4",
]
name = "langchain-anthropic"

View File

@@ -380,20 +380,21 @@ async def test_astreaming() -> None:
def test_tool_use() -> None:
llm = ChatAnthropic(model=MODEL_NAME)
llm_with_tools = llm.bind_tools(
[
{
"name": "get_weather",
"description": "Get weather report for a city",
"input_schema": {
"type": "object",
"properties": {"location": {"type": "string"}},
},
}
]
llm = ChatAnthropic(
model="claude-3-7-sonnet-20250219",
temperature=0,
)
response = llm_with_tools.invoke("what's the weather in san francisco, ca")
tool_definition = {
"name": "get_weather",
"description": "Get weather report for a city",
"input_schema": {
"type": "object",
"properties": {"location": {"type": "string"}},
},
}
llm_with_tools = llm.bind_tools([tool_definition])
query = "how are you? what's the weather in san francisco, ca"
response = llm_with_tools.invoke(query)
assert isinstance(response, AIMessage)
assert isinstance(response.content, list)
assert isinstance(response.tool_calls, list)
@@ -404,10 +405,18 @@ def test_tool_use() -> None:
assert "location" in tool_call["args"]
# Test streaming
input = "how are you? what's the weather in san francisco, ca"
llm = ChatAnthropic(
model="claude-3-7-sonnet-20250219",
temperature=0,
# Add extra headers to also test token-efficient tools
model_kwargs={
"extra_headers": {"anthropic-beta": "token-efficient-tools-2025-02-19"}
},
)
llm_with_tools = llm.bind_tools([tool_definition])
first = True
chunks = [] # type: ignore
for chunk in llm_with_tools.stream(input):
for chunk in llm_with_tools.stream(query):
chunks = chunks + [chunk]
if first:
gathered = chunk
@@ -435,10 +444,19 @@ def test_tool_use() -> None:
assert "location" in tool_call["args"]
assert tool_call["id"] is not None
# Testing token-efficient tools
# https://docs.anthropic.com/en/docs/build-with-claude/tool-use/token-efficient-tool-use
assert gathered.usage_metadata
assert response.usage_metadata
assert (
gathered.usage_metadata["total_tokens"]
< response.usage_metadata["total_tokens"]
)
# Test passing response back to model
stream = llm_with_tools.stream(
[
input,
query,
gathered,
ToolMessage(content="sunny and warm", tool_call_id=tool_call["id"]),
]
@@ -455,6 +473,17 @@ def test_tool_use() -> None:
assert len(chunks) > 1
def test_builtin_tools() -> None:
    """Integration test: a raw built-in tool spec passed through ``bind_tools``.

    Binds Anthropic's text-editor built-in tool as a plain dict (no
    ``convert_to_anthropic_tool`` conversion should be applied to it) and
    checks that the model responds with at least one tool call.

    NOTE(review): requires live Anthropic API access and a model that
    supports the ``text_editor_20250124`` tool type.
    """
    llm = ChatAnthropic(model="claude-3-7-sonnet-20250219")
    # Built-in tool spec: forwarded to the API as-is by bind_tools.
    tool = {"type": "text_editor_20250124", "name": "str_replace_editor"}
    llm_with_tools = llm.bind_tools([tool])
    response = llm_with_tools.invoke(
        "There's a syntax error in my primes.py file. Can you help me fix it?"
    )
    assert isinstance(response, AIMessage)
    # The prompt should trigger the editor tool (e.g. a 'view' command).
    assert response.tool_calls
class GenerateUsername(BaseModel):
"Get a username based on someone's name and hair color."

View File

@@ -1,6 +1,12 @@
from langchain_anthropic import __all__
EXPECTED_ALL = ["ChatAnthropicMessages", "ChatAnthropic", "Anthropic", "AnthropicLLM"]
EXPECTED_ALL = [
"ChatAnthropicMessages",
"ChatAnthropic",
"convert_to_anthropic_tool",
"Anthropic",
"AnthropicLLM",
]
def test_all_imports() -> None:

View File

@@ -17,7 +17,7 @@ wheels = [
[[package]]
name = "anthropic"
version = "0.47.0"
version = "0.49.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "anyio" },
@@ -28,9 +28,9 @@ dependencies = [
{ name = "sniffio" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/7c/bf/39f8fd5f199bcdc5f5af8050f888c5928100c3c138b8292fcb8ba6535d80/anthropic-0.47.0.tar.gz", hash = "sha256:6e19994d3a9fc7527c8505b62b1494ca3f39d6bb993a4885014575c09905ebfc", size = 207642 }
sdist = { url = "https://files.pythonhosted.org/packages/86/e3/a88c8494ce4d1a88252b9e053607e885f9b14d0a32273d47b727cbee4228/anthropic-0.49.0.tar.gz", hash = "sha256:c09e885b0f674b9119b4f296d8508907f6cff0009bc20d5cf6b35936c40b4398", size = 210016 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/4a/62/c43e334ae8c1ea90f5d763016df8e4ef058643a2dbcf78d8cb63cbc474bc/anthropic-0.47.0-py3-none-any.whl", hash = "sha256:294b10e9ca800f57949f635be7b611e8ecfe8f9f2a56a2c165200841a61bddb0", size = 239500 },
{ url = "https://files.pythonhosted.org/packages/76/74/5d90ad14d55fbe3f9c474fdcb6e34b4bed99e3be8efac98734a5ddce88c1/anthropic-0.49.0-py3-none-any.whl", hash = "sha256:bbc17ad4e7094988d2fa86b87753ded8dce12498f4b85fe5810f208f454a8375", size = 243368 },
]
[[package]]
@@ -449,7 +449,7 @@ typing = [
[package.metadata]
requires-dist = [
{ name = "anthropic", specifier = ">=0.47.0,<1" },
{ name = "anthropic", specifier = ">=0.49.0,<1" },
{ name = "langchain-core", editable = "../../core" },
{ name = "pydantic", specifier = ">=2.7.4,<3.0.0" },
]
@@ -483,7 +483,7 @@ typing = [
[[package]]
name = "langchain-core"
version = "0.3.41"
version = "0.3.45rc1"
source = { editable = "../../core" }
dependencies = [
{ name = "jsonpatch" },
@@ -541,7 +541,7 @@ typing = [
[[package]]
name = "langchain-tests"
version = "0.3.12"
version = "0.3.14"
source = { editable = "../../standard-tests" }
dependencies = [
{ name = "httpx" },