anthropic[patch]: Release 0.1.13, tool_choice support (#21773)

Bagatur authored on 2024-05-16 10:56:29 -07:00, committed by GitHub
parent 040597e832
commit 6416d16d39
6 changed files with 179 additions and 24 deletions


@@ -80,7 +80,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 2,
+"execution_count": 1,
 "id": "238bdbaa-526a-4130-89e9-523aa44bb196",
 "metadata": {},
 "outputs": [],
@@ -250,16 +250,7 @@
 "execution_count": 3,
 "id": "42f87466-cb8e-490d-a9f8-aa0f8e9b4217",
 "metadata": {},
-"outputs": [
-{
-"name": "stderr",
-"output_type": "stream",
-"text": [
-"/Users/bagatur/langchain/libs/core/langchain_core/_api/beta_decorator.py:87: LangChainBetaWarning: The function `bind_tools` is in beta. It is actively being worked on, so the API may change.\n",
-" warn_beta(\n"
-]
-}
-],
+"outputs": [],
 "source": [
 "from langchain_core.pydantic_v1 import BaseModel, Field\n",
 "\n",
@@ -369,13 +360,49 @@
 "id": "90e015e0-c6e5-4ff5-8fb9-be0cd3c86395",
 "metadata": {},
 "source": [
-"::: {.callout-tip}\n",
+":::tip\n",
 "\n",
 "ChatAnthropic model outputs are always a single AI message that can have either a single string or a list of content blocks. The content blocks can be text blocks or tool-use blocks. There can be multiple of each and they can be interspersed.\n",
 "\n",
 ":::"
 ]
 },
+{
+"cell_type": "markdown",
+"id": "b5145dea-0183-4cab-b9e2-0e35fb8370cf",
+"metadata": {},
+"source": [
+"### Forcing tool calls\n",
+"\n",
+"By default the model can choose whether to call any tools. To force the model to call at least one tool we can specify `bind_tools(..., tool_choice=\"any\")`, and to force the model to call a specific tool we can pass in that tool's name, `bind_tools(..., tool_choice=\"GetWeather\")`."
+]
+},
+{
+"cell_type": "code",
+"execution_count": 4,
+"id": "05993626-060c-449f-8069-e52d31442977",
+"metadata": {},
+"outputs": [
+{
+"data": {
+"text/plain": [
+"[{'name': 'GetWeather',\n",
+" 'args': {'location': '<UNKNOWN>'},\n",
+" 'id': 'toolu_01DwWjKzHPs6EHCUPxsGm9bN'}]"
+]
+},
+"execution_count": 4,
+"metadata": {},
+"output_type": "execute_result"
+}
+],
+"source": [
+"llm_with_force_tools = llm.bind_tools([GetWeather], tool_choice=\"GetWeather\")\n",
+"# Notice the model will still return tool calls despite a message that\n",
+"# doesn't have anything to do with the tools.\n",
+"llm_with_force_tools.invoke(\"this doesn't really require tool use\").tool_calls"
+]
+},
 {
 "cell_type": "markdown",
 "id": "8652ee98-814c-4ed6-9def-275eeaa9651e",
@@ -656,9 +683,9 @@
 ],
 "metadata": {
 "kernelspec": {
-"display_name": ".venv",
+"display_name": "poetry-venv-2",
 "language": "python",
-"name": "python3"
+"name": "poetry-venv-2"
 },
 "language_info": {
 "codemirror_mode": {
@@ -670,7 +697,7 @@
 "name": "python",
 "nbconvert_exporter": "python",
 "pygments_lexer": "ipython3",
-"version": "3.11.4"
+"version": "3.9.1"
 }
 },
 "nbformat": 4,


@@ -542,6 +542,10 @@ class ChatAnthropic(BaseChatModel):
     def bind_tools(
         self,
         tools: Sequence[Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool]],
+        *,
+        tool_choice: Optional[
+            Union[Dict[str, str], Literal["any", "auto"], str]
+        ] = None,
         **kwargs: Any,
     ) -> Runnable[LanguageModelInput, BaseMessage]:
         """Bind tool-like objects to this chat model.
@@ -551,6 +555,15 @@ class ChatAnthropic(BaseChatModel):
                 Can be a dictionary, pydantic model, callable, or BaseTool. Pydantic
                 models, callables, and BaseTools will be automatically converted to
                 their schema dictionary representation.
+            tool_choice: Which tool to require the model to call.
+                Options are:
+                    name of the tool (str): calls the corresponding tool;
+                    "auto" or None: automatically selects a tool (including no tool);
+                    "any": force at least one tool to be called;
+                    or a dict of the form:
+                        {"type": "tool", "name": "tool_name"},
+                        or {"type": "any"},
+                        or {"type": "auto"};
             **kwargs: Any additional parameters to bind.
 
         Example:
@@ -564,9 +577,14 @@ class ChatAnthropic(BaseChatModel):
 
                     location: str = Field(..., description="The city and state, e.g. San Francisco, CA")
 
+                class GetPrice(BaseModel):
+                    '''Get the price of a specific product.'''
+
+                    product: str = Field(..., description="The product to look up.")
+
                 llm = ChatAnthropic(model="claude-3-opus-20240229", temperature=0)
-                llm_with_tools = llm.bind_tools([GetWeather])
+                llm_with_tools = llm.bind_tools([GetWeather, GetPrice])
                 llm_with_tools.invoke("what is the weather like in San Francisco",)
                 # -> AIMessage(
                 #     content=[
@@ -576,8 +594,64 @@ class ChatAnthropic(BaseChatModel):
                 #     response_metadata={'id': 'msg_01GM3zQtoFv8jGQMW7abLnhi', 'model': 'claude-3-opus-20240229', 'stop_reason': 'tool_use', 'stop_sequence': None, 'usage': {'input_tokens': 487, 'output_tokens': 145}},
                 #     id='run-87b1331e-9251-4a68-acef-f0a018b639cc-0'
                 # )
+
+        Example — force tool call with tool_choice 'any':
+            .. code-block:: python
+
+                from langchain_anthropic import ChatAnthropic
+                from langchain_core.pydantic_v1 import BaseModel, Field
+
+                class GetWeather(BaseModel):
+                    '''Get the current weather in a given location'''
+
+                    location: str = Field(..., description="The city and state, e.g. San Francisco, CA")
+
+                class GetPrice(BaseModel):
+                    '''Get the price of a specific product.'''
+
+                    product: str = Field(..., description="The product to look up.")
+
+                llm = ChatAnthropic(model="claude-3-opus-20240229", temperature=0)
+                llm_with_tools = llm.bind_tools([GetWeather, GetPrice], tool_choice="any")
+                llm_with_tools.invoke("what is the weather like in San Francisco",)
+
+        Example — force specific tool call with tool_choice '<name_of_tool>':
+            .. code-block:: python
+
+                from langchain_anthropic import ChatAnthropic
+                from langchain_core.pydantic_v1 import BaseModel, Field
+
+                class GetWeather(BaseModel):
+                    '''Get the current weather in a given location'''
+
+                    location: str = Field(..., description="The city and state, e.g. San Francisco, CA")
+
+                class GetPrice(BaseModel):
+                    '''Get the price of a specific product.'''
+
+                    product: str = Field(..., description="The product to look up.")
+
+                llm = ChatAnthropic(model="claude-3-opus-20240229", temperature=0)
+                llm_with_tools = llm.bind_tools([GetWeather, GetPrice], tool_choice="GetWeather")
+                llm_with_tools.invoke("what is the weather like in San Francisco",)
         """  # noqa: E501
         formatted_tools = [convert_to_anthropic_tool(tool) for tool in tools]
+        if not tool_choice:
+            pass
+        elif isinstance(tool_choice, dict):
+            kwargs["tool_choice"] = tool_choice
+        elif isinstance(tool_choice, str) and tool_choice in ("any", "auto"):
+            kwargs["tool_choice"] = {"type": tool_choice}
+        elif isinstance(tool_choice, str):
+            kwargs["tool_choice"] = {"type": "tool", "name": tool_choice}
+        else:
+            raise ValueError(
+                f"Unrecognized 'tool_choice' type {tool_choice=}. Expected dict, "
+                f"str, or None."
+            )
         return self.bind(tools=formatted_tools, **kwargs)
 
     def with_structured_output(
@@ -683,7 +757,7 @@ class ChatAnthropic(BaseChatModel):
                 # }
         """  # noqa: E501
-        llm = self.bind_tools([schema])
+        llm = self.bind_tools([schema], tool_choice="any")
         if isinstance(schema, type) and issubclass(schema, BaseModel):
             output_parser = ToolsOutputParser(
                 first_tool_only=True, pydantic_schemas=[schema]
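
For a quick check of the `tool_choice` mapping added to `bind_tools` above (the same behavior is exercised by the unit tests further down), a rough sketch that inspects the kwarg bound onto the returned runnable could look like the following; no API call is made, and the placeholder key only satisfies the constructor:

    from typing import cast

    from langchain_anthropic import ChatAnthropic
    from langchain_core.pydantic_v1 import BaseModel, Field
    from langchain_core.runnables import RunnableBinding


    class GetWeather(BaseModel):
        """Get the current weather in a given location"""

        location: str = Field(..., description="The city and state, e.g. San Francisco, CA")


    llm = ChatAnthropic(model="claude-3-opus-20240229", anthropic_api_key="placeholder")

    # A bare tool name is wrapped as {"type": "tool", "name": ...}.
    bound = llm.bind_tools([GetWeather], tool_choice="GetWeather")
    assert cast(RunnableBinding, bound).kwargs["tool_choice"] == {
        "type": "tool",
        "name": "GetWeather",
    }

    # "any" and "auto" become {"type": "any"} / {"type": "auto"}.
    bound = llm.bind_tools([GetWeather], tool_choice="any")
    assert cast(RunnableBinding, bound).kwargs["tool_choice"] == {"type": "any"}

    # A dict in Anthropic's native format is passed through unchanged.
    bound = llm.bind_tools([GetWeather], tool_choice={"type": "auto"})
    assert cast(RunnableBinding, bound).kwargs["tool_choice"] == {"type": "auto"}

Passing `tool_choice=None` (the default) adds nothing to the request, so the model stays free to answer without calling a tool.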


@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand.
 
 [[package]]
 name = "annotated-types"
@@ -16,13 +16,13 @@ typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""}
 
 [[package]]
 name = "anthropic"
-version = "0.25.9"
+version = "0.26.0"
 description = "The official Python library for the anthropic API"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "anthropic-0.25.9-py3-none-any.whl", hash = "sha256:d0b17d442160356a531593b237de55d3125cc6fa708f1268c214107e61c81c57"},
-    {file = "anthropic-0.25.9.tar.gz", hash = "sha256:a4ec810b1cfbf3340af99b6f5bf599a83d66986e0f572a5f3bc4ebcab284f629"},
+    {file = "anthropic-0.26.0-py3-none-any.whl", hash = "sha256:38fc415561d71dcf263b89da0cc6ecec498379b56256fc4242e9128bc707b283"},
+    {file = "anthropic-0.26.0.tar.gz", hash = "sha256:6aaffeb05d515cf9788eef57150a5f827f3786883628ccac71dbe5671ab6f44e"},
 ]
 
 [package.dependencies]
@@ -1212,4 +1212,4 @@ watchmedo = ["PyYAML (>=3.10)"]
 [metadata]
 lock-version = "2.0"
 python-versions = ">=3.8.1,<4.0"
-content-hash = "970ba277f7d892e877bf2ec55326417f4424d38a1fbf4e24149bad9d7862f7cd"
+content-hash = "cd763107b1195eb05aee179f906103be2ec292914f37d546818c89960ebf6d75"


@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "langchain-anthropic"
-version = "0.1.12"
+version = "0.1.13"
 description = "An integration package connecting AnthropicMessages and LangChain"
 authors = []
 readme = "README.md"
@@ -13,7 +13,7 @@ license = "MIT"
 [tool.poetry.dependencies]
 python = ">=3.8.1,<4.0"
 langchain-core = ">=0.1.43,<0.3"
-anthropic = ">=0.23.0,<1"
+anthropic = ">=0.26.0,<1"
 defusedxml = { version = "^0.7.1", optional = true }
 
 [tool.poetry.group.test]


@@ -3,6 +3,7 @@
 import json
 from typing import List
 
+import pytest
 from langchain_core.callbacks import CallbackManager
 from langchain_core.messages import (
     AIMessage,
@@ -14,6 +15,7 @@ from langchain_core.messages import (
 )
 from langchain_core.outputs import ChatGeneration, LLMResult
 from langchain_core.prompts import ChatPromptTemplate
+from langchain_core.pydantic_v1 import BaseModel, Field
 from langchain_core.tools import tool
 
 from langchain_anthropic import ChatAnthropic, ChatAnthropicMessages
@@ -329,3 +331,19 @@ def test_with_structured_output() -> None:
     response = structured_llm.invoke("what's the weather in san francisco, ca")
     assert isinstance(response, dict)
     assert response["location"]
+
+
+class GetWeather(BaseModel):
+    """Get the current weather in a given location"""
+
+    location: str = Field(..., description="The city and state, e.g. San Francisco, CA")
+
+
+@pytest.mark.parametrize("tool_choice", ["GetWeather", "auto", "any"])
+def test_anthropic_bind_tools_tool_choice(tool_choice: str) -> None:
+    chat_model = ChatAnthropic(
+        model="claude-3-sonnet-20240229",
+    )
+    chat_model_with_tools = chat_model.bind_tools([GetWeather], tool_choice=tool_choice)
+    response = chat_model_with_tools.invoke("what's the weather in ny and la")
+    assert isinstance(response, AIMessage)


@@ -8,6 +8,7 @@ from anthropic.types import ContentBlock, Message, Usage
 from langchain_core.messages import AIMessage, HumanMessage, SystemMessage, ToolMessage
 from langchain_core.outputs import ChatGeneration, ChatResult
 from langchain_core.pydantic_v1 import BaseModel, Field, SecretStr
+from langchain_core.runnables import RunnableBinding
 from langchain_core.tools import BaseTool
 from pytest import CaptureFixture, MonkeyPatch
 
@@ -469,3 +470,38 @@ def test_anthropic_uses_actual_secret_value_from_secretstr() -> None:
         cast(SecretStr, chat_model.anthropic_api_key).get_secret_value()
         == "secret-api-key"
     )
+
+
+class GetWeather(BaseModel):
+    """Get the current weather in a given location"""
+
+    location: str = Field(..., description="The city and state, e.g. San Francisco, CA")
+
+
+def test_anthropic_bind_tools_tool_choice() -> None:
+    chat_model = ChatAnthropic(
+        model="claude-3-opus-20240229",
+        anthropic_api_key="secret-api-key",
+    )
+    chat_model_with_tools = chat_model.bind_tools(
+        [GetWeather], tool_choice={"type": "tool", "name": "GetWeather"}
+    )
+    assert cast(RunnableBinding, chat_model_with_tools).kwargs["tool_choice"] == {
+        "type": "tool",
+        "name": "GetWeather",
+    }
+    chat_model_with_tools = chat_model.bind_tools(
+        [GetWeather], tool_choice="GetWeather"
+    )
+    assert cast(RunnableBinding, chat_model_with_tools).kwargs["tool_choice"] == {
+        "type": "tool",
+        "name": "GetWeather",
+    }
+    chat_model_with_tools = chat_model.bind_tools([GetWeather], tool_choice="auto")
+    assert cast(RunnableBinding, chat_model_with_tools).kwargs["tool_choice"] == {
+        "type": "auto"
+    }
+    chat_model_with_tools = chat_model.bind_tools([GetWeather], tool_choice="any")
+    assert cast(RunnableBinding, chat_model_with_tools).kwargs["tool_choice"] == {
+        "type": "any"
+    }