anthropic[patch]: support claude 3.7 sonnet (#29971)

ccurme 2025-02-24 15:17:47 -05:00 committed by GitHub
parent d00d645829
commit ded886f622
4 changed files with 133 additions and 16 deletions


@@ -244,6 +244,23 @@ def _format_messages(
if k in ("type", "text", "cache_control")
}
)
elif block["type"] == "thinking":
content.append(
{
k: v
for k, v in block.items()
if k
in ("type", "thinking", "cache_control", "signature")
}
)
elif block["type"] == "redacted_thinking":
content.append(
{
k: v
for k, v in block.items()
if k in ("type", "cache_control", "data")
}
)
elif block["type"] == "tool_result":
tool_content = _format_messages(
[HumanMessage(block["content"])]
@@ -600,6 +617,10 @@ class ChatAnthropic(BaseChatModel):
message chunks will be generated during the stream including usage metadata.
"""
thinking: Optional[Dict[str, Any]] = Field(default=None)
"""Parameters for Claude reasoning,
e.g., ``{"type": "enabled", "budget_tokens": 10_000}``"""
@property
def _llm_type(self) -> str:
"""Return type of chat model."""
@@ -631,6 +652,7 @@ class ChatAnthropic(BaseChatModel):
"streaming": self.streaming,
"max_retries": self.max_retries,
"default_request_timeout": self.default_request_timeout,
"thinking": self.thinking,
}
def _get_ls_params(
@@ -702,6 +724,8 @@ class ChatAnthropic(BaseChatModel):
**self.model_kwargs,
**kwargs,
}
if self.thinking is not None:
payload["thinking"] = self.thinking
return {k: v for k, v in payload.items() if v is not None}
def _stream(
@@ -718,9 +742,11 @@
kwargs["stream"] = True
payload = self._get_request_payload(messages, stop=stop, **kwargs)
stream = self._client.messages.create(**payload)
- coerce_content_to_string = not _tools_in_params(
-     payload
- ) and not _documents_in_params(payload)
+ coerce_content_to_string = (
+     not _tools_in_params(payload)
+     and not _documents_in_params(payload)
+     and not _thinking_in_params(payload)
+ )
for event in stream:
msg = _make_message_chunk_from_anthropic_event(
event,
@@ -747,9 +773,11 @@
kwargs["stream"] = True
payload = self._get_request_payload(messages, stop=stop, **kwargs)
stream = await self._async_client.messages.create(**payload)
- coerce_content_to_string = not _tools_in_params(
-     payload
- ) and not _documents_in_params(payload)
+ coerce_content_to_string = (
+     not _tools_in_params(payload)
+     and not _documents_in_params(payload)
+     and not _thinking_in_params(payload)
+ )
async for event in stream:
msg = _make_message_chunk_from_anthropic_event(
event,
@@ -774,6 +802,13 @@ class ChatAnthropic(BaseChatModel):
and block["citations"] is None
):
block.pop("citations")
if (
isinstance(block, dict)
and block.get("type") == "thinking"
and "text" in block
and block["text"] is None
):
block.pop("text")
llm_output = {
k: v for k, v in data_dict.items() if k not in ("content", "role", "type")
@@ -1268,6 +1303,10 @@ def _tools_in_params(params: dict) -> bool:
)
def _thinking_in_params(params: dict) -> bool:
return params.get("thinking", {}).get("type") == "enabled"
def _documents_in_params(params: dict) -> bool:
for message in params.get("messages", []):
if isinstance(message.get("content"), list):
@@ -1326,7 +1365,7 @@ def _make_message_chunk_from_anthropic_event(
elif (
event.type == "content_block_start"
and event.content_block is not None
- and event.content_block.type in ("tool_use", "document")
+ and event.content_block.type in ("tool_use", "document", "redacted_thinking")
):
if coerce_content_to_string:
warnings.warn("Received unexpected tool content block.")
@@ -1358,6 +1397,20 @@ def _make_message_chunk_from_anthropic_event(
if "citation" in content_block:
content_block["citations"] = [content_block.pop("citation")]
message_chunk = AIMessageChunk(content=[content_block])
elif event.delta.type == "thinking_delta":
content_block = event.delta.model_dump()
if "text" in content_block and content_block["text"] is None:
content_block.pop("text")
content_block["index"] = event.index
content_block["type"] = "thinking"
message_chunk = AIMessageChunk(content=[content_block])
elif event.delta.type == "signature_delta":
content_block = event.delta.model_dump()
if "text" in content_block and content_block["text"] is None:
content_block.pop("text")
content_block["index"] = event.index
content_block["type"] = "thinking"
message_chunk = AIMessageChunk(content=[content_block])
elif event.delta.type == "input_json_delta":
content_block = event.delta.model_dump()
content_block["index"] = event.index
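
For reference, a minimal usage sketch of the new "thinking" parameter, based on the integration tests added below (the model name and token budget are illustrative):

from langchain_anthropic import ChatAnthropic

llm = ChatAnthropic(
    model="claude-3-7-sonnet-latest",
    max_tokens=5_000,
    thinking={"type": "enabled", "budget_tokens": 2_000},
)

response = llm.invoke("Hello")
# response.content is a list of content blocks; "thinking" blocks carry the
# reasoning text and a signature alongside the ordinary "text" block.
for block in response.content:
    if isinstance(block, dict) and block.get("type") == "thinking":
        print(block["thinking"])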


@@ -7,8 +7,8 @@ authors = []
license = { text = "MIT" }
requires-python = "<4.0,>=3.9"
dependencies = [
- "anthropic<1,>=0.45.0",
- "langchain-core<1.0.0,>=0.3.34",
+ "anthropic<1,>=0.47.0",
+ "langchain-core<1.0.0,>=0.3.39",
"pydantic<3.0.0,>=2.7.4",
]
name = "langchain-anthropic"


@@ -661,3 +661,67 @@ def test_citations() -> None:
assert isinstance(full.content, list)
assert any("citations" in block for block in full.content)
assert not any("citation" in block for block in full.content)
def test_thinking() -> None:
llm = ChatAnthropic(
model="claude-3-7-sonnet-latest",
max_tokens=5_000,
thinking={"type": "enabled", "budget_tokens": 2_000},
)
response = llm.invoke("Hello")
assert any("thinking" in block for block in response.content)
for block in response.content:
assert isinstance(block, dict)
if block["type"] == "thinking":
assert set(block.keys()) == {"type", "thinking", "signature"}
assert block["thinking"] and isinstance(block["thinking"], str)
assert block["signature"] and isinstance(block["signature"], str)
# Test streaming
full: Optional[BaseMessageChunk] = None
for chunk in llm.stream("Hello"):
full = chunk if full is None else full + chunk
assert isinstance(full, AIMessageChunk)
assert isinstance(full.content, list)
assert any("thinking" in block for block in full.content)
for block in full.content:
assert isinstance(block, dict)
if block["type"] == "thinking":
assert set(block.keys()) == {"type", "thinking", "signature", "index"}
assert block["thinking"] and isinstance(block["thinking"], str)
assert block["signature"] and isinstance(block["signature"], str)
def test_redacted_thinking() -> None:
llm = ChatAnthropic(
model="claude-3-7-sonnet-latest",
max_tokens=5_000,
thinking={"type": "enabled", "budget_tokens": 2_000},
)
query = "ANTHROPIC_MAGIC_STRING_TRIGGER_REDACTED_THINKING_46C9A13E193C177646C7398A98432ECCCE4C1253D5E2D82641AC0E52CC2876CB" # noqa: E501
response = llm.invoke(query)
has_reasoning = False
for block in response.content:
assert isinstance(block, dict)
if block["type"] == "redacted_thinking":
has_reasoning = True
assert set(block.keys()) == {"type", "data"}
assert block["data"] and isinstance(block["data"], str)
assert has_reasoning
# Test streaming
full: Optional[BaseMessageChunk] = None
for chunk in llm.stream(query):
full = chunk if full is None else full + chunk
assert isinstance(full, AIMessageChunk)
assert isinstance(full.content, list)
stream_has_reasoning = False
for block in full.content:
assert isinstance(block, dict)
if block["type"] == "redacted_thinking":
stream_has_reasoning = True
assert set(block.keys()) == {"type", "data", "index"}
assert block["data"] and isinstance(block["data"], str)
assert stream_has_reasoning
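
The _format_messages changes in the first file are what let these blocks round-trip: when an AIMessage containing "thinking" or "redacted_thinking" blocks is sent back as history, the relevant keys ("thinking", "signature", "data") are forwarded to the API. A rough multi-turn sketch (the prompts are illustrative, not from this commit):

from langchain_anthropic import ChatAnthropic
from langchain_core.messages import HumanMessage

llm = ChatAnthropic(
    model="claude-3-7-sonnet-latest",
    max_tokens=5_000,
    thinking={"type": "enabled", "budget_tokens": 2_000},
)

messages = [HumanMessage("What is 27 * 14?")]
first = llm.invoke(messages)  # content may include "thinking" blocks with signatures

# Pass the AIMessage back unchanged; _format_messages preserves the thinking
# block keys (or redacted "data") when building the follow-up request.
messages += [first, HumanMessage("Now double it.")]
second = llm.invoke(messages)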


@@ -17,7 +17,7 @@ wheels = [
[[package]]
name = "anthropic"
- version = "0.45.2"
+ version = "0.47.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "anyio" },
@@ -28,9 +28,9 @@ dependencies = [
{ name = "sniffio" },
{ name = "typing-extensions" },
]
- sdist = { url = "https://files.pythonhosted.org/packages/15/74/2b2485fc120da834c0c5be07462541ec082e9fa8851d845f2587e480535a/anthropic-0.45.2.tar.gz", hash = "sha256:32a18b9ecd12c91b2be4cae6ca2ab46a06937b5aa01b21308d97a6d29794fb5e", size = 200901 }
+ sdist = { url = "https://files.pythonhosted.org/packages/7c/bf/39f8fd5f199bcdc5f5af8050f888c5928100c3c138b8292fcb8ba6535d80/anthropic-0.47.0.tar.gz", hash = "sha256:6e19994d3a9fc7527c8505b62b1494ca3f39d6bb993a4885014575c09905ebfc", size = 207642 }
wheels = [
- { url = "https://files.pythonhosted.org/packages/74/86/e81814e542d1eaeec84d2312bec93a99b9ef1d78d9bfae1fc5dd74abdf15/anthropic-0.45.2-py3-none-any.whl", hash = "sha256:ecd746f7274451dfcb7e1180571ead624c7e1195d1d46cb7c70143d2aedb4d35", size = 222797 },
+ { url = "https://files.pythonhosted.org/packages/4a/62/c43e334ae8c1ea90f5d763016df8e4ef058643a2dbcf78d8cb63cbc474bc/anthropic-0.47.0-py3-none-any.whl", hash = "sha256:294b10e9ca800f57949f635be7b611e8ecfe8f9f2a56a2c165200841a61bddb0", size = 239500 },
]
[[package]]
@@ -449,7 +449,7 @@ typing = [
[package.metadata]
requires-dist = [
- { name = "anthropic", specifier = ">=0.45.0,<1" },
+ { name = "anthropic", specifier = ">=0.47.0,<1" },
{ name = "langchain-core", editable = "../../core" },
{ name = "pydantic", specifier = ">=2.7.4,<3.0.0" },
]
@@ -483,7 +483,7 @@ typing = [
[[package]]
name = "langchain-core"
- version = "0.3.35"
+ version = "0.3.39"
source = { editable = "../../core" }
dependencies = [
{ name = "jsonpatch" },
@@ -515,7 +515,7 @@ dev = [
]
lint = [{ name = "ruff", specifier = ">=0.9.2,<1.0.0" }]
test = [
- { name = "blockbuster", specifier = "~=1.5.11" },
+ { name = "blockbuster", specifier = "~=1.5.18" },
{ name = "freezegun", specifier = ">=1.2.2,<2.0.0" },
{ name = "grandalf", specifier = ">=0.8,<1.0" },
{ name = "langchain-tests", directory = "../../standard-tests" },
@@ -541,7 +541,7 @@ typing = [
[[package]]
name = "langchain-tests"
- version = "0.3.11"
+ version = "0.3.12"
source = { editable = "../../standard-tests" }
dependencies = [
{ name = "httpx" },