Mirror of https://github.com/hwchase17/langchain.git — synced 2026-02-21 22:56:05 +00:00
fix(anthropic): replace retired model IDs in tests and docstrings (#35365)
## Summary

- Replace `claude-3-5-haiku-20241022` and `claude-3-7-sonnet-20250219` with `claude-haiku-4-5-20251001` and `claude-sonnet-4-5-20250929`, respectively.
- Both models were retired by Anthropic on February 19, 2026, causing all Anthropic integration tests to fail.
- Updates integration tests, a unit test, and docstring examples in `langchain-core`.

See: https://platform.claude.com/docs/en/docs/resources/model-deprecations
This commit is contained in:
```diff
@@ -24,7 +24,7 @@ class UsageMetadataCallbackHandler(BaseCallbackHandler):
 from langchain_core.callbacks import UsageMetadataCallbackHandler

 llm_1 = init_chat_model(model="openai:gpt-4o-mini")
-llm_2 = init_chat_model(model="anthropic:claude-3-5-haiku-20241022")
+llm_2 = init_chat_model(model="anthropic:claude-haiku-4-5-20251001")

 callback = UsageMetadataCallbackHandler()
 result_1 = llm_1.invoke("Hello", config={"callbacks": [callback]})
```
```diff
@@ -38,7 +38,7 @@ class UsageMetadataCallbackHandler(BaseCallbackHandler):
 'total_tokens': 18,
 'input_token_details': {'audio': 0, 'cache_read': 0},
 'output_token_details': {'audio': 0, 'reasoning': 0}},
-'claude-3-5-haiku-20241022': {'input_tokens': 8,
+'claude-haiku-4-5-20251001': {'input_tokens': 8,
 'output_tokens': 21,
 'total_tokens': 29,
 'input_token_details': {'cache_read': 0, 'cache_creation': 0}}}
```
```diff
@@ -110,7 +110,7 @@ def get_usage_metadata_callback(
 from langchain_core.callbacks import get_usage_metadata_callback

 llm_1 = init_chat_model(model="openai:gpt-4o-mini")
-llm_2 = init_chat_model(model="anthropic:claude-3-5-haiku-20241022")
+llm_2 = init_chat_model(model="anthropic:claude-haiku-4-5-20251001")

 with get_usage_metadata_callback() as cb:
     llm_1.invoke("Hello")
```
```diff
@@ -127,7 +127,7 @@ def get_usage_metadata_callback(
 "input_token_details": {"audio": 0, "cache_read": 0},
 "output_token_details": {"audio": 0, "reasoning": 0},
 },
-"claude-3-5-haiku-20241022": {
+"claude-haiku-4-5-20251001": {
 "input_tokens": 8,
 "output_tokens": 21,
 "total_tokens": 29,
```
```diff
@@ -35,7 +35,7 @@ from langchain_anthropic import ChatAnthropic
 from langchain_anthropic._compat import _convert_from_v1_to_anthropic
 from tests.unit_tests._utils import FakeCallbackHandler

-MODEL_NAME = "claude-3-5-haiku-20241022"
+MODEL_NAME = "claude-haiku-4-5-20251001"


 def test_stream() -> None:
```
```diff
@@ -454,7 +454,7 @@ async def test_astreaming() -> None:

 def test_tool_use() -> None:
     llm = ChatAnthropic(
-        model="claude-3-7-sonnet-20250219",  # type: ignore[call-arg]
+        model="claude-sonnet-4-5-20250929",  # type: ignore[call-arg]
         temperature=0,
     )
     tool_definition = {
```
```diff
@@ -487,7 +487,7 @@ def test_tool_use() -> None:

     # Test streaming
     llm = ChatAnthropic(
-        model="claude-3-7-sonnet-20250219",  # type: ignore[call-arg]
+        model="claude-sonnet-4-5-20250929",  # type: ignore[call-arg]
         temperature=0,
         # Add extra headers to also test token-efficient tools
         model_kwargs={
```
```diff
@@ -12,7 +12,7 @@ from langchain_anthropic import ChatAnthropic

 REPO_ROOT_DIR = Path(__file__).parents[5]

-MODEL = "claude-3-5-haiku-20241022"
+MODEL = "claude-haiku-4-5-20251001"


 class TestAnthropicStandard(ChatModelIntegrationTests):
```
```diff
@@ -48,7 +48,7 @@ def test_init_time_with_client(benchmark: BenchmarkFixture) -> None:

 def _init_in_loop_with_clients() -> None:
     for _ in range(10):
-        llm = ChatAnthropic(model="claude-3-5-haiku-20241022")
+        llm = ChatAnthropic(model="claude-haiku-4-5-20251001")
         _ = llm._client
         _ = llm._async_client

```
Reference in New Issue
Block a user