mirror of https://github.com/hwchase17/langchain.git
synced 2025-09-17 15:35:14 +00:00
infra: bump anthropic mypy 1 (#22373)
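The diff below adds inline "# type: ignore[...]" suppressions to the Anthropic integration tests after a mypy version bump. A minimal sketch of why a stricter mypy can report [call-arg] twice on these constructor calls, using a hypothetical Chat model in place of the real pydantic classes in langchain_anthropic (the field and alias names here are assumptions, not the library's actual declarations):

    # Hypothetical reduction: a pydantic field declared as `model` but
    # populated through its alias `model_name`. Runtime accepts the alias,
    # but mypy's synthesized __init__ only knows the declared name, so it
    # reports "Unexpected keyword argument" and "Missing named argument"
    # -- two [call-arg] errors, hence the doubled ignore code.
    from pydantic import BaseModel, Field

    class Chat(BaseModel):
        model: str = Field(alias="model_name")

    chat = Chat(model_name="claude-3-sonnet-20240229")  # type: ignore[call-arg, call-arg]
    print(chat.model)  # -> "claude-3-sonnet-20240229"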
@@ -26,7 +26,7 @@ MODEL_NAME = "claude-3-sonnet-20240229"

 def test_stream() -> None:
     """Test streaming tokens from Anthropic."""
-    llm = ChatAnthropicMessages(model_name=MODEL_NAME)
+    llm = ChatAnthropicMessages(model_name=MODEL_NAME)  # type: ignore[call-arg, call-arg]

     for token in llm.stream("I'm Pickle Rick"):
         assert isinstance(token.content, str)
@@ -34,7 +34,7 @@ def test_stream() -> None:

 async def test_astream() -> None:
     """Test streaming tokens from Anthropic."""
-    llm = ChatAnthropicMessages(model_name=MODEL_NAME)
+    llm = ChatAnthropicMessages(model_name=MODEL_NAME)  # type: ignore[call-arg, call-arg]

     async for token in llm.astream("I'm Pickle Rick"):
         assert isinstance(token.content, str)
@@ -42,7 +42,7 @@ async def test_astream() -> None:

 async def test_abatch() -> None:
     """Test streaming tokens from ChatAnthropicMessages."""
-    llm = ChatAnthropicMessages(model_name=MODEL_NAME)
+    llm = ChatAnthropicMessages(model_name=MODEL_NAME)  # type: ignore[call-arg, call-arg]

     result = await llm.abatch(["I'm Pickle Rick", "I'm not Pickle Rick"])
     for token in result:
@@ -51,7 +51,7 @@ async def test_abatch() -> None:

 async def test_abatch_tags() -> None:
     """Test batch tokens from ChatAnthropicMessages."""
-    llm = ChatAnthropicMessages(model_name=MODEL_NAME)
+    llm = ChatAnthropicMessages(model_name=MODEL_NAME)  # type: ignore[call-arg, call-arg]

     result = await llm.abatch(
         ["I'm Pickle Rick", "I'm not Pickle Rick"], config={"tags": ["foo"]}
@@ -62,7 +62,7 @@ async def test_abatch_tags() -> None:

 def test_batch() -> None:
     """Test batch tokens from ChatAnthropicMessages."""
-    llm = ChatAnthropicMessages(model_name=MODEL_NAME)
+    llm = ChatAnthropicMessages(model_name=MODEL_NAME)  # type: ignore[call-arg, call-arg]

     result = llm.batch(["I'm Pickle Rick", "I'm not Pickle Rick"])
     for token in result:
@@ -71,7 +71,7 @@ def test_batch() -> None:

 async def test_ainvoke() -> None:
     """Test invoke tokens from ChatAnthropicMessages."""
-    llm = ChatAnthropicMessages(model_name=MODEL_NAME)
+    llm = ChatAnthropicMessages(model_name=MODEL_NAME)  # type: ignore[call-arg, call-arg]

     result = await llm.ainvoke("I'm Pickle Rick", config={"tags": ["foo"]})
     assert isinstance(result.content, str)
@@ -79,7 +79,7 @@ async def test_ainvoke() -> None:

 def test_invoke() -> None:
     """Test invoke tokens from ChatAnthropicMessages."""
-    llm = ChatAnthropicMessages(model_name=MODEL_NAME)
+    llm = ChatAnthropicMessages(model_name=MODEL_NAME)  # type: ignore[call-arg, call-arg]

     result = llm.invoke("I'm Pickle Rick", config=dict(tags=["foo"]))
     assert isinstance(result.content, str)
@@ -87,7 +87,7 @@ def test_invoke() -> None:

 def test_system_invoke() -> None:
     """Test invoke tokens with a system message"""
-    llm = ChatAnthropicMessages(model_name=MODEL_NAME)
+    llm = ChatAnthropicMessages(model_name=MODEL_NAME)  # type: ignore[call-arg, call-arg]

     prompt = ChatPromptTemplate.from_messages(
         [
@@ -108,7 +108,7 @@ def test_system_invoke() -> None:

 def test_anthropic_call() -> None:
     """Test valid call to anthropic."""
-    chat = ChatAnthropic(model="test")
+    chat = ChatAnthropic(model="test")  # type: ignore[call-arg]
     message = HumanMessage(content="Hello")
     response = chat.invoke([message])
     assert isinstance(response, AIMessage)
@@ -117,7 +117,7 @@ def test_anthropic_call() -> None:

 def test_anthropic_generate() -> None:
     """Test generate method of anthropic."""
-    chat = ChatAnthropic(model="test")
+    chat = ChatAnthropic(model="test")  # type: ignore[call-arg]
     chat_messages: List[List[BaseMessage]] = [
         [HumanMessage(content="How many toes do dogs have?")]
     ]
@@ -133,7 +133,7 @@ def test_anthropic_generate() -> None:

 def test_anthropic_streaming() -> None:
     """Test streaming tokens from anthropic."""
-    chat = ChatAnthropic(model="test")
+    chat = ChatAnthropic(model="test")  # type: ignore[call-arg]
     message = HumanMessage(content="Hello")
     response = chat.stream([message])
     for token in response:
@@ -145,7 +145,7 @@ def test_anthropic_streaming_callback() -> None:
     """Test that streaming correctly invokes on_llm_new_token callback."""
     callback_handler = FakeCallbackHandler()
     callback_manager = CallbackManager([callback_handler])
-    chat = ChatAnthropic(
+    chat = ChatAnthropic(  # type: ignore[call-arg]
         model="test",
         callback_manager=callback_manager,
         verbose=True,
@@ -161,7 +161,7 @@ async def test_anthropic_async_streaming_callback() -> None:
     """Test that streaming correctly invokes on_llm_new_token callback."""
     callback_handler = FakeCallbackHandler()
     callback_manager = CallbackManager([callback_handler])
-    chat = ChatAnthropic(
+    chat = ChatAnthropic(  # type: ignore[call-arg]
         model="test",
         callback_manager=callback_manager,
         verbose=True,
@@ -177,7 +177,7 @@ async def test_anthropic_async_streaming_callback() -> None:

 def test_anthropic_multimodal() -> None:
     """Test that multimodal inputs are handled correctly."""
-    chat = ChatAnthropic(model=MODEL_NAME)
+    chat = ChatAnthropic(model=MODEL_NAME)  # type: ignore[call-arg]
     messages = [
         HumanMessage(
             content=[
@@ -202,7 +202,7 @@ def test_streaming() -> None:
     callback_handler = FakeCallbackHandler()
     callback_manager = CallbackManager([callback_handler])

-    llm = ChatAnthropicMessages(
+    llm = ChatAnthropicMessages(  # type: ignore[call-arg, call-arg]
         model_name=MODEL_NAME, streaming=True, callback_manager=callback_manager
     )

@@ -216,7 +216,7 @@ async def test_astreaming() -> None:
     callback_handler = FakeCallbackHandler()
     callback_manager = CallbackManager([callback_handler])

-    llm = ChatAnthropicMessages(
+    llm = ChatAnthropicMessages(  # type: ignore[call-arg, call-arg]
         model_name=MODEL_NAME, streaming=True, callback_manager=callback_manager
     )

@@ -226,7 +226,7 @@ async def test_astreaming() -> None:


 def test_tool_use() -> None:
-    llm = ChatAnthropic(
+    llm = ChatAnthropic(  # type: ignore[call-arg]
         model="claude-3-opus-20240229",
     )

@@ -277,7 +277,7 @@ def test_anthropic_with_empty_text_block() -> None:
         """Type the given letter."""
         return "OK"

-    model = ChatAnthropic(model="claude-3-opus-20240229", temperature=0).bind_tools(
+    model = ChatAnthropic(model="claude-3-opus-20240229", temperature=0).bind_tools(  # type: ignore[call-arg]
         [type_letter]
     )

@@ -314,7 +314,7 @@ def test_anthropic_with_empty_text_block() -> None:


 def test_with_structured_output() -> None:
-    llm = ChatAnthropic(
+    llm = ChatAnthropic(  # type: ignore[call-arg]
         model="claude-3-opus-20240229",
     )

@@ -341,7 +341,7 @@ class GetWeather(BaseModel):

 @pytest.mark.parametrize("tool_choice", ["GetWeather", "auto", "any"])
 def test_anthropic_bind_tools_tool_choice(tool_choice: str) -> None:
-    chat_model = ChatAnthropic(
+    chat_model = ChatAnthropic(  # type: ignore[call-arg]
         model="claude-3-sonnet-20240229",
     )
     chat_model_with_tools = chat_model.bind_tools([GetWeather], tool_choice=tool_choice)
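To surface the exact errors these suppressions silence, mypy can be run over the test file; a sketch using mypy's documented Python API, with the file path assumed from the repository layout:

    # Sketch: run mypy programmatically and print its report. The path is an
    # assumption about where this test file lives in the langchain checkout.
    from mypy import api

    stdout, stderr, exit_code = api.run(
        ["libs/partners/anthropic/tests/integration_tests/test_chat_models.py"]
    )
    print(stdout)  # expect "error: ... [call-arg]" lines matching the ignores above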