Mirror of https://github.com/hwchase17/langchain.git (synced 2025-05-30 03:28:40 +00:00)

commit 678a19a5f7 (parent ceb73ad06f)
infra: bump anthropic mypy 1 (#22373)
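At a glance: the diff below (a) bumps the anthropic partner package's mypy dev dependency from 0.991 to the 1.x line (1.10.0 in the regenerated poetry.lock), and (b) adds error-code-scoped # type: ignore[...] comments in source and tests where the newer checker reports errors.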
@@ -104,7 +104,7 @@ def _merge_messages(
         curr = curr.copy(deep=True)
         if isinstance(curr, ToolMessage):
             if isinstance(curr.content, str):
-                curr = HumanMessage(
+                curr = HumanMessage(  # type: ignore[misc]
                     [
                         {
                             "type": "tool_result",
@@ -114,7 +114,7 @@ def _merge_messages(
                     ]
                 )
             else:
-                curr = HumanMessage(curr.content)
+                curr = HumanMessage(curr.content)  # type: ignore[misc]
         last = merged[-1] if merged else None
         if isinstance(last, HumanMessage) and isinstance(curr, HumanMessage):
             if isinstance(last.content, str):
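A minimal, hedged sketch of the suppression pattern above (assumes langchain-core is installed): the bumped checker reports the positional content argument to the message constructors under the misc error code, and the inline comment silences only that code on the line.

from langchain_core.messages import HumanMessage

# Positional list-of-blocks content is valid at runtime; the ignore only
# quiets mypy's `misc` report on this constructor call.
curr = HumanMessage(  # type: ignore[misc]
    [{"type": "tool_result", "content": "buz output", "tool_use_id": "1"}]
)
print(curr.content)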
@@ -425,7 +425,7 @@ class ChatAnthropic(BaseChatModel):
                 ]
                 message_chunk = AIMessageChunk(
                     content=message.content,
-                    tool_call_chunks=tool_call_chunks,
+                    tool_call_chunks=tool_call_chunks,  # type: ignore[arg-type]
                 )
                 yield ChatGenerationChunk(message=message_chunk)
             else:
@@ -464,7 +464,7 @@ class ChatAnthropic(BaseChatModel):
                 ]
                 message_chunk = AIMessageChunk(
                     content=message.content,
-                    tool_call_chunks=tool_call_chunks,
+                    tool_call_chunks=tool_call_chunks,  # type: ignore[arg-type]
                 )
                 yield ChatGenerationChunk(message=message_chunk)
             else:
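Similarly hedged, for the arg-type suppression above: the chunks are built as plain dicts, which the stricter checker no longer accepts silently where langchain-core's ToolCallChunk type is expected, although the runtime accepts them. Field names below are copied from ToolCallChunk.

from langchain_core.messages import AIMessageChunk

tool_call_chunks = [
    {"name": "bar", "args": '{"baz": "buzz"}', "id": "1", "index": 0}
]
message_chunk = AIMessageChunk(
    content="",
    tool_call_chunks=tool_call_chunks,  # type: ignore[arg-type]
)
print(message_chunk.tool_calls)  # parsed from the chunks above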
Changed file: libs/partners/anthropic/poetry.lock (generated; 69 lines changed)
@@ -568,52 +568,49 @@ requests = ">=2,<3"

 [[package]]
 name = "mypy"
-version = "0.991"
+version = "1.10.0"
 description = "Optional static typing for Python"
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
 files = [
-    {file = "mypy-0.991-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7d17e0a9707d0772f4a7b878f04b4fd11f6f5bcb9b3813975a9b13c9332153ab"},
-    {file = "mypy-0.991-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0714258640194d75677e86c786e80ccf294972cc76885d3ebbb560f11db0003d"},
-    {file = "mypy-0.991-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0c8f3be99e8a8bd403caa8c03be619544bc2c77a7093685dcf308c6b109426c6"},
-    {file = "mypy-0.991-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc9ec663ed6c8f15f4ae9d3c04c989b744436c16d26580eaa760ae9dd5d662eb"},
-    {file = "mypy-0.991-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4307270436fd7694b41f913eb09210faff27ea4979ecbcd849e57d2da2f65305"},
-    {file = "mypy-0.991-cp310-cp310-win_amd64.whl", hash = "sha256:901c2c269c616e6cb0998b33d4adbb4a6af0ac4ce5cd078afd7bc95830e62c1c"},
-    {file = "mypy-0.991-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d13674f3fb73805ba0c45eb6c0c3053d218aa1f7abead6e446d474529aafc372"},
-    {file = "mypy-0.991-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1c8cd4fb70e8584ca1ed5805cbc7c017a3d1a29fb450621089ffed3e99d1857f"},
-    {file = "mypy-0.991-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:209ee89fbb0deed518605edddd234af80506aec932ad28d73c08f1400ef80a33"},
-    {file = "mypy-0.991-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37bd02ebf9d10e05b00d71302d2c2e6ca333e6c2a8584a98c00e038db8121f05"},
-    {file = "mypy-0.991-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:26efb2fcc6b67e4d5a55561f39176821d2adf88f2745ddc72751b7890f3194ad"},
-    {file = "mypy-0.991-cp311-cp311-win_amd64.whl", hash = "sha256:3a700330b567114b673cf8ee7388e949f843b356a73b5ab22dd7cff4742a5297"},
-    {file = "mypy-0.991-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:1f7d1a520373e2272b10796c3ff721ea1a0712288cafaa95931e66aa15798813"},
-    {file = "mypy-0.991-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:641411733b127c3e0dab94c45af15fea99e4468f99ac88b39efb1ad677da5711"},
-    {file = "mypy-0.991-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3d80e36b7d7a9259b740be6d8d906221789b0d836201af4234093cae89ced0cd"},
-    {file = "mypy-0.991-cp37-cp37m-win_amd64.whl", hash = "sha256:e62ebaad93be3ad1a828a11e90f0e76f15449371ffeecca4a0a0b9adc99abcef"},
-    {file = "mypy-0.991-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b86ce2c1866a748c0f6faca5232059f881cda6dda2a893b9a8373353cfe3715a"},
-    {file = "mypy-0.991-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac6e503823143464538efda0e8e356d871557ef60ccd38f8824a4257acc18d93"},
-    {file = "mypy-0.991-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0cca5adf694af539aeaa6ac633a7afe9bbd760df9d31be55ab780b77ab5ae8bf"},
-    {file = "mypy-0.991-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a12c56bf73cdab116df96e4ff39610b92a348cc99a1307e1da3c3768bbb5b135"},
-    {file = "mypy-0.991-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:652b651d42f155033a1967739788c436491b577b6a44e4c39fb340d0ee7f0d70"},
-    {file = "mypy-0.991-cp38-cp38-win_amd64.whl", hash = "sha256:4175593dc25d9da12f7de8de873a33f9b2b8bdb4e827a7cae952e5b1a342e243"},
-    {file = "mypy-0.991-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:98e781cd35c0acf33eb0295e8b9c55cdbef64fcb35f6d3aa2186f289bed6e80d"},
-    {file = "mypy-0.991-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6d7464bac72a85cb3491c7e92b5b62f3dcccb8af26826257760a552a5e244aa5"},
-    {file = "mypy-0.991-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c9166b3f81a10cdf9b49f2d594b21b31adadb3d5e9db9b834866c3258b695be3"},
-    {file = "mypy-0.991-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8472f736a5bfb159a5e36740847808f6f5b659960115ff29c7cecec1741c648"},
-    {file = "mypy-0.991-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e80e758243b97b618cdf22004beb09e8a2de1af481382e4d84bc52152d1c476"},
-    {file = "mypy-0.991-cp39-cp39-win_amd64.whl", hash = "sha256:74e259b5c19f70d35fcc1ad3d56499065c601dfe94ff67ae48b85596b9ec1461"},
-    {file = "mypy-0.991-py3-none-any.whl", hash = "sha256:de32edc9b0a7e67c2775e574cb061a537660e51210fbf6006b0b36ea695ae9bb"},
-    {file = "mypy-0.991.tar.gz", hash = "sha256:3c0165ba8f354a6d9881809ef29f1a9318a236a6d81c690094c5df32107bde06"},
+    {file = "mypy-1.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:da1cbf08fb3b851ab3b9523a884c232774008267b1f83371ace57f412fe308c2"},
+    {file = "mypy-1.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:12b6bfc1b1a66095ab413160a6e520e1dc076a28f3e22f7fb25ba3b000b4ef99"},
+    {file = "mypy-1.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e36fb078cce9904c7989b9693e41cb9711e0600139ce3970c6ef814b6ebc2b2"},
+    {file = "mypy-1.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2b0695d605ddcd3eb2f736cd8b4e388288c21e7de85001e9f85df9187f2b50f9"},
+    {file = "mypy-1.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:cd777b780312ddb135bceb9bc8722a73ec95e042f911cc279e2ec3c667076051"},
+    {file = "mypy-1.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3be66771aa5c97602f382230165b856c231d1277c511c9a8dd058be4784472e1"},
+    {file = "mypy-1.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8b2cbaca148d0754a54d44121b5825ae71868c7592a53b7292eeb0f3fdae95ee"},
+    {file = "mypy-1.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ec404a7cbe9fc0e92cb0e67f55ce0c025014e26d33e54d9e506a0f2d07fe5de"},
+    {file = "mypy-1.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e22e1527dc3d4aa94311d246b59e47f6455b8729f4968765ac1eacf9a4760bc7"},
+    {file = "mypy-1.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:a87dbfa85971e8d59c9cc1fcf534efe664d8949e4c0b6b44e8ca548e746a8d53"},
+    {file = "mypy-1.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a781f6ad4bab20eef8b65174a57e5203f4be627b46291f4589879bf4e257b97b"},
+    {file = "mypy-1.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b808e12113505b97d9023b0b5e0c0705a90571c6feefc6f215c1df9381256e30"},
+    {file = "mypy-1.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f55583b12156c399dce2df7d16f8a5095291354f1e839c252ec6c0611e86e2e"},
+    {file = "mypy-1.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4cf18f9d0efa1b16478c4c129eabec36148032575391095f73cae2e722fcf9d5"},
+    {file = "mypy-1.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:bc6ac273b23c6b82da3bb25f4136c4fd42665f17f2cd850771cb600bdd2ebeda"},
+    {file = "mypy-1.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9fd50226364cd2737351c79807775136b0abe084433b55b2e29181a4c3c878c0"},
+    {file = "mypy-1.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f90cff89eea89273727d8783fef5d4a934be2fdca11b47def50cf5d311aff727"},
+    {file = "mypy-1.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fcfc70599efde5c67862a07a1aaf50e55bce629ace26bb19dc17cece5dd31ca4"},
+    {file = "mypy-1.10.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:075cbf81f3e134eadaf247de187bd604748171d6b79736fa9b6c9685b4083061"},
+    {file = "mypy-1.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:3f298531bca95ff615b6e9f2fc0333aae27fa48052903a0ac90215021cdcfa4f"},
+    {file = "mypy-1.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fa7ef5244615a2523b56c034becde4e9e3f9b034854c93639adb667ec9ec2976"},
+    {file = "mypy-1.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3236a4c8f535a0631f85f5fcdffba71c7feeef76a6002fcba7c1a8e57c8be1ec"},
+    {file = "mypy-1.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a2b5cdbb5dd35aa08ea9114436e0d79aceb2f38e32c21684dcf8e24e1e92821"},
+    {file = "mypy-1.10.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92f93b21c0fe73dc00abf91022234c79d793318b8a96faac147cd579c1671746"},
+    {file = "mypy-1.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:28d0e038361b45f099cc086d9dd99c15ff14d0188f44ac883010e172ce86c38a"},
+    {file = "mypy-1.10.0-py3-none-any.whl", hash = "sha256:f8c083976eb530019175aabadb60921e73b4f45736760826aa1689dda8208aee"},
+    {file = "mypy-1.10.0.tar.gz", hash = "sha256:3d087fcbec056c4ee34974da493a826ce316947485cef3901f511848e687c131"},
 ]

 [package.dependencies]
-mypy-extensions = ">=0.4.3"
+mypy-extensions = ">=1.0.0"
 tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
-typing-extensions = ">=3.10"
+typing-extensions = ">=4.1.0"

 [package.extras]
 dmypy = ["psutil (>=4.0)"]
 install-types = ["pip"]
-python2 = ["typed-ast (>=1.4.0,<2)"]
+mypyc = ["setuptools (>=50)"]
 reports = ["lxml"]

 [[package]]
@@ -1285,4 +1282,4 @@ watchmedo = ["PyYAML (>=3.10)"]
 [metadata]
 lock-version = "2.0"
 python-versions = ">=3.8.1,<4.0"
-content-hash = "a88c10c902a287792de08135f1c17391a89c7363a30c8d55a185f0c90efc22ac"
+content-hash = "e1cff75b89d41dd6b5bf1fc13f2a8c777f3820936773ed5ebaecae185db28249"
@@ -41,10 +41,9 @@ optional = true

 [tool.poetry.group.lint.dependencies]
 ruff = ">=0.2.2,<1"
-mypy = "^0.991"

 [tool.poetry.group.typing.dependencies]
-mypy = "^0.991"
+mypy = "^1"
 langchain-core = { path = "../../core", develop = true }

 [tool.poetry.group.dev]
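A hedged aside on the new pin: "^1" admits any mypy 1.x release (1.10.0 per the lock file above). The ignores throughout this diff are error-code-scoped, a style mypy supports so that only the named code is muted on that line; a standalone toy example:

from typing import List

def lengths(items: List[str]) -> List[int]:
    return [len(item) for item in items]

# Only `arg-type` is silenced here; any other mypy error on this line
# would still be reported.
print(lengths(("a", "bb")))  # type: ignore[arg-type]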
@@ -26,7 +26,7 @@ MODEL_NAME = "claude-3-sonnet-20240229"

 def test_stream() -> None:
     """Test streaming tokens from Anthropic."""
-    llm = ChatAnthropicMessages(model_name=MODEL_NAME)
+    llm = ChatAnthropicMessages(model_name=MODEL_NAME)  # type: ignore[call-arg, call-arg]

     for token in llm.stream("I'm Pickle Rick"):
         assert isinstance(token.content, str)
@@ -34,7 +34,7 @@ def test_stream() -> None:

 async def test_astream() -> None:
     """Test streaming tokens from Anthropic."""
-    llm = ChatAnthropicMessages(model_name=MODEL_NAME)
+    llm = ChatAnthropicMessages(model_name=MODEL_NAME)  # type: ignore[call-arg, call-arg]

     async for token in llm.astream("I'm Pickle Rick"):
         assert isinstance(token.content, str)
@@ -42,7 +42,7 @@ async def test_astream() -> None:

 async def test_abatch() -> None:
     """Test streaming tokens from ChatAnthropicMessages."""
-    llm = ChatAnthropicMessages(model_name=MODEL_NAME)
+    llm = ChatAnthropicMessages(model_name=MODEL_NAME)  # type: ignore[call-arg, call-arg]

     result = await llm.abatch(["I'm Pickle Rick", "I'm not Pickle Rick"])
     for token in result:
@@ -51,7 +51,7 @@ async def test_abatch() -> None:

 async def test_abatch_tags() -> None:
     """Test batch tokens from ChatAnthropicMessages."""
-    llm = ChatAnthropicMessages(model_name=MODEL_NAME)
+    llm = ChatAnthropicMessages(model_name=MODEL_NAME)  # type: ignore[call-arg, call-arg]

     result = await llm.abatch(
         ["I'm Pickle Rick", "I'm not Pickle Rick"], config={"tags": ["foo"]}
@@ -62,7 +62,7 @@ async def test_abatch_tags() -> None:

 def test_batch() -> None:
     """Test batch tokens from ChatAnthropicMessages."""
-    llm = ChatAnthropicMessages(model_name=MODEL_NAME)
+    llm = ChatAnthropicMessages(model_name=MODEL_NAME)  # type: ignore[call-arg, call-arg]

     result = llm.batch(["I'm Pickle Rick", "I'm not Pickle Rick"])
     for token in result:
@@ -71,7 +71,7 @@ def test_batch() -> None:

 async def test_ainvoke() -> None:
     """Test invoke tokens from ChatAnthropicMessages."""
-    llm = ChatAnthropicMessages(model_name=MODEL_NAME)
+    llm = ChatAnthropicMessages(model_name=MODEL_NAME)  # type: ignore[call-arg, call-arg]

     result = await llm.ainvoke("I'm Pickle Rick", config={"tags": ["foo"]})
     assert isinstance(result.content, str)
@@ -79,7 +79,7 @@ async def test_ainvoke() -> None:

 def test_invoke() -> None:
     """Test invoke tokens from ChatAnthropicMessages."""
-    llm = ChatAnthropicMessages(model_name=MODEL_NAME)
+    llm = ChatAnthropicMessages(model_name=MODEL_NAME)  # type: ignore[call-arg, call-arg]

     result = llm.invoke("I'm Pickle Rick", config=dict(tags=["foo"]))
     assert isinstance(result.content, str)
@@ -87,7 +87,7 @@ def test_invoke() -> None:

 def test_system_invoke() -> None:
     """Test invoke tokens with a system message"""
-    llm = ChatAnthropicMessages(model_name=MODEL_NAME)
+    llm = ChatAnthropicMessages(model_name=MODEL_NAME)  # type: ignore[call-arg, call-arg]

     prompt = ChatPromptTemplate.from_messages(
         [
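Why the doubled code in # type: ignore[call-arg, call-arg]? The diff does not say, but these comments look machine-generated, and mypy emits call-arg once per problematic argument on a line, so one plausible reading (an assumption) is that the generator recorded each occurrence; duplicate codes in the bracket list are harmless. A runnable toy reproduction:

class Config:
    def __init__(self, *, host: str, port: int) -> None:
        self.host, self.port = host, port

try:
    # mypy reports `call-arg` twice here (once per missing keyword).
    config = Config()  # type: ignore[call-arg, call-arg]
except TypeError as err:
    print(err)  # the ignore quiets mypy only; the call still fails at runtime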
@@ -108,7 +108,7 @@ def test_system_invoke() -> None:

 def test_anthropic_call() -> None:
     """Test valid call to anthropic."""
-    chat = ChatAnthropic(model="test")
+    chat = ChatAnthropic(model="test")  # type: ignore[call-arg]
     message = HumanMessage(content="Hello")
     response = chat.invoke([message])
     assert isinstance(response, AIMessage)
@@ -117,7 +117,7 @@ def test_anthropic_call() -> None:

 def test_anthropic_generate() -> None:
     """Test generate method of anthropic."""
-    chat = ChatAnthropic(model="test")
+    chat = ChatAnthropic(model="test")  # type: ignore[call-arg]
     chat_messages: List[List[BaseMessage]] = [
         [HumanMessage(content="How many toes do dogs have?")]
     ]
@@ -133,7 +133,7 @@ def test_anthropic_generate() -> None:

 def test_anthropic_streaming() -> None:
     """Test streaming tokens from anthropic."""
-    chat = ChatAnthropic(model="test")
+    chat = ChatAnthropic(model="test")  # type: ignore[call-arg]
     message = HumanMessage(content="Hello")
     response = chat.stream([message])
     for token in response:
@@ -145,7 +145,7 @@ def test_anthropic_streaming_callback() -> None:
     """Test that streaming correctly invokes on_llm_new_token callback."""
     callback_handler = FakeCallbackHandler()
     callback_manager = CallbackManager([callback_handler])
-    chat = ChatAnthropic(
+    chat = ChatAnthropic(  # type: ignore[call-arg]
         model="test",
         callback_manager=callback_manager,
         verbose=True,
@@ -161,7 +161,7 @@ async def test_anthropic_async_streaming_callback() -> None:
     """Test that streaming correctly invokes on_llm_new_token callback."""
     callback_handler = FakeCallbackHandler()
     callback_manager = CallbackManager([callback_handler])
-    chat = ChatAnthropic(
+    chat = ChatAnthropic(  # type: ignore[call-arg]
         model="test",
         callback_manager=callback_manager,
         verbose=True,
@@ -177,7 +177,7 @@ async def test_anthropic_async_streaming_callback() -> None:

 def test_anthropic_multimodal() -> None:
     """Test that multimodal inputs are handled correctly."""
-    chat = ChatAnthropic(model=MODEL_NAME)
+    chat = ChatAnthropic(model=MODEL_NAME)  # type: ignore[call-arg]
     messages = [
         HumanMessage(
             content=[
@@ -202,7 +202,7 @@ def test_streaming() -> None:
     callback_handler = FakeCallbackHandler()
     callback_manager = CallbackManager([callback_handler])

-    llm = ChatAnthropicMessages(
+    llm = ChatAnthropicMessages(  # type: ignore[call-arg, call-arg]
         model_name=MODEL_NAME, streaming=True, callback_manager=callback_manager
     )

@@ -216,7 +216,7 @@ async def test_astreaming() -> None:
     callback_handler = FakeCallbackHandler()
     callback_manager = CallbackManager([callback_handler])

-    llm = ChatAnthropicMessages(
+    llm = ChatAnthropicMessages(  # type: ignore[call-arg, call-arg]
         model_name=MODEL_NAME, streaming=True, callback_manager=callback_manager
     )

@@ -226,7 +226,7 @@ async def test_astreaming() -> None:


 def test_tool_use() -> None:
-    llm = ChatAnthropic(
+    llm = ChatAnthropic(  # type: ignore[call-arg]
         model="claude-3-opus-20240229",
     )

@@ -277,7 +277,7 @@ def test_anthropic_with_empty_text_block() -> None:
         """Type the given letter."""
         return "OK"

-    model = ChatAnthropic(model="claude-3-opus-20240229", temperature=0).bind_tools(
+    model = ChatAnthropic(model="claude-3-opus-20240229", temperature=0).bind_tools(  # type: ignore[call-arg]
         [type_letter]
     )

@@ -314,7 +314,7 @@ def test_anthropic_with_empty_text_block() -> None:


 def test_with_structured_output() -> None:
-    llm = ChatAnthropic(
+    llm = ChatAnthropic(  # type: ignore[call-arg]
         model="claude-3-opus-20240229",
     )

@@ -341,7 +341,7 @@ class GetWeather(BaseModel):

 @pytest.mark.parametrize("tool_choice", ["GetWeather", "auto", "any"])
 def test_anthropic_bind_tools_tool_choice(tool_choice: str) -> None:
-    chat_model = ChatAnthropic(
+    chat_model = ChatAnthropic(  # type: ignore[call-arg]
         model="claude-3-sonnet-20240229",
     )
     chat_model_with_tools = chat_model.bind_tools([GetWeather], tool_choice=tool_choice)
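A hedged sketch of the bind_tools pattern the integration tests above exercise (requires a valid ANTHROPIC_API_KEY; the model name and tool shape are copied from the tests):

from langchain_anthropic import ChatAnthropic
from langchain_core.pydantic_v1 import BaseModel, Field

class GetWeather(BaseModel):
    """Get the current weather in a given location."""

    location: str = Field(description="The city and state, e.g. San Francisco, CA")

chat_model = ChatAnthropic(model="claude-3-sonnet-20240229")  # type: ignore[call-arg]
chat_model_with_tools = chat_model.bind_tools([GetWeather], tool_choice="any")
response = chat_model_with_tools.invoke("What's the weather in San Francisco?")
print(response.tool_calls)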
@@ -18,7 +18,7 @@ BIG_MODEL_NAME = "claude-3-opus-20240229"

 def test_stream() -> None:
     """Test streaming tokens from Anthropic."""
-    llm = ChatAnthropicTools(model_name=MODEL_NAME)
+    llm = ChatAnthropicTools(model_name=MODEL_NAME)  # type: ignore[call-arg, call-arg]

     for token in llm.stream("I'm Pickle Rick"):
         assert isinstance(token.content, str)
@@ -26,7 +26,7 @@ def test_stream() -> None:

 async def test_astream() -> None:
     """Test streaming tokens from Anthropic."""
-    llm = ChatAnthropicTools(model_name=MODEL_NAME)
+    llm = ChatAnthropicTools(model_name=MODEL_NAME)  # type: ignore[call-arg, call-arg]

     async for token in llm.astream("I'm Pickle Rick"):
         assert isinstance(token.content, str)
@@ -34,7 +34,7 @@ async def test_astream() -> None:

 async def test_abatch() -> None:
     """Test streaming tokens from ChatAnthropicTools."""
-    llm = ChatAnthropicTools(model_name=MODEL_NAME)
+    llm = ChatAnthropicTools(model_name=MODEL_NAME)  # type: ignore[call-arg, call-arg]

     result = await llm.abatch(["I'm Pickle Rick", "I'm not Pickle Rick"])
     for token in result:
@@ -43,7 +43,7 @@ async def test_abatch() -> None:

 async def test_abatch_tags() -> None:
     """Test batch tokens from ChatAnthropicTools."""
-    llm = ChatAnthropicTools(model_name=MODEL_NAME)
+    llm = ChatAnthropicTools(model_name=MODEL_NAME)  # type: ignore[call-arg, call-arg]

     result = await llm.abatch(
         ["I'm Pickle Rick", "I'm not Pickle Rick"], config={"tags": ["foo"]}
@@ -54,7 +54,7 @@ async def test_abatch_tags() -> None:

 def test_batch() -> None:
     """Test batch tokens from ChatAnthropicTools."""
-    llm = ChatAnthropicTools(model_name=MODEL_NAME)
+    llm = ChatAnthropicTools(model_name=MODEL_NAME)  # type: ignore[call-arg, call-arg]

     result = llm.batch(["I'm Pickle Rick", "I'm not Pickle Rick"])
     for token in result:
@@ -63,7 +63,7 @@ def test_batch() -> None:

 async def test_ainvoke() -> None:
     """Test invoke tokens from ChatAnthropicTools."""
-    llm = ChatAnthropicTools(model_name=MODEL_NAME)
+    llm = ChatAnthropicTools(model_name=MODEL_NAME)  # type: ignore[call-arg, call-arg]

     result = await llm.ainvoke("I'm Pickle Rick", config={"tags": ["foo"]})
     assert isinstance(result.content, str)
@@ -71,7 +71,7 @@ async def test_ainvoke() -> None:

 def test_invoke() -> None:
     """Test invoke tokens from ChatAnthropicTools."""
-    llm = ChatAnthropicTools(model_name=MODEL_NAME)
+    llm = ChatAnthropicTools(model_name=MODEL_NAME)  # type: ignore[call-arg, call-arg]

     result = llm.invoke("I'm Pickle Rick", config=dict(tags=["foo"]))
     assert isinstance(result.content, str)
@@ -79,7 +79,7 @@ def test_invoke() -> None:

 def test_system_invoke() -> None:
     """Test invoke tokens with a system message"""
-    llm = ChatAnthropicTools(model_name=MODEL_NAME)
+    llm = ChatAnthropicTools(model_name=MODEL_NAME)  # type: ignore[call-arg, call-arg]

     prompt = ChatPromptTemplate.from_messages(
         [
@@ -108,7 +108,7 @@ def test_with_structured_output() -> None:
         name: str
         age: int

-    chain = ChatAnthropicTools(
+    chain = ChatAnthropicTools(  # type: ignore[call-arg, call-arg]
         model_name=BIG_MODEL_NAME,
         temperature=0,
         default_headers={"anthropic-beta": "tools-2024-04-04"},
@@ -153,7 +153,7 @@ def test_anthropic_complex_structured_output() -> None:
         ]
     )

-    llm = ChatAnthropicTools(
+    llm = ChatAnthropicTools(  # type: ignore[call-arg, call-arg]
         temperature=0,
         model_name=BIG_MODEL_NAME,
         default_headers={"anthropic-beta": "tools-2024-04-04"},
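A hedged usage sketch of the experimental wrapper covered above (assumes ChatAnthropicTools is importable from langchain_anthropic.experimental, as these tests imply; the beta header is copied from the tests):

from langchain_anthropic.experimental import ChatAnthropicTools
from langchain_core.pydantic_v1 import BaseModel

class Person(BaseModel):
    name: str
    age: int

chain = ChatAnthropicTools(  # type: ignore[call-arg, call-arg]
    model_name="claude-3-opus-20240229",
    temperature=0,
    default_headers={"anthropic-beta": "tools-2024-04-04"},
).with_structured_output(Person)
print(chain.invoke("Erick is 27 years old."))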
@@ -18,20 +18,20 @@ def test_anthropic_model_name_param() -> None:

 @pytest.mark.requires("anthropic")
 def test_anthropic_model_param() -> None:
-    llm = Anthropic(model="foo")
+    llm = Anthropic(model="foo")  # type: ignore[call-arg]
     assert llm.model == "foo"


 def test_anthropic_call() -> None:
     """Test valid call to anthropic."""
-    llm = Anthropic(model="claude-instant-1")
+    llm = Anthropic(model="claude-instant-1")  # type: ignore[call-arg]
     output = llm.invoke("Say foo:")
     assert isinstance(output, str)


 def test_anthropic_streaming() -> None:
     """Test streaming tokens from anthropic."""
-    llm = Anthropic(model="claude-instant-1")
+    llm = Anthropic(model="claude-instant-1")  # type: ignore[call-arg]
     generator = llm.stream("I'm Pickle Rick")

     assert isinstance(generator, Generator)
@@ -25,8 +25,8 @@ os.environ["ANTHROPIC_API_KEY"] = "foo"
 def test_initialization() -> None:
     """Test chat model initialization."""
     for model in [
-        ChatAnthropic(model_name="claude-instant-1.2", api_key="xyz", timeout=2),
-        ChatAnthropic(
+        ChatAnthropic(model_name="claude-instant-1.2", api_key="xyz", timeout=2),  # type: ignore[arg-type]
+        ChatAnthropic(  # type: ignore[call-arg, call-arg, call-arg]
             model="claude-instant-1.2",
             anthropic_api_key="xyz",
             default_request_timeout=2,
@@ -39,32 +39,32 @@ def test_initialization() -> None:

 @pytest.mark.requires("anthropic")
 def test_anthropic_model_name_param() -> None:
-    llm = ChatAnthropic(model_name="foo")
+    llm = ChatAnthropic(model_name="foo")  # type: ignore[call-arg, call-arg]
     assert llm.model == "foo"


 @pytest.mark.requires("anthropic")
 def test_anthropic_model_param() -> None:
-    llm = ChatAnthropic(model="foo")
+    llm = ChatAnthropic(model="foo")  # type: ignore[call-arg]
     assert llm.model == "foo"


 @pytest.mark.requires("anthropic")
 def test_anthropic_model_kwargs() -> None:
-    llm = ChatAnthropic(model_name="foo", model_kwargs={"foo": "bar"})
+    llm = ChatAnthropic(model_name="foo", model_kwargs={"foo": "bar"})  # type: ignore[call-arg, call-arg]
     assert llm.model_kwargs == {"foo": "bar"}


 @pytest.mark.requires("anthropic")
 def test_anthropic_invalid_model_kwargs() -> None:
     with pytest.raises(ValueError):
-        ChatAnthropic(model="foo", model_kwargs={"max_tokens_to_sample": 5})
+        ChatAnthropic(model="foo", model_kwargs={"max_tokens_to_sample": 5})  # type: ignore[call-arg]


 @pytest.mark.requires("anthropic")
 def test_anthropic_incorrect_field() -> None:
     with pytest.warns(match="not default parameter"):
-        llm = ChatAnthropic(model="foo", foo="bar")
+        llm = ChatAnthropic(model="foo", foo="bar")  # type: ignore[call-arg, call-arg]
     assert llm.model_kwargs == {"foo": "bar"}
@@ -73,7 +73,7 @@ def test_anthropic_initialization() -> None:
     """Test anthropic initialization."""
     # Verify that chat anthropic can be initialized using a secret key provided
     # as a parameter rather than an environment variable.
-    ChatAnthropic(model="test", anthropic_api_key="test")
+    ChatAnthropic(model="test", anthropic_api_key="test")  # type: ignore[call-arg, call-arg]


 def test__format_output() -> None:
@@ -90,7 +90,7 @@ def test__format_output() -> None:
     expected = ChatResult(
         generations=[
             ChatGeneration(
-                message=AIMessage(
+                message=AIMessage(  # type: ignore[misc]
                     "bar",
                     usage_metadata={
                         "input_tokens": 2,
@@ -108,16 +108,16 @@ def test__format_output() -> None:
             "usage": {"input_tokens": 2, "output_tokens": 1},
         },
     )
-    llm = ChatAnthropic(model="test", anthropic_api_key="test")
+    llm = ChatAnthropic(model="test", anthropic_api_key="test")  # type: ignore[call-arg, call-arg]
     actual = llm._format_output(anthropic_msg)
     assert expected == actual


 def test__merge_messages() -> None:
     messages = [
-        SystemMessage("foo"),
-        HumanMessage("bar"),
-        AIMessage(
+        SystemMessage("foo"),  # type: ignore[misc]
+        HumanMessage("bar"),  # type: ignore[misc]
+        AIMessage(  # type: ignore[misc]
             [
                 {"text": "baz", "type": "text"},
                 {
@@ -137,14 +137,14 @@ def test__merge_messages() -> None:
                 },
             ]
         ),
-        ToolMessage("buz output", tool_call_id="1"),
-        ToolMessage("blah output", tool_call_id="2"),
-        HumanMessage("next thing"),
+        ToolMessage("buz output", tool_call_id="1"),  # type: ignore[misc]
+        ToolMessage("blah output", tool_call_id="2"),  # type: ignore[misc]
+        HumanMessage("next thing"),  # type: ignore[misc]
     ]
     expected = [
-        SystemMessage("foo"),
-        HumanMessage("bar"),
-        AIMessage(
+        SystemMessage("foo"),  # type: ignore[misc]
+        HumanMessage("bar"),  # type: ignore[misc]
+        AIMessage(  # type: ignore[misc]
             [
                 {"text": "baz", "type": "text"},
                 {
@@ -164,7 +164,7 @@ def test__merge_messages() -> None:
                 },
             ]
         ),
-        HumanMessage(
+        HumanMessage(  # type: ignore[misc]
             [
                 {"type": "tool_result", "content": "buz output", "tool_use_id": "1"},
                 {"type": "tool_result", "content": "blah output", "tool_use_id": "2"},
@@ -178,15 +178,15 @@ def test__merge_messages() -> None:

 def test__merge_messages_mutation() -> None:
     original_messages = [
-        HumanMessage([{"type": "text", "text": "bar"}]),
-        HumanMessage("next thing"),
+        HumanMessage([{"type": "text", "text": "bar"}]),  # type: ignore[misc]
+        HumanMessage("next thing"),  # type: ignore[misc]
     ]
     messages = [
-        HumanMessage([{"type": "text", "text": "bar"}]),
-        HumanMessage("next thing"),
+        HumanMessage([{"type": "text", "text": "bar"}]),  # type: ignore[misc]
+        HumanMessage("next thing"),  # type: ignore[misc]
     ]
     expected = [
-        HumanMessage(
+        HumanMessage(  # type: ignore[misc]
             [{"type": "text", "text": "bar"}, {"type": "text", "text": "next thing"}]
         ),
     ]
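A hedged usage sketch of the private helper these tests pin down (internal API, subject to change; message shapes copied from the tests above): consecutive tool and human messages collapse into a single human message whose content is a list of blocks.

from langchain_anthropic.chat_models import _merge_messages
from langchain_core.messages import HumanMessage, ToolMessage

merged = _merge_messages(
    [
        ToolMessage("buz output", tool_call_id="1"),  # type: ignore[misc]
        HumanMessage("next thing"),  # type: ignore[misc]
    ]
)
# Expected per the tests: one HumanMessage containing a "tool_result"
# block followed by a "text" block.
print(merged)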
@@ -305,13 +305,13 @@ def test_convert_to_anthropic_tool(


 def test__format_messages_with_tool_calls() -> None:
-    system = SystemMessage("fuzz")
-    human = HumanMessage("foo")
-    ai = AIMessage(
+    system = SystemMessage("fuzz")  # type: ignore[misc]
+    human = HumanMessage("foo")  # type: ignore[misc]
+    ai = AIMessage(  # type: ignore[misc]
         "",
         tool_calls=[{"name": "bar", "id": "1", "args": {"baz": "buzz"}}],
     )
-    tool = ToolMessage(
+    tool = ToolMessage(  # type: ignore[misc]
         "blurb",
         tool_call_id="1",
     )
@@ -344,15 +344,15 @@ def test__format_messages_with_tool_calls() -> None:


 def test__format_messages_with_str_content_and_tool_calls() -> None:
-    system = SystemMessage("fuzz")
-    human = HumanMessage("foo")
+    system = SystemMessage("fuzz")  # type: ignore[misc]
+    human = HumanMessage("foo")  # type: ignore[misc]
     # If content and tool_calls are specified and content is a string, then both are
     # included with content first.
-    ai = AIMessage(
+    ai = AIMessage(  # type: ignore[misc]
         "thought",
         tool_calls=[{"name": "bar", "id": "1", "args": {"baz": "buzz"}}],
     )
-    tool = ToolMessage("blurb", tool_call_id="1")
+    tool = ToolMessage("blurb", tool_call_id="1")  # type: ignore[misc]
     messages = [system, human, ai, tool]
     expected = (
         "fuzz",
@@ -383,15 +383,15 @@ def test__format_messages_with_str_content_and_tool_calls() -> None:


 def test__format_messages_with_list_content_and_tool_calls() -> None:
-    system = SystemMessage("fuzz")
-    human = HumanMessage("foo")
+    system = SystemMessage("fuzz")  # type: ignore[misc]
+    human = HumanMessage("foo")  # type: ignore[misc]
     # If content and tool_calls are specified and content is a list, then content is
     # preferred.
-    ai = AIMessage(
+    ai = AIMessage(  # type: ignore[misc]
         [{"type": "text", "text": "thought"}],
         tool_calls=[{"name": "bar", "id": "1", "args": {"baz": "buzz"}}],
     )
-    tool = ToolMessage(
+    tool = ToolMessage(  # type: ignore[misc]
         "blurb",
         tool_call_id="1",
     )
@@ -418,10 +418,10 @@ def test__format_messages_with_list_content_and_tool_calls() -> None:

 def test__format_messages_with_tool_use_blocks_and_tool_calls() -> None:
     """Show that tool_calls are preferred to tool_use blocks when both have same id."""
-    system = SystemMessage("fuzz")
-    human = HumanMessage("foo")
+    system = SystemMessage("fuzz")  # type: ignore[misc]
+    human = HumanMessage("foo")  # type: ignore[misc]
     # NOTE: tool_use block in contents and tool_calls have different arguments.
-    ai = AIMessage(
+    ai = AIMessage(  # type: ignore[misc]
         [
             {"type": "text", "text": "thought"},
             {
@@ -433,7 +433,7 @@ def test__format_messages_with_tool_use_blocks_and_tool_calls() -> None:
         ],
         tool_calls=[{"name": "bar", "id": "1", "args": {"baz": "BUZZ"}}],
     )
-    tool = ToolMessage("blurb", tool_call_id="1")
+    tool = ToolMessage("blurb", tool_call_id="1")  # type: ignore[misc]
     messages = [system, human, ai, tool]
     expected = (
         "fuzz",
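A hedged sketch of the private formatter exercised above (internal API; the (system, messages) return shape is inferred from the expected = ("fuzz", ...) tuples in these tests):

from langchain_anthropic.chat_models import _format_messages
from langchain_core.messages import AIMessage, HumanMessage, SystemMessage, ToolMessage

system, formatted = _format_messages(
    [
        SystemMessage("fuzz"),  # type: ignore[misc]
        HumanMessage("foo"),  # type: ignore[misc]
        AIMessage(  # type: ignore[misc]
            "",
            tool_calls=[{"name": "bar", "id": "1", "args": {"baz": "buzz"}}],
        ),
        ToolMessage("blurb", tool_call_id="1"),  # type: ignore[misc]
    ]
)
print(system)     # "fuzz"
print(formatted)  # Anthropic-format dicts with tool_use / tool_result blocks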
@@ -465,7 +465,7 @@ def test__format_messages_with_tool_use_blocks_and_tool_calls() -> None:

 def test_anthropic_api_key_is_secret_string() -> None:
     """Test that the API key is stored as a SecretStr."""
-    chat_model = ChatAnthropic(
+    chat_model = ChatAnthropic(  # type: ignore[call-arg, call-arg]
         model="claude-3-opus-20240229",
         anthropic_api_key="secret-api-key",
     )
@@ -477,7 +477,7 @@ def test_anthropic_api_key_masked_when_passed_from_env(
 ) -> None:
     """Test that the API key is masked when passed from an environment variable."""
     monkeypatch.setenv("ANTHROPIC_API_KEY", "secret-api-key")
-    chat_model = ChatAnthropic(
+    chat_model = ChatAnthropic(  # type: ignore[call-arg]
         model="claude-3-opus-20240229",
     )
     print(chat_model.anthropic_api_key, end="")  # noqa: T201
@@ -490,7 +490,7 @@ def test_anthropic_api_key_masked_when_passed_via_constructor(
     capsys: CaptureFixture,
 ) -> None:
     """Test that the API key is masked when passed via the constructor."""
-    chat_model = ChatAnthropic(
+    chat_model = ChatAnthropic(  # type: ignore[call-arg, call-arg]
         model="claude-3-opus-20240229",
         anthropic_api_key="secret-api-key",
     )
@@ -502,7 +502,7 @@ def test_anthropic_api_key_masked_when_passed_via_constructor(

 def test_anthropic_uses_actual_secret_value_from_secretstr() -> None:
     """Test that the actual secret value is correctly retrieved."""
-    chat_model = ChatAnthropic(
+    chat_model = ChatAnthropic(  # type: ignore[call-arg, call-arg]
         model="claude-3-opus-20240229",
         anthropic_api_key="secret-api-key",
     )
@@ -519,7 +519,7 @@ class GetWeather(BaseModel):


 def test_anthropic_bind_tools_tool_choice() -> None:
-    chat_model = ChatAnthropic(
+    chat_model = ChatAnthropic(  # type: ignore[call-arg, call-arg]
         model="claude-3-opus-20240229",
         anthropic_api_key="secret-api-key",
     )
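The masking behavior these tests assert comes from pydantic's SecretStr; a minimal standalone sketch:

from pydantic import SecretStr

key = SecretStr("secret-api-key")
print(key)                     # prints a masked value, not the secret
print(key.get_secret_value())  # prints "secret-api-key"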
@@ -19,7 +19,7 @@ _CONTENT: List = [
     {"type": "tool_use", "input": {"baz": "a"}, "id": "2", "name": "_Foo2"},
 ]

-_RESULT: List = [ChatGeneration(message=AIMessage(_CONTENT))]
+_RESULT: List = [ChatGeneration(message=AIMessage(_CONTENT))]  # type: ignore[misc]


 class _Foo1(BaseModel):
@@ -50,7 +50,7 @@ def test_tools_output_parser_args_only() -> None:
     assert expected == actual

     expected = []
-    actual = output_parser.parse_result([ChatGeneration(message=AIMessage(""))])
+    actual = output_parser.parse_result([ChatGeneration(message=AIMessage(""))])  # type: ignore[misc]
     assert expected == actual


@@ -61,7 +61,7 @@ def test_tools_output_parser_first_tool_only() -> None:
     assert expected == actual

     expected = None
-    actual = output_parser.parse_result([ChatGeneration(message=AIMessage(""))])
+    actual = output_parser.parse_result([ChatGeneration(message=AIMessage(""))])  # type: ignore[misc]
     assert expected == actual
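A hedged sketch of the parser behavior asserted above (assumes ToolsOutputParser lives in langchain_anthropic.output_parsers, as the test module suggests): an AI message with no tool-use blocks parses to an empty result instead of raising.

from langchain_anthropic.output_parsers import ToolsOutputParser
from langchain_core.messages import AIMessage
from langchain_core.outputs import ChatGeneration

output_parser = ToolsOutputParser(args_only=True)
result = output_parser.parse_result(
    [ChatGeneration(message=AIMessage(""))]  # type: ignore[misc]
)
print(result)  # [] per test_tools_output_parser_args_only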