Chester Curme 2025-05-08 13:05:28 -04:00
parent d9715ad797
commit 395008456e


@@ -9,12 +9,12 @@ from langchain_anthropic import ChatAnthropic, ChatAnthropicMessages
 MODEL_NAME = "claude-3-5-haiku-latest"
 
 
-def test_stream() -> None:
-    """Test streaming tokens from Anthropic."""
-    llm = ChatAnthropicMessages(model_name=MODEL_NAME)  # type: ignore[call-arg, call-arg]
+# def test_stream() -> None:
+#     """Test streaming tokens from Anthropic."""
+#     llm = ChatAnthropicMessages(model_name=MODEL_NAME)  # type: ignore[call-arg, call-arg]
 
-    for token in llm.stream("I'm Pickle Rick"):
-        pass
+#     for token in llm.stream("I'm Pickle Rick"):
+#         pass
 
 
 async def test_astream() -> None:
@@ -37,18 +37,18 @@ async def test_astream() -> None:
         pass
 
 
-# async def test_stream_usage() -> None:
-#     model = ChatAnthropic(model_name=MODEL_NAME, stream_usage=False)  # type: ignore[call-arg]
-#     async for token in model.astream("hi"):
-#         assert isinstance(token, AIMessageChunk)
-#         assert token.usage_metadata is None
+async def test_stream_usage() -> None:
+    model = ChatAnthropic(model_name=MODEL_NAME, stream_usage=False)  # type: ignore[call-arg]
+    async for token in model.astream("hi"):
+        assert isinstance(token, AIMessageChunk)
+        assert token.usage_metadata is None
 
-#     # check we override with kwarg
-#     model = ChatAnthropic(model_name=MODEL_NAME)  # type: ignore[call-arg]
-#     assert model.stream_usage
-#     async for token in model.astream("hi", stream_usage=False):
-#         assert isinstance(token, AIMessageChunk)
-#         assert token.usage_metadata is None
+    # check we override with kwarg
+    model = ChatAnthropic(model_name=MODEL_NAME)  # type: ignore[call-arg]
+    assert model.stream_usage
+    async for token in model.astream("hi", stream_usage=False):
+        assert isinstance(token, AIMessageChunk)
+        assert token.usage_metadata is None
 
 
 async def test_async_stream_twice() -> None:
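
For reference, a minimal standalone sketch of the test re-enabled by this commit, assuming the file's existing imports (AIMessageChunk from langchain_core.messages) and that async tests are collected via the repository's pytest asyncio configuration:

from langchain_core.messages import AIMessageChunk

from langchain_anthropic import ChatAnthropic

MODEL_NAME = "claude-3-5-haiku-latest"


async def test_stream_usage() -> None:
    # With stream_usage=False on the constructor, streamed chunks should
    # carry no usage metadata.
    model = ChatAnthropic(model_name=MODEL_NAME, stream_usage=False)  # type: ignore[call-arg]
    async for token in model.astream("hi"):
        assert isinstance(token, AIMessageChunk)
        assert token.usage_metadata is None

    # The per-call kwarg overrides the constructor default (stream_usage=True).
    model = ChatAnthropic(model_name=MODEL_NAME)  # type: ignore[call-arg]
    assert model.stream_usage
    async for token in model.astream("hi", stream_usage=False):
        assert isinstance(token, AIMessageChunk)
        assert token.usage_metadata is None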