mirror of
https://github.com/hwchase17/langchain.git
synced 2025-09-15 06:26:12 +00:00
anthropic[minor]: Adds streaming tool call support for Anthropic (#22687)
Preserves string content chunks for non-tool-call requests for convenience. One thing - Anthropic events look like this: ``` RawContentBlockStartEvent(content_block=TextBlock(text='', type='text'), index=0, type='content_block_start') RawContentBlockDeltaEvent(delta=TextDelta(text='<thinking>\nThe', type='text_delta'), index=0, type='content_block_delta') RawContentBlockDeltaEvent(delta=TextDelta(text=' provide', type='text_delta'), index=0, type='content_block_delta') ... RawContentBlockStartEvent(content_block=ToolUseBlock(id='toolu_01GJ6x2ddcMG3psDNNe4eDqb', input={}, name='get_weather', type='tool_use'), index=1, type='content_block_start') RawContentBlockDeltaEvent(delta=InputJsonDelta(partial_json='', type='input_json_delta'), index=1, type='content_block_delta') ``` Note that `delta` has a `type` field. With this implementation, I'm dropping it because `merge_list` behavior will concatenate strings. We currently have `index` as a special field when merging lists; would it be worth adding `type` too? If so, what do we set as a content block chunk's type? `text` vs. `text_delta`/`tool_use` vs `input_json_delta`? CC @ccurme @efriis @baskaryan
This commit is contained in:
@@ -146,6 +146,55 @@ async def test_abatch_tags() -> None:
|
||||
assert isinstance(token.content, str)
|
||||
|
||||
|
||||
async def test_async_tool_use() -> None:
    """Integration test: async tool calling with ChatAnthropic.

    Checks that `ainvoke` returns an AIMessage with exactly one parsed
    tool call, and that `astream` yields chunks whose merged result
    carries a single tool_call_chunk with JSON-decodable string args.

    Requires Anthropic API access (network); no local assertion of model
    output values beyond structure.
    """
    llm = ChatAnthropic(  # type: ignore[call-arg]
        model=MODEL_NAME,
    )

    llm_with_tools = llm.bind_tools(
        [
            {
                "name": "get_weather",
                "description": "Get weather report for a city",
                "input_schema": {
                    "type": "object",
                    "properties": {"location": {"type": "string"}},
                },
            }
        ]
    )
    response = await llm_with_tools.ainvoke("what's the weather in san francisco, ca")
    assert isinstance(response, AIMessage)
    assert isinstance(response.content, list)
    assert isinstance(response.tool_calls, list)
    assert len(response.tool_calls) == 1
    tool_call = response.tool_calls[0]
    assert tool_call["name"] == "get_weather"
    assert isinstance(tool_call["args"], dict)
    assert "location" in tool_call["args"]

    # Test streaming: merge all chunks so tool-call fragments accumulate
    # into a single AIMessageChunk.
    chunks = []  # type: ignore
    gathered = None
    async for chunk in llm_with_tools.astream(
        "what's the weather in san francisco, ca"
    ):
        # append is O(1); rebuilding the list each iteration was O(n^2).
        chunks.append(chunk)
        gathered = chunk if gathered is None else gathered + chunk  # type: ignore
    assert len(chunks) > 1
    # gathered is None only if the stream was empty, which the length
    # assertion above already rules out.
    assert isinstance(gathered, AIMessageChunk)
    assert isinstance(gathered.tool_call_chunks, list)
    assert len(gathered.tool_call_chunks) == 1
    tool_call_chunk = gathered.tool_call_chunks[0]
    assert tool_call_chunk["name"] == "get_weather"
    # Streamed tool args arrive as a partial-JSON string, not a dict.
    assert isinstance(tool_call_chunk["args"], str)
    assert "location" in json.loads(tool_call_chunk["args"])
|
||||
|
||||
|
||||
def test_batch() -> None:
|
||||
"""Test batch tokens from ChatAnthropicMessages."""
|
||||
llm = ChatAnthropicMessages(model_name=MODEL_NAME) # type: ignore[call-arg, call-arg]
|
||||
@@ -313,7 +362,7 @@ async def test_astreaming() -> None:
|
||||
|
||||
def test_tool_use() -> None:
|
||||
llm = ChatAnthropic( # type: ignore[call-arg]
|
||||
model="claude-3-opus-20240229",
|
||||
model=MODEL_NAME,
|
||||
)
|
||||
|
||||
llm_with_tools = llm.bind_tools(
|
||||
@@ -339,20 +388,55 @@ def test_tool_use() -> None:
|
||||
assert "location" in tool_call["args"]
|
||||
|
||||
# Test streaming
|
||||
input = "how are you? what's the weather in san francisco, ca"
|
||||
first = True
|
||||
for chunk in llm_with_tools.stream("what's the weather in san francisco, ca"):
|
||||
chunks = [] # type: ignore
|
||||
for chunk in llm_with_tools.stream(input):
|
||||
chunks = chunks + [chunk]
|
||||
if first:
|
||||
gathered = chunk
|
||||
first = False
|
||||
else:
|
||||
gathered = gathered + chunk # type: ignore
|
||||
assert len(chunks) > 1
|
||||
assert isinstance(gathered.content, list)
|
||||
assert len(gathered.content) == 2
|
||||
tool_use_block = None
|
||||
for content_block in gathered.content:
|
||||
assert isinstance(content_block, dict)
|
||||
if content_block["type"] == "tool_use":
|
||||
tool_use_block = content_block
|
||||
break
|
||||
assert tool_use_block is not None
|
||||
assert tool_use_block["name"] == "get_weather"
|
||||
assert "location" in json.loads(tool_use_block["partial_json"])
|
||||
assert isinstance(gathered, AIMessageChunk)
|
||||
assert isinstance(gathered.tool_call_chunks, list)
|
||||
assert len(gathered.tool_call_chunks) == 1
|
||||
tool_call_chunk = gathered.tool_call_chunks[0]
|
||||
assert tool_call_chunk["name"] == "get_weather"
|
||||
assert isinstance(tool_call_chunk["args"], str)
|
||||
assert "location" in json.loads(tool_call_chunk["args"])
|
||||
assert isinstance(gathered.tool_calls, list)
|
||||
assert len(gathered.tool_calls) == 1
|
||||
tool_call = gathered.tool_calls[0]
|
||||
assert tool_call["name"] == "get_weather"
|
||||
assert isinstance(tool_call["args"], dict)
|
||||
assert "location" in tool_call["args"]
|
||||
assert tool_call["id"] is not None
|
||||
|
||||
# Test passing response back to model
|
||||
stream = llm_with_tools.stream(
|
||||
[
|
||||
input,
|
||||
gathered,
|
||||
ToolMessage(content="sunny and warm", tool_call_id=tool_call["id"]),
|
||||
]
|
||||
)
|
||||
chunks = [] # type: ignore
|
||||
first = True
|
||||
for chunk in stream:
|
||||
chunks = chunks + [chunk]
|
||||
if first:
|
||||
gathered = chunk
|
||||
first = False
|
||||
else:
|
||||
gathered = gathered + chunk # type: ignore
|
||||
assert len(chunks) > 1
|
||||
|
||||
|
||||
def test_anthropic_with_empty_text_block() -> None:
|
||||
@@ -428,7 +512,7 @@ class GetWeather(BaseModel):
|
||||
@pytest.mark.parametrize("tool_choice", ["GetWeather", "auto", "any"])
|
||||
def test_anthropic_bind_tools_tool_choice(tool_choice: str) -> None:
|
||||
chat_model = ChatAnthropic( # type: ignore[call-arg]
|
||||
model="claude-3-sonnet-20240229",
|
||||
model=MODEL_NAME,
|
||||
)
|
||||
chat_model_with_tools = chat_model.bind_tools([GetWeather], tool_choice=tool_choice)
|
||||
response = chat_model_with_tools.invoke("what's the weather in ny and la")
|
||||
|
Reference in New Issue
Block a user