langchain/libs/community/tests/integration_tests/chat_models/test_baiduqianfan.py
maang-h 721f709dec
community: Improve QianfanChatEndpoint tool result to model (#24466)
- **Description:** When `QianfanChatEndpoint` uses a tool result to
answer questions, the tool's content is required to be in Dict format.
We could require users to return a Dict when calling the tool, but to
stay consistent with the other Chat Models, this modification is
necessary.
2024-07-22 11:29:00 -04:00

40 lines
1.3 KiB
Python
Raw Blame History

This file contains ambiguous Unicode characters

This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.

from langchain_core.messages import AIMessage, HumanMessage, ToolMessage
from langchain_core.messages.tool import ToolCall
from langchain_core.tools import tool
from langchain_community.chat_models import QianfanChatEndpoint
@tool
def get_current_weather(location: str, unit: str = "摄氏度") -> str:
    # Weather-lookup tool fixture for the integration test below.
    # NOTE: the Chinese docstring ("Get the weather for the given location")
    # is kept untranslated on purpose — @tool sends the docstring to the model
    # as the tool description, so changing it would change runtime behavior.
    # The default unit "摄氏度" means "degrees Celsius".
    """获取指定地点的天气"""
    # Canned response: "It is sunny in {location}, around 25 {unit}."
    return f"{location}是晴朗25{unit}左右。"
def test_chat_qianfan_tool_result_to_model() -> None:
    """Test QianfanChatEndpoint invoke with tool_calling result."""
    # Pre-build the assistant's recorded tool call (the id must match the
    # ToolMessage's tool_call_id so the endpoint can pair them up).
    weather_call = ToolCall(
        name="get_current_weather",
        args={"location": "上海", "unit": "摄氏度"},
        id="foo",
        type="tool_call",
    )
    # Simulated history: user question -> assistant tool call -> tool result.
    conversation = [
        HumanMessage("上海天气怎么样?"),
        AIMessage(content=" ", tool_calls=[weather_call]),
        ToolMessage(
            content="上海是晴天25度左右。",
            tool_call_id="foo",
            name="get_current_weather",
        ),
    ]
    model = QianfanChatEndpoint(model="ERNIE-3.5-8K")  # type: ignore[call-arg]
    # The model should consume the tool result and produce a final answer.
    response = model.bind_tools([get_current_weather]).invoke(conversation)
    assert isinstance(response, AIMessage)
    print(response.content)  # noqa: T201