commit d4f2aad929
parent aee80f0e48

    ainvoke
@@ -1039,7 +1039,16 @@ class BaseChatOpenAI(BaseChatModel):
             response = raw_response.parse()
             generation_info = {"headers": dict(raw_response.headers)}
         else:
-            response = await self.async_client.create(**payload)
+            if "tools" in payload and any(
+                _is_builtin_tool(tool) for tool in payload["tools"]
+            ):
+                responses_payload = _transform_payload_for_responses(payload)
+                response = await self.root_async_client.responses.create(
+                    **responses_payload
+                )
+                return self._create_chat_result_responses(response, generation_info)
+            else:
+                response = await self.async_client.create(**payload)
         return await run_in_executor(
             None, self._create_chat_result, response, generation_info
         )
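
The branch above only takes the Responses API path when the payload contains at least one builtin tool. The helper it relies on, _is_builtin_tool, is not shown in this hunk; the following is a hypothetical sketch of what such a check could look like, assuming a builtin tool is any tool spec whose "type" is something other than the standard "function" tool:

    # Hypothetical sketch only; the real _is_builtin_tool from this commit is
    # not shown in the hunk above. Assumption: builtin tools (e.g.
    # {"type": "web_search_preview"}) are dicts whose "type" is not "function".
    def _is_builtin_tool(tool: object) -> bool:
        return isinstance(tool, dict) and tool.get("type") not in (None, "function")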
@@ -1228,3 +1228,33 @@ def test_structured_output_and_tools() -> None:
     assert len(full.tool_calls) == 1
     tool_call = full.tool_calls[0]
     assert tool_call["name"] == "GenerateUsername"
+
+
+def test_web_search() -> None:
+    llm = ChatOpenAI(model="gpt-4o")
+    response = llm.invoke(
+        "What was a positive news story from today?",
+        tools=[{"type": "web_search_preview"}],
+    )
+    assert isinstance(response, AIMessage)
+    assert response.content
+    assert response.usage_metadata["input_tokens"] > 0
+    assert response.usage_metadata["output_tokens"] > 0
+    assert response.usage_metadata["total_tokens"] > 0
+    assert response.response_metadata["model_name"]
+    assert response.response_metadata["status"]
+
+
+async def test_web_search_async() -> None:
+    llm = ChatOpenAI(model="gpt-4o")
+    response = await llm.ainvoke(
+        "What was a positive news story from today?",
+        tools=[{"type": "web_search_preview"}],
+    )
+    assert isinstance(response, AIMessage)
+    assert response.content
+    assert response.usage_metadata["input_tokens"] > 0
+    assert response.usage_metadata["output_tokens"] > 0
+    assert response.usage_metadata["total_tokens"] > 0
+    assert response.response_metadata["model_name"]
+    assert response.response_metadata["status"]
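
Taken together, the new async branch and the integration tests mean user code along the following lines should exercise the Responses API path end to end. This is a usage sketch based on the test above, assuming the langchain-openai build from this commit and an OPENAI_API_KEY set in the environment:

    import asyncio

    from langchain_openai import ChatOpenAI

    async def main() -> None:
        llm = ChatOpenAI(model="gpt-4o")
        # A builtin tool such as web_search_preview routes the call through
        # the Responses API in the new async branch above.
        response = await llm.ainvoke(
            "What was a positive news story from today?",
            tools=[{"type": "web_search_preview"}],
        )
        print(response.content)
        print(response.response_metadata.get("status"))

    asyncio.run(main())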