commit 5da74b5053
parent 909387202a

    add tool output to response metadata
@@ -1025,6 +1025,12 @@ class BaseChatOpenAI(BaseChatModel):
                     generation_info["status"] = output.status
                 gen = ChatGeneration(message=message, generation_info=generation_info)
                 generations.append(gen)
+            else:
+                tool_output = output.model_dump()
+                if "tool_outputs" in generation_info:
+                    generation_info["tool_outputs"].append(tool_output)
+                else:
+                    generation_info["tool_outputs"] = [tool_output]
         llm_output = {"model_name": response.model}
 
         return ChatResult(generations=generations, llm_output=llm_output)
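For context, this is how the new metadata surfaces to a caller. A minimal sketch, assuming the langchain-openai Responses API integration and an OPENAI_API_KEY in the environment; the model name and prompt are placeholders, and the keys inside each tool output dict come from model_dump() on the Responses API output item:

from langchain_openai import ChatOpenAI

llm = ChatOpenAI(model="gpt-4o-mini")  # placeholder model name
# Binding a built-in tool routes the request through the Responses API.
llm_with_tools = llm.bind_tools([{"type": "web_search_preview"}])

response = llm_with_tools.invoke("What was a positive news story from today?")

# With this commit, non-message output items (e.g. web search calls) are
# collected under response_metadata["tool_outputs"].
for tool_output in response.response_metadata.get("tool_outputs", []):
    print(tool_output.get("type"), tool_output.get("status"), tool_output.get("id"))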
@@ -2,6 +2,7 @@
 
 import base64
 import json
+import os
 from pathlib import Path
 from textwrap import dedent
 from typing import Any, AsyncIterator, List, Literal, Optional, cast
@@ -1248,6 +1249,10 @@ def _check_response(response: Optional[BaseMessage]) -> None:
     assert response.usage_metadata["output_tokens"] > 0
     assert response.usage_metadata["total_tokens"] > 0
     assert response.response_metadata["model_name"]
+    for tool_output in response.response_metadata["tool_outputs"]:
+        assert tool_output["id"]
+        assert tool_output["status"]
+        assert tool_output["type"]
 
 
 def test_web_search() -> None:
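The fields asserted above suggest the shape of each entry. The snippet below is illustrative only: the values are invented placeholders, and only the three asserted keys are grounded in the test itself.

# Hypothetical example of one entry in response_metadata["tool_outputs"];
# the id value and exact field set are placeholders.
example_tool_output = {
    "id": "ws_abc123",        # invented identifier
    "status": "completed",
    "type": "web_search_call",
}
assert example_tool_output["id"]
assert example_tool_output["status"]
assert example_tool_output["type"]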
@@ -1279,6 +1284,7 @@ async def test_web_search_async() -> None:
     _check_response(response)
     assert response.response_metadata["status"]
 
+    # Test streaming
     full: Optional[BaseMessageChunk] = None
     async for chunk in llm.astream(
         "What was a positive news story from today?",
@@ -1287,4 +1293,4 @@ async def test_web_search_async() -> None:
         assert isinstance(chunk, AIMessageChunk)
         full = chunk if full is None else full + chunk
     assert isinstance(full, AIMessageChunk)
-    _check_response(response)
+    _check_response(full)
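The last change points _check_response at the aggregated chunk rather than the earlier non-streaming response, since it is the summed chunk that carries the merged metadata. A minimal synchronous sketch of the same aggregation pattern, with a placeholder model name:

from typing import Optional

from langchain_core.messages import AIMessageChunk, BaseMessageChunk
from langchain_openai import ChatOpenAI

llm = ChatOpenAI(model="gpt-4o-mini")  # placeholder model name

full: Optional[BaseMessageChunk] = None
for chunk in llm.stream("What was a positive news story from today?"):
    # Adding chunks concatenates content and merges response_metadata.
    full = chunk if full is None else full + chunk

assert isinstance(full, AIMessageChunk)
print(full.response_metadata)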