add tool output to response metadata

Chester Curme 2025-03-11 18:56:47 -04:00
parent 909387202a
commit 5da74b5053
2 changed files with 13 additions and 1 deletion


@@ -1025,6 +1025,12 @@ class BaseChatOpenAI(BaseChatModel):
                     generation_info["status"] = output.status
                 gen = ChatGeneration(message=message, generation_info=generation_info)
                 generations.append(gen)
+            else:
+                tool_output = output.model_dump()
+                if "tool_outputs" in generation_info:
+                    generation_info["tool_outputs"].append(tool_output)
+                else:
+                    generation_info["tool_outputs"] = [tool_output]
         llm_output = {"model_name": response.model}
         return ChatResult(generations=generations, llm_output=llm_output)
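
For context, a minimal sketch of how the new metadata surfaces to callers. Per this hunk, any non-message output item from the Responses API (e.g. a web_search_call) is dumped into response_metadata["tool_outputs"]; the model name and the use_responses_api flag below are illustrative assumptions, not part of this commit:

    from langchain_openai import ChatOpenAI

    # Hypothetical usage; model name and flag are assumptions, not from this diff.
    llm = ChatOpenAI(model="gpt-4o-mini", use_responses_api=True)
    response = llm.invoke(
        "What was a positive news story from today?",
        tools=[{"type": "web_search_preview"}],
    )
    # Each entry is the model_dump() of a non-message output item.
    for tool_output in response.response_metadata.get("tool_outputs", []):
        print(tool_output["type"], tool_output["id"], tool_output["status"])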


@@ -2,6 +2,7 @@
 import base64
 import json
 import os
+from pathlib import Path
 from textwrap import dedent
 from typing import Any, AsyncIterator, List, Literal, Optional, cast
@@ -1248,6 +1249,10 @@ def _check_response(response: Optional[BaseMessage]) -> None:
     assert response.usage_metadata["output_tokens"] > 0
     assert response.usage_metadata["total_tokens"] > 0
     assert response.response_metadata["model_name"]
+    for tool_output in response.response_metadata["tool_outputs"]:
+        assert tool_output["id"]
+        assert tool_output["status"]
+        assert tool_output["type"]


 def test_web_search() -> None:
@@ -1279,6 +1284,7 @@ async def test_web_search_async() -> None:
+    _check_response(response)
     assert response.response_metadata["status"]

     # Test streaming
     full: Optional[BaseMessageChunk] = None
     async for chunk in llm.astream(
@@ -1287,4 +1293,4 @@ async def test_web_search_async() -> None:
         assert isinstance(chunk, AIMessageChunk)
         full = chunk if full is None else full + chunk
     assert isinstance(full, AIMessageChunk)
-    _check_response(response)
+    _check_response(full)
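
The final hunk corrects the streaming assertion: after the loop, full is the message accumulated from the streamed chunks, so it, rather than the earlier non-streaming response, is what should be validated. A minimal sketch of the accumulation idiom (the chunk contents are illustrative):

    from langchain_core.messages import AIMessageChunk

    # AIMessageChunk implements __add__, so successive chunks merge their
    # content (and metadata) into a single message.
    full = None
    for chunk in [AIMessageChunk(content="Hello"), AIMessageChunk(content=" world")]:
        full = chunk if full is None else full + chunk
    assert isinstance(full, AIMessageChunk)
    assert full.content == "Hello world"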