From 307dffdc9200bcea2c9164f92ed1f07961d5a85b Mon Sep 17 00:00:00 2001
From: Chester Curme
Date: Tue, 11 Mar 2025 19:14:14 -0400
Subject: [PATCH] add tool output in streaming case

---
 .../openai/langchain_openai/chat_models/base.py      | 12 ++++++++++++
 .../tests/integration_tests/chat_models/test_base.py |  1 -
 2 files changed, 12 insertions(+), 1 deletion(-)

diff --git a/libs/partners/openai/langchain_openai/chat_models/base.py b/libs/partners/openai/langchain_openai/chat_models/base.py
index a92c2c7f5cb..139ab643e7a 100644
--- a/libs/partners/openai/langchain_openai/chat_models/base.py
+++ b/libs/partners/openai/langchain_openai/chat_models/base.py
@@ -375,6 +375,18 @@ def _convert_responses_chunk_to_generation_chunk(
         token_usage = chunk.response.usage.model_dump() if chunk.response.usage else {}
         usage_metadata = _create_usage_metadata_responses(token_usage)
         generation_info = {"model_name": chunk.response.model}
+    elif chunk.type in (
+        "response.web_search_call.completed",
+        "response.file_search_call.completed",
+    ):
+        tool_output = chunk.model_dump()
+        if "item_id" in tool_output:
+            tool_output["id"] = tool_output.pop("item_id")
+        tool_output["type"] = (
+            tool_output["type"].replace("response.", "").replace(".completed", "")
+        )
+        tool_output["status"] = "completed"
+        generation_info = {"tool_outputs": [tool_output]}
     else:
         return None
 
diff --git a/libs/partners/openai/tests/integration_tests/chat_models/test_base.py b/libs/partners/openai/tests/integration_tests/chat_models/test_base.py
index 0780e98ee2f..28d14b3b89f 100644
--- a/libs/partners/openai/tests/integration_tests/chat_models/test_base.py
+++ b/libs/partners/openai/tests/integration_tests/chat_models/test_base.py
@@ -2,7 +2,6 @@
 
 import base64
 import json
-import os
 from pathlib import Path
 from textwrap import dedent
 from typing import Any, AsyncIterator, List, Literal, Optional, cast
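
Note (not part of the patch): a minimal, self-contained sketch of the transformation the new elif branch performs on a completed tool-call streaming event. The `event` dict below is hypothetical; its field names are assumptions about what `chunk.model_dump()` might return for a `response.web_search_call.completed` event, not values taken from the OpenAI SDK.

# Illustrative sketch with an assumed event shape.
event = {
    "type": "response.web_search_call.completed",
    "item_id": "ws_abc123",  # hypothetical item id
    "output_index": 0,
    "sequence_number": 7,
}

tool_output = dict(event)  # stands in for chunk.model_dump()
if "item_id" in tool_output:
    tool_output["id"] = tool_output.pop("item_id")
tool_output["type"] = (
    tool_output["type"].replace("response.", "").replace(".completed", "")
)
tool_output["status"] = "completed"
generation_info = {"tool_outputs": [tool_output]}

assert generation_info == {
    "tool_outputs": [
        {
            "type": "web_search_call",
            "output_index": 0,
            "sequence_number": 7,
            "id": "ws_abc123",
            "status": "completed",
        }
    ]
}

Stripping the "response."/".completed" affixes and renaming "item_id" to "id" presumably normalizes the streamed event to the item shape seen in non-streaming responses.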