From 909387202a7c6df30b5ee54c699662fea7151ea7 Mon Sep 17 00:00:00 2001
From: Chester Curme
Date: Tue, 11 Mar 2025 18:12:26 -0400
Subject: [PATCH] support annotations in streaming case

---
 libs/partners/openai/langchain_openai/chat_models/base.py | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/libs/partners/openai/langchain_openai/chat_models/base.py b/libs/partners/openai/langchain_openai/chat_models/base.py
index c624858950c..2e98aad3515 100644
--- a/libs/partners/openai/langchain_openai/chat_models/base.py
+++ b/libs/partners/openai/langchain_openai/chat_models/base.py
@@ -364,6 +364,13 @@ def _convert_responses_chunk_to_generation_chunk(
     usage_metadata = None
     if chunk.type == "response.output_text.delta":
         content += [{"type": "text", "text": chunk.delta, "index": chunk.content_index}]
+    elif chunk.type == "response.output_text.annotation.added":
+        content += [
+            {
+                "annotations": [chunk.annotation.model_dump()],
+                "index": chunk.content_index,
+            }
+        ]
     elif chunk.type == "response.completed":
         token_usage = chunk.response.usage.model_dump() if chunk.response.usage else {}
         usage_metadata = _create_usage_metadata_responses(token_usage)
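
Note: the following is a minimal, self-contained sketch (not the library's own code) of what the new branch produces for a streamed "response.output_text.annotation.added" event. The MockAnnotation class, the SimpleNamespace chunk, and the annotation fields shown are hypothetical stand-ins for the pydantic objects emitted by the OpenAI Responses streaming API; the point is that the annotation is wrapped in a content block carrying the same "index" as the text block it belongs to, so downstream chunk merging can attach it to that block.

    # Hypothetical stand-ins for the streamed event objects; only the shape matters.
    from types import SimpleNamespace


    class MockAnnotation:
        """Stand-in for a pydantic annotation object (e.g. a URL citation)."""

        def model_dump(self) -> dict:
            # Illustrative fields only.
            return {
                "type": "url_citation",
                "url": "https://example.com",
                "start_index": 0,
                "end_index": 10,
            }


    chunk = SimpleNamespace(
        type="response.output_text.annotation.added",
        annotation=MockAnnotation(),
        content_index=0,
    )

    content: list = []
    if chunk.type == "response.output_text.annotation.added":
        # Emit a content block holding only the annotation plus the index of the
        # text block streamed at the same content_index, mirroring the patch.
        content += [
            {
                "annotations": [chunk.annotation.model_dump()],
                "index": chunk.content_index,
            }
        ]

    print(content)
    # [{'annotations': [{'type': 'url_citation', ...}], 'index': 0}]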