From 8a3bb73c05b30d7d79ea5fbd399164ae035e5729 Mon Sep 17 00:00:00 2001
From: Mason Daugherty
Date: Fri, 14 Nov 2025 19:18:50 -0500
Subject: [PATCH] release(openai): 1.0.3 (#33981)

- Respect 300k token limit for embeddings API requests #33668
- fix create_agent / response_format for Responses API #33939
- fix response.incomplete event is not handled when using stream_mode=['messages'] #33871
---
 libs/partners/openai/pyproject.toml | 2 +-
 libs/partners/openai/uv.lock        | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/libs/partners/openai/pyproject.toml b/libs/partners/openai/pyproject.toml
index 362f6db73b4..48133ab35a3 100644
--- a/libs/partners/openai/pyproject.toml
+++ b/libs/partners/openai/pyproject.toml
@@ -9,7 +9,7 @@ license = { text = "MIT" }
 readme = "README.md"
 authors = []
 
-version = "1.0.2"
+version = "1.0.3"
 requires-python = ">=3.10.0,<4.0.0"
 dependencies = [
     "langchain-core>=1.0.2,<2.0.0",
diff --git a/libs/partners/openai/uv.lock b/libs/partners/openai/uv.lock
index bc52eeefbad..55aa04bb6bc 100644
--- a/libs/partners/openai/uv.lock
+++ b/libs/partners/openai/uv.lock
@@ -670,7 +670,7 @@ typing = [
 
 [[package]]
 name = "langchain-openai"
-version = "1.0.2"
+version = "1.0.3"
 source = { editable = "." }
 dependencies = [
     { name = "langchain-core" },
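
The first fix listed in the changelog concerns how OpenAIEmbeddings batches documents so that no single embeddings request exceeds the API's 300,000-token cap. The following is a minimal usage sketch of the affected code path, not part of the patch itself; it assumes an OPENAI_API_KEY in the environment and uses made-up document contents purely for illustration:

    from langchain_openai import OpenAIEmbeddings

    # Requires OPENAI_API_KEY to be set in the environment.
    embeddings = OpenAIEmbeddings(
        model="text-embedding-3-small",
        chunk_size=1000,  # max number of texts sent per underlying API request
    )

    # A large, repetitive corpus for illustration; per the changelog, 1.0.3
    # keeps each embeddings request within the 300k-token limit (#33668).
    docs = [f"document number {i} " * 50 for i in range(5000)]
    vectors = embeddings.embed_documents(docs)
    print(len(vectors), len(vectors[0]))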