Mirror of https://github.com/hwchase17/langchain.git, synced 2025-09-11 16:01:33 +00:00
openai[patch]: release 0.2.6 (#27924)
Some additions in support of the [predicted outputs](https://platform.openai.com/docs/guides/latency-optimization#use-predicted-outputs) feature:

- Bump openai sdk version
- Add integration test
- Add example to integration docs

The `prediction` kwarg is already plumbed through model invocation.
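For context, the new integration test exercises the feature by passing a `prediction` kwarg at invocation time and reading the prediction token counts back from `response_metadata`. A minimal sketch of the same usage (the model name and the predicted draft below are illustrative, not taken from this PR):

```python
from langchain_openai import ChatOpenAI

# A draft the model is expected to mostly reproduce; it doubles as the prediction.
draft = "def greet(name):\n    return f'Hello, {name}!'\n"

llm = ChatOpenAI(model="gpt-4o")
response = llm.invoke(
    [
        {"role": "user", "content": "Rename the function to `welcome`. Respond only with code."},
        {"role": "user", "content": draft},
    ],
    # Forwarded to the OpenAI API as the predicted output.
    prediction={"type": "content", "content": draft},
)

# Accepted/rejected prediction token counts surface in the usage details.
details = response.response_metadata["token_usage"]["completion_tokens_details"]
print(details["accepted_prediction_tokens"], details["rejected_prediction_tokens"])
```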
This commit is contained in:
libs/partners/openai/poetry.lock (generated): 16 changed lines
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
 
 [[package]]
 name = "annotated-types"
@@ -518,7 +518,7 @@ url = "../../core"
 
 [[package]]
 name = "langchain-standard-tests"
-version = "0.1.1"
+version = "0.3.0"
 description = "Standard tests for LangChain implementations"
 optional = false
 python-versions = ">=3.9,<4.0"
@@ -527,7 +527,7 @@ develop = true
 
 [package.dependencies]
 httpx = "^0.27.0"
-langchain-core = "^0.3.0"
+langchain-core = "^0.3.15"
 pytest = ">=7,<9"
 syrupy = "^4"
 
@@ -667,13 +667,13 @@ files = [
 
 [[package]]
 name = "openai"
-version = "1.52.2"
+version = "1.54.1"
 description = "The official Python library for the openai API"
 optional = false
-python-versions = ">=3.7.1"
+python-versions = ">=3.8"
 files = [
-    {file = "openai-1.52.2-py3-none-any.whl", hash = "sha256:57e9e37bc407f39bb6ec3a27d7e8fb9728b2779936daa1fcf95df17d3edfaccc"},
-    {file = "openai-1.52.2.tar.gz", hash = "sha256:87b7d0f69d85f5641678d414b7ee3082363647a5c66a462ed7f3ccb59582da0d"},
+    {file = "openai-1.54.1-py3-none-any.whl", hash = "sha256:3cb49ccb6bfdc724ad01cc397d323ef8314fc7d45e19e9de2afdd6484a533324"},
+    {file = "openai-1.54.1.tar.gz", hash = "sha256:5b832bf82002ba8c4f6e5e25c1c0f5d468c22f043711544c716eaffdb30dd6f1"},
 ]
 
 [package.dependencies]
@@ -1561,4 +1561,4 @@ watchmedo = ["PyYAML (>=3.10)"]
 [metadata]
 lock-version = "2.0"
 python-versions = ">=3.9,<4.0"
-content-hash = "985f183aed7188a51964c30831939cae8060d906e8b07d4257e423016b6f3dd4"
+content-hash = "77af861c052decd1c194936575c15491a606fd763556b427e2c659d5ea7aae72"
@@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"
 
 [tool.poetry]
 name = "langchain-openai"
-version = "0.2.5"
+version = "0.2.6"
 description = "An integration package connecting OpenAI and LangChain"
 authors = []
 readme = "README.md"
@@ -24,7 +24,7 @@ ignore_missing_imports = true
 [tool.poetry.dependencies]
 python = ">=3.9,<4.0"
 langchain-core = "^0.3.15"
-openai = "^1.52.0"
+openai = "^1.54.0"
 tiktoken = ">=0.7,<1"
 
 [tool.ruff.lint]
@@ -3,6 +3,7 @@
 import base64
 import json
 from pathlib import Path
+from textwrap import dedent
 from typing import Any, AsyncIterator, List, Literal, Optional, cast
 
 import httpx
@@ -1018,3 +1019,45 @@ def test_audio_input_modality() -> None:
 
     assert isinstance(output, AIMessage)
     assert "audio" in output.additional_kwargs
+
+
+def test_prediction_tokens() -> None:
+    code = dedent("""
+    /// <summary>
+    /// Represents a user with a first name, last name, and username.
+    /// </summary>
+    public class User
+    {
+        /// <summary>
+        /// Gets or sets the user's first name.
+        /// </summary>
+        public string FirstName { get; set; }
+
+        /// <summary>
+        /// Gets or sets the user's last name.
+        /// </summary>
+        public string LastName { get; set; }
+
+        /// <summary>
+        /// Gets or sets the user's username.
+        /// </summary>
+        public string Username { get; set; }
+    }
+    """)
+
+    llm = ChatOpenAI(model="gpt-4o")
+    query = (
+        "Replace the Username property with an Email property. "
+        "Respond only with code, and with no markdown formatting."
+    )
+    response = llm.invoke(
+        [{"role": "user", "content": query}, {"role": "user", "content": code}],
+        prediction={"type": "content", "content": code},
+    )
+    assert isinstance(response, AIMessage)
+    assert response.response_metadata is not None
+    output_token_details = response.response_metadata["token_usage"][
+        "completion_tokens_details"
+    ]
+    assert output_token_details["accepted_prediction_tokens"] > 0
+    assert output_token_details["rejected_prediction_tokens"] > 0