openai[patch]: fix int test (#29395)

Bagatur 2025-01-23 13:23:01 -08:00 committed by GitHub
parent 8d566a8fe7
commit 317fb86fd9
4 changed files with 38 additions and 33 deletions

Makefile

@@ -21,7 +21,7 @@ test tests:
 	TIKTOKEN_CACHE_DIR=tiktoken_cache poetry run pytest --disable-socket --allow-unix-socket $(TEST_FILE)
 
 integration_test integration_tests:
-	poetry run pytest $(TEST_FILE)
+	poetry run pytest -n auto $(TEST_FILE)
 
 test_watch:
 	poetry run ptw --snapshot-update --now . -- -vv $(TEST_FILE)
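Note: the `-n auto` flag comes from pytest-xdist (added below as a test dependency); it starts one worker process per available CPU core and distributes the collected integration tests across them. A minimal sketch of the equivalent invocation from Python, assuming pytest and pytest-xdist are installed and that the test path is illustrative:

import sys

import pytest

if __name__ == "__main__":
    # "-n auto" is recognized only when pytest-xdist is installed; it spawns
    # one worker per CPU core and distributes collected tests across them.
    sys.exit(pytest.main(["-n", "auto", "tests/integration_tests/"]))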

poetry.lock

@@ -354,6 +354,20 @@ files = [
 [package.extras]
 test = ["pytest (>=6)"]
 
+[[package]]
+name = "execnet"
+version = "2.1.1"
+description = "execnet: rapid multi-Python deployment"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc"},
+    {file = "execnet-2.1.1.tar.gz", hash = "sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3"},
+]
+
+[package.extras]
+testing = ["hatch", "pre-commit", "pytest", "tox"]
+
 [[package]]
 name = "freezegun"
 version = "1.5.1"
@@ -1234,6 +1248,26 @@ files = [
 tomli = {version = ">=2.0.1,<3.0.0", markers = "python_version < \"3.11\""}
 watchdog = ">=2.0.0"
 
+[[package]]
+name = "pytest-xdist"
+version = "3.6.1"
+description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "pytest_xdist-3.6.1-py3-none-any.whl", hash = "sha256:9ed4adfb68a016610848639bb7e02c9352d5d9f03d04809919e2dafc3be4cca7"},
+    {file = "pytest_xdist-3.6.1.tar.gz", hash = "sha256:ead156a4db231eec769737f57668ef58a2084a34b2e55c4a8fa20d861107300d"},
+]
+
+[package.dependencies]
+execnet = ">=2.1"
+pytest = ">=7.0.0"
+
+[package.extras]
+psutil = ["psutil (>=3.0)"]
+setproctitle = ["setproctitle"]
+testing = ["filelock"]
+
 [[package]]
 name = "python-dateutil"
 version = "2.9.0.post0"
@@ -1848,4 +1882,4 @@ cffi = ["cffi (>=1.11)"]
 [metadata]
 lock-version = "2.0"
 python-versions = ">=3.9,<4.0"
-content-hash = "cdb63207c1dc6597eb2ade4ef0f12e56ac128e7277c9952fa35bc6880b537c9e"
+content-hash = "164cc02f120947dbd289f2ad8aa10f714cf2a983121b4b9c088c07fb1e99e8a4"

pyproject.toml

@@ -67,6 +67,7 @@ pytest-watcher = "^0.3.4"
 pytest-asyncio = "^0.21.1"
 pytest-cov = "^4.1.0"
 pytest-socket = "^0.6.0"
+pytest-xdist = "^3.6.1"
 [[tool.poetry.group.test.dependencies.numpy]]
 version = "^1"
 python = "<3.12"
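Because xdist runs tests in separate worker processes, anything shared between tests (caches, temp files, recorded state) should be made per-worker or otherwise guarded. A small conftest-style sketch, not part of this change, assuming pytest-xdist is active; the `worker_id` fixture is provided by the plugin and resolves to "master" when tests run without `-n`, and `scratch_dir` is a hypothetical fixture name:

import pytest


@pytest.fixture(scope="session")
def scratch_dir(tmp_path_factory, worker_id):
    # Give each xdist worker ("gw0", "gw1", ... or "master") its own scratch
    # directory so parallel test runs do not stomp on each other's files.
    return tmp_path_factory.mktemp(f"scratch-{worker_id}")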

ChatOpenAI integration tests

@@ -27,7 +27,6 @@ from langchain_tests.integration_tests.chat_models import (
     magic_function as invalid_magic_function,
 )
 from pydantic import BaseModel, Field
-from typing_extensions import TypedDict
 
 from langchain_openai import ChatOpenAI
 from tests.unit_tests.fake.callbacks import FakeCallbackHandler
@@ -643,7 +642,7 @@ def test_disable_parallel_tool_calling() -> None:
     assert len(result.tool_calls) == 1
 
 
-@pytest.mark.parametrize("model", ["gpt-4o-mini", "o1"])
+@pytest.mark.parametrize("model", ["gpt-4o-mini", "o1", "gpt-4"])
 def test_openai_structured_output(model: str) -> None:
     class MyModel(BaseModel):
         """A Person"""
@@ -658,24 +657,6 @@ def test_openai_structured_output(model: str) -> None:
     assert result.age == 27
 
 
-def test_structured_output_errors_with_legacy_models() -> None:
-    class MyModel(BaseModel):
-        """A Person"""
-
-        name: str
-        age: int
-
-    llm = ChatOpenAI(model="gpt-4").with_structured_output(MyModel)
-
-    with pytest.warns(UserWarning, match="with_structured_output"):
-        with pytest.raises(openai.BadRequestError):
-            _ = llm.invoke("I'm a 27 year old named Erick")
-    with pytest.warns(UserWarning, match="with_structured_output"):
-        with pytest.raises(openai.BadRequestError):
-            _ = list(llm.stream("I'm a 27 year old named Erick"))
-
-
 def test_openai_proxy() -> None:
     """Test ChatOpenAI with proxy."""
     chat_openai = ChatOpenAI(openai_proxy="http://localhost:8080")
@@ -1221,14 +1202,3 @@ def test_o1_doesnt_stream() -> None:
 def test_o1_stream_default_works() -> None:
     result = list(ChatOpenAI(model="o1").stream("say 'hi'"))
     assert len(result) > 0
-
-
-def test_structured_output_old_model() -> None:
-    class Output(TypedDict):
-        """output."""
-
-        foo: str
-
-    llm = ChatOpenAI(model="gpt-4").with_structured_output(Output)
-    output = llm.invoke("bar")
-    assert "foo" in output