From a99e6449134235e12eddb754cc3b8f35a72d970f Mon Sep 17 00:00:00 2001
From: Erick Friis
Date: Fri, 22 Mar 2024 14:43:24 -0700
Subject: [PATCH] openai[patch]: integration test structured output (#19459)

---
 .../integration_tests/chat_models/test_azure.py | 17 +++++++++++++++++
 .../integration_tests/chat_models/test_base.py  | 15 +++++++++++++++
 2 files changed, 32 insertions(+)

diff --git a/libs/partners/openai/tests/integration_tests/chat_models/test_azure.py b/libs/partners/openai/tests/integration_tests/chat_models/test_azure.py
index 24a5e26ee79..001b5296e31 100644
--- a/libs/partners/openai/tests/integration_tests/chat_models/test_azure.py
+++ b/libs/partners/openai/tests/integration_tests/chat_models/test_azure.py
@@ -1,4 +1,5 @@
 """Test AzureChatOpenAI wrapper."""
+
 import os
 from typing import Any, Optional
 
@@ -6,6 +7,7 @@ import pytest
 from langchain_core.callbacks import CallbackManager
 from langchain_core.messages import BaseMessage, BaseMessageChunk, HumanMessage
 from langchain_core.outputs import ChatGeneration, ChatResult, LLMResult
+from langchain_core.pydantic_v1 import BaseModel
 
 from langchain_openai import AzureChatOpenAI
 from tests.unit_tests.fake.callbacks import FakeCallbackHandler
@@ -223,3 +225,18 @@ def test_openai_invoke(llm: AzureChatOpenAI) -> None:
 
     result = llm.invoke("I'm Pickle Rick", config=dict(tags=["foo"]))
     assert isinstance(result.content, str)
+
+
+@pytest.mark.skip(reason="Need tool calling model deployed on azure")
+def test_openai_structured_output(llm: AzureChatOpenAI) -> None:
+    class MyModel(BaseModel):
+        """A Person"""
+
+        name: str
+        age: int
+
+    llm_structure = llm.with_structured_output(MyModel)
+    result = llm_structure.invoke("I'm a 27 year old named Erick")
+    assert isinstance(result, MyModel)
+    assert result.name == "Erick"
+    assert result.age == 27
diff --git a/libs/partners/openai/tests/integration_tests/chat_models/test_base.py b/libs/partners/openai/tests/integration_tests/chat_models/test_base.py
index e40ab2e654a..d5599c54ab7 100644
--- a/libs/partners/openai/tests/integration_tests/chat_models/test_base.py
+++ b/libs/partners/openai/tests/integration_tests/chat_models/test_base.py
@@ -1,4 +1,5 @@
 """Test ChatOpenAI chat model."""
+
 from typing import Any, Optional, cast
 
 import pytest
@@ -467,3 +468,17 @@ async def test_async_response_metadata_streaming() -> None:
         )
     )
     assert "content" in cast(BaseMessageChunk, full).response_metadata["logprobs"]
+
+
+def test_openai_structured_output() -> None:
+    class MyModel(BaseModel):
+        """A Person"""
+
+        name: str
+        age: int
+
+    llm = ChatOpenAI().with_structured_output(MyModel)
+    result = llm.invoke("I'm a 27 year old named Erick")
+    assert isinstance(result, MyModel)
+    assert result.name == "Erick"
+    assert result.age == 27
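
Note: for context, the pattern these integration tests exercise looks roughly like the sketch below. It is not part of the patch; it assumes an OpenAI API key is configured in the environment, and the Person model name is illustrative.

    from langchain_core.pydantic_v1 import BaseModel
    from langchain_openai import ChatOpenAI


    class Person(BaseModel):
        """A person extracted from free-form text."""

        name: str
        age: int


    # with_structured_output wraps the chat model so that invoke() returns
    # a parsed Person instance instead of a raw message.
    llm = ChatOpenAI().with_structured_output(Person)
    person = llm.invoke("I'm a 27 year old named Erick")
    print(person.name, person.age)  # expected: Erick 27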