langchain/libs/partners/openai/tests/unit_tests/test_load.py
ccurme f7c4965fb6
openai[patch]: update imports in test (#30828)
Quick fix to unblock CI; will need to address in core separately.
2025-04-14 19:33:38 +00:00


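"""Tests for serializing and deserializing OpenAI and ChatOpenAI models via langchain_core.load."""
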
from langchain_core.load import dumpd, dumps, load, loads
from langchain_openai import ChatOpenAI, OpenAI


def test_loads_openai_llm() -> None:
    """Serialize an OpenAI LLM with dumps and restore it with loads."""
    llm = OpenAI(model="davinci", temperature=0.5, openai_api_key="hello", top_p=0.8)  # type: ignore[call-arg]
    llm_string = dumps(llm)
    llm2 = loads(llm_string, secrets_map={"OPENAI_API_KEY": "hello"})
    assert llm2.dict() == llm.dict()
    llm_string_2 = dumps(llm2)
    assert llm_string_2 == llm_string
    assert isinstance(llm2, OpenAI)


def test_load_openai_llm() -> None:
    """Serialize an OpenAI LLM with dumpd and restore it with load."""
    llm = OpenAI(model="davinci", temperature=0.5, openai_api_key="hello")  # type: ignore[call-arg]
    llm_obj = dumpd(llm)
    llm2 = load(llm_obj, secrets_map={"OPENAI_API_KEY": "hello"})
    assert llm2.dict() == llm.dict()
    assert dumpd(llm2) == llm_obj
    assert isinstance(llm2, OpenAI)


def test_loads_openai_chat() -> None:
    """Serialize a ChatOpenAI model with dumps and restore it with loads."""
    llm = ChatOpenAI(model="gpt-3.5-turbo", temperature=0.5, openai_api_key="hello")  # type: ignore[call-arg]
    llm_string = dumps(llm)
    llm2 = loads(llm_string, secrets_map={"OPENAI_API_KEY": "hello"})
    assert llm2.dict() == llm.dict()
    llm_string_2 = dumps(llm2)
    assert llm_string_2 == llm_string
    assert isinstance(llm2, ChatOpenAI)


def test_load_openai_chat() -> None:
    """Serialize a ChatOpenAI model with dumpd and restore it with load."""
    llm = ChatOpenAI(model="gpt-3.5-turbo", temperature=0.5, openai_api_key="hello")  # type: ignore[call-arg]
    llm_obj = dumpd(llm)
    llm2 = load(llm_obj, secrets_map={"OPENAI_API_KEY": "hello"})
    assert llm2.dict() == llm.dict()
    assert dumpd(llm2) == llm_obj
    assert isinstance(llm2, ChatOpenAI)