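"""Tests for serializing and deserializing OpenAI models with ``langchain_core.load``.

Each test round-trips a model through ``dumps``/``loads`` (JSON string) or
``dumpd``/``load`` (plain dict), supplying the API key via ``secrets_map`` on
load, since serialized objects reference secrets by name rather than value.
"""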
from langchain_core.load.dump import dumpd, dumps
from langchain_core.load.load import load, loads

from langchain_openai import ChatOpenAI, OpenAI


def test_loads_openai_llm() -> None:
    # Serialize a completion LLM to a JSON string and revive it with `loads`.
    llm = OpenAI(model="davinci", temperature=0.5, openai_api_key="hello", top_p=0.8)  # type: ignore[call-arg]
    llm_string = dumps(llm)
    llm2 = loads(llm_string, secrets_map={"OPENAI_API_KEY": "hello"})

    # The revived object must match the original and re-serialize identically.
    assert llm2.dict() == llm.dict()
    llm_string_2 = dumps(llm2)
    assert llm_string_2 == llm_string
    assert isinstance(llm2, OpenAI)


def test_load_openai_llm() -> None:
    # Serialize a completion LLM to a plain dict and revive it with `load`.
    llm = OpenAI(model="davinci", temperature=0.5, openai_api_key="hello")  # type: ignore[call-arg]
    llm_obj = dumpd(llm)
    llm2 = load(llm_obj, secrets_map={"OPENAI_API_KEY": "hello"})

    assert llm2.dict() == llm.dict()
    assert dumpd(llm2) == llm_obj
    assert isinstance(llm2, OpenAI)


def test_loads_openai_chat() -> None:
    # Round-trip a chat model through the JSON string representation.
    llm = ChatOpenAI(model="gpt-3.5-turbo", temperature=0.5, openai_api_key="hello")  # type: ignore[call-arg]
    llm_string = dumps(llm)
    llm2 = loads(llm_string, secrets_map={"OPENAI_API_KEY": "hello"})

    assert llm2.dict() == llm.dict()
    llm_string_2 = dumps(llm2)
    assert llm_string_2 == llm_string
    assert isinstance(llm2, ChatOpenAI)


def test_load_openai_chat() -> None:
    # Round-trip a chat model through the dict representation.
    llm = ChatOpenAI(model="gpt-3.5-turbo", temperature=0.5, openai_api_key="hello")  # type: ignore[call-arg]
    llm_obj = dumpd(llm)
    llm2 = load(llm_obj, secrets_map={"OPENAI_API_KEY": "hello"})

    assert llm2.dict() == llm.dict()
    assert dumpd(llm2) == llm_obj
    assert isinstance(llm2, ChatOpenAI)