Mirror of https://github.com/hwchase17/langchain.git (synced 2025-09-16 06:53:16 +00:00)
Harrison/vertex (#5049)
Co-authored-by: Leonid Kuligin <kuligin@google.com>
Co-authored-by: Leonid Kuligin <lkuligin@yandex.ru>
Co-authored-by: sasha-gitg <44654632+sasha-gitg@users.noreply.github.com>
Co-authored-by: Justin Flick <Justinjayflick@gmail.com>
Co-authored-by: Justin Flick <jflick@homesite.com>
tests/integration_tests/chat_models/test_vertexai.py (new file, 88 lines)
@@ -0,0 +1,88 @@
"""Test Vertex AI API wrapper.
In order to run this test, you need to install the Vertex AI SDK (which is in private
preview) and be whitelisted to list the models:
pip install google-cloud-aiplatform>=1.25.0

Your end-user credentials will be used to make the calls (make sure you've run
`gcloud auth login` first).
"""
import pytest

from langchain.chat_models import ChatVertexAI
from langchain.chat_models.vertexai import _MessagePair, _parse_chat_history
from langchain.schema import (
    AIMessage,
    HumanMessage,
    SystemMessage,
)


def test_vertexai_single_call() -> None:
    model = ChatVertexAI()
    message = HumanMessage(content="Hello")
    response = model([message])
    assert isinstance(response, AIMessage)
    assert isinstance(response.content, str)
    assert model._llm_type == "vertexai"
    assert model.model_name == model.client._model_id


def test_vertexai_single_call_with_context() -> None:
    model = ChatVertexAI()
    raw_context = (
        "My name is Ned. You are my personal assistant. My favorite movies "
        "are Lord of the Rings and Hobbit."
    )
    question = (
        "Hello, could you recommend a good movie for me to watch this evening, please?"
    )
    context = SystemMessage(content=raw_context)
    message = HumanMessage(content=question)
    response = model([context, message])
    assert isinstance(response, AIMessage)
    assert isinstance(response.content, str)


def test_parse_chat_history_correct() -> None:
    text_context = (
        "My name is Ned. You are my personal assistant. My "
        "favorite movies are Lord of the Rings and Hobbit."
    )
    context = SystemMessage(content=text_context)
    text_question = (
        "Hello, could you recommend a good movie for me to watch this evening, please?"
    )
    question = HumanMessage(content=text_question)
    text_answer = (
        "Sure, you might enjoy The Lord of the Rings: The Fellowship of the Ring "
        "(2001): This is the first movie in the Lord of the Rings trilogy."
    )
    answer = AIMessage(content=text_answer)
    history = _parse_chat_history([context, question, answer, question, answer])
    assert history.system_message == context
    assert len(history.history) == 2
    assert history.history[0] == _MessagePair(question=question, answer=answer)


def test_parse_chat_history_wrong_sequence() -> None:
    text_question = (
        "Hello, could you recommend a good movie for me to watch this evening, please?"
    )
    question = HumanMessage(content=text_question)
    with pytest.raises(ValueError) as exc_info:
        _ = _parse_chat_history([question, question])
    assert (
        str(exc_info.value)
        == "A human message should follow a bot one, got human, human."
    )


def test_vertexai_single_call_fails_no_message() -> None:
    chat = ChatVertexAI()
    with pytest.raises(ValueError) as exc_info:
        _ = chat([])
    assert (
        str(exc_info.value)
        == "You should provide at least one message to start the chat!"
    )
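For readers unfamiliar with the helpers exercised above: `_MessagePair` is imported from langchain.chat_models.vertexai, but the container returned by `_parse_chat_history` is not shown in this diff. Below is a minimal sketch of the behavior the assertions imply, not the library's actual implementation; the `_ChatHistory` name and its fields are assumptions inferred from the uses of `history.system_message` and `history.history`.

from dataclasses import dataclass, field
from typing import List, Optional

from langchain.schema import AIMessage, BaseMessage, HumanMessage, SystemMessage


@dataclass
class _MessagePair:  # the real _MessagePair lives in langchain.chat_models.vertexai
    question: HumanMessage
    answer: AIMessage


@dataclass
class _ChatHistory:  # hypothetical container name; only its two fields are implied by the tests
    history: List[_MessagePair] = field(default_factory=list)
    system_message: Optional[SystemMessage] = None


def _parse_chat_history(history: List[BaseMessage]) -> _ChatHistory:
    """Sketch: split off an optional leading SystemMessage, then pair the rest."""
    chat_history = _ChatHistory()
    if history and isinstance(history[0], SystemMessage):
        chat_history.system_message = history[0]
        history = history[1:]
    # Remaining messages are expected to alternate human question / AI answer;
    # a trailing unpaired message is silently ignored in this sketch.
    for question, answer in zip(history[0::2], history[1::2]):
        if not isinstance(question, HumanMessage) or not isinstance(answer, AIMessage):
            # BaseMessage.type is "human", "ai" or "system", which produces the
            # exact error string checked in test_parse_chat_history_wrong_sequence.
            raise ValueError(
                "A human message should follow a bot one, "
                f"got {question.type}, {answer.type}."
            )
        chat_history.history.append(_MessagePair(question=question, answer=answer))
    return chat_history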
tests/integration_tests/embeddings/test_vertexai.py (new file, 25 lines)
@@ -0,0 +1,25 @@
"""Test Vertex AI API wrapper.
In order to run this test, you need to install the Vertex AI SDK:
pip install google-cloud-aiplatform>=1.25.0

Your end-user credentials will be used to make the calls (make sure you've run
`gcloud auth login` first).
"""
from langchain.embeddings import VertexAIEmbeddings


def test_embedding_documents() -> None:
    documents = ["foo bar"]
    model = VertexAIEmbeddings()
    output = model.embed_documents(documents)
    assert len(output) == 1
    assert len(output[0]) == 768
    assert model._llm_type == "vertexai"
    assert model.model_name == model.client._model_id


def test_embedding_query() -> None:
    document = "foo bar"
    model = VertexAIEmbeddings()
    output = model.embed_query(document)
    assert len(output) == 768
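As a usage reference, this is roughly how the embeddings wrapper tested above is called outside of pytest — a sketch assuming default credentials from `gcloud auth login`; the 768 in the assertions is the vector dimension the tests expect from the default Vertex AI embedding model.

from langchain.embeddings import VertexAIEmbeddings

embeddings = VertexAIEmbeddings()

# Batch-embed documents; per the tests above, each result is a list of 768 floats.
doc_vectors = embeddings.embed_documents(["foo bar", "baz qux"])

# Embed a single query string, e.g. for similarity search against the documents.
query_vector = embeddings.embed_query("foo")

print(len(doc_vectors), len(doc_vectors[0]), len(query_vector))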
tests/integration_tests/llms/test_vertexai.py (new file, 18 lines)
@@ -0,0 +1,18 @@
"""Test Vertex AI API wrapper.
In order to run this test, you need to install the Vertex AI SDK (which is in private
preview) and be whitelisted to list the models:
pip install google-cloud-aiplatform>=1.25.0

Your end-user credentials will be used to make the calls (make sure you've run
`gcloud auth login` first).
"""
from langchain.llms import VertexAI


def test_vertex_call() -> None:
    llm = VertexAI()
    output = llm("Say foo:")
    assert isinstance(output, str)
    assert llm._llm_type == "vertexai"
    assert llm.model_name == llm.client._model_id
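For completeness, a minimal sketch of calling the plain-text VertexAI LLM the same way the test does. It assumes `gcloud auth login` has been run and the Vertex AI API is enabled for the active project; no constructor parameters beyond the defaults are shown.

from langchain.llms import VertexAI

llm = VertexAI()

# Calling the LLM with a prompt string returns the generated completion as a string.
output = llm("Say foo:")
print(output)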