cli: standard tests in cli, test that they run, skip vectorstore tests (#28521)

Erick Friis
2024-12-05 00:38:32 -08:00
committed by GitHub
parent c5acedddc2
commit 43c35d19d4
36 changed files with 1573 additions and 631 deletions

View File

@@ -1,64 +1,21 @@
"""Test Chat__ModuleName__ chat model."""
from typing import Type
from __module_name__.chat_models import Chat__ModuleName__
from langchain_tests.integration_tests import ChatModelIntegrationTests
def test_stream() -> None:
"""Test streaming tokens from OpenAI."""
llm = Chat__ModuleName__()
class TestChatParrotLinkIntegration(ChatModelIntegrationTests):
@property
def chat_model_class(self) -> Type[Chat__ModuleName__]:
return Chat__ModuleName__
for token in llm.stream("I'm Pickle Rick"):
assert isinstance(token.content, str)
async def test_astream() -> None:
"""Test streaming tokens from OpenAI."""
llm = Chat__ModuleName__()
async for token in llm.astream("I'm Pickle Rick"):
assert isinstance(token.content, str)
async def test_abatch() -> None:
"""Test streaming tokens from Chat__ModuleName__."""
llm = Chat__ModuleName__()
result = await llm.abatch(["I'm Pickle Rick", "I'm not Pickle Rick"])
for token in result:
assert isinstance(token.content, str)
async def test_abatch_tags() -> None:
"""Test batch tokens from Chat__ModuleName__."""
llm = Chat__ModuleName__()
result = await llm.abatch(
["I'm Pickle Rick", "I'm not Pickle Rick"], config={"tags": ["foo"]}
)
for token in result:
assert isinstance(token.content, str)
def test_batch() -> None:
"""Test batch tokens from Chat__ModuleName__."""
llm = Chat__ModuleName__()
result = llm.batch(["I'm Pickle Rick", "I'm not Pickle Rick"])
for token in result:
assert isinstance(token.content, str)
async def test_ainvoke() -> None:
"""Test invoke tokens from Chat__ModuleName__."""
llm = Chat__ModuleName__()
result = await llm.ainvoke("I'm Pickle Rick", config={"tags": ["foo"]})
assert isinstance(result.content, str)
def test_invoke() -> None:
"""Test invoke tokens from Chat__ModuleName__."""
llm = Chat__ModuleName__()
result = llm.invoke("I'm Pickle Rick", config=dict(tags=["foo"]))
assert isinstance(result.content, str)
@property
def chat_model_params(self) -> dict:
# These should be parameters used to initialize your integration for testing
return {
"model": "bird-brain-001",
"temperature": 0,
"parrot_buffer_length": 50,
}
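The two properties above are the only integration-specific code; the test methods themselves are inherited from ChatModelIntegrationTests. A rough, illustrative sketch of how a suite like that can consume the properties (the real implementation lives in langchain_tests and may differ in detail):

class SketchChatModelSuite:
    # Illustrative only -- not the langchain_tests implementation.
    @property
    def chat_model_class(self):
        raise NotImplementedError  # supplied by the concrete test class

    @property
    def chat_model_params(self) -> dict:
        return {}

    def test_invoke(self) -> None:
        # Each inherited test builds the model from the configuration
        # properties and exercises one behavior.
        model = self.chat_model_class(**self.chat_model_params)
        result = model.invoke("Hello")
        assert isinstance(result.content, str)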

View File

@@ -1,20 +1,16 @@
"""Test __ModuleName__ embeddings."""
from typing import Type
from __module_name__.embeddings import __ModuleName__Embeddings
from langchain_tests.integration_tests import EmbeddingsIntegrationTests
def test___module_name___embedding_documents() -> None:
"""Test cohere embeddings."""
documents = ["foo bar"]
embedding = __ModuleName__Embeddings()
output = embedding.embed_documents(documents)
assert len(output) == 1
assert len(output[0]) > 0
class TestParrotLinkEmbeddingsIntegration(EmbeddingsIntegrationTests):
@property
def embeddings_class(self) -> Type[__ModuleName__Embeddings]:
return __ModuleName__Embeddings
def test___module_name___embedding_query() -> None:
"""Test cohere embeddings."""
document = "foo bar"
embedding = __ModuleName__Embeddings()
output = embedding.embed_query(document)
assert len(output) > 0
@property
def embedding_model_params(self) -> dict:
return {"model": "nest-embed-001"}

View File

@@ -1,64 +0,0 @@
"""Test __ModuleName__LLM llm."""
from __module_name__.llms import __ModuleName__LLM
def test_stream() -> None:
"""Test streaming tokens from OpenAI."""
llm = __ModuleName__LLM()
for token in llm.stream("I'm Pickle Rick"):
assert isinstance(token, str)
async def test_astream() -> None:
"""Test streaming tokens from OpenAI."""
llm = __ModuleName__LLM()
async for token in llm.astream("I'm Pickle Rick"):
assert isinstance(token, str)
async def test_abatch() -> None:
"""Test streaming tokens from __ModuleName__LLM."""
llm = __ModuleName__LLM()
result = await llm.abatch(["I'm Pickle Rick", "I'm not Pickle Rick"])
for token in result:
assert isinstance(token, str)
async def test_abatch_tags() -> None:
"""Test batch tokens from __ModuleName__LLM."""
llm = __ModuleName__LLM()
result = await llm.abatch(
["I'm Pickle Rick", "I'm not Pickle Rick"], config={"tags": ["foo"]}
)
for token in result:
assert isinstance(token, str)
def test_batch() -> None:
"""Test batch tokens from __ModuleName__LLM."""
llm = __ModuleName__LLM()
result = llm.batch(["I'm Pickle Rick", "I'm not Pickle Rick"])
for token in result:
assert isinstance(token, str)
async def test_ainvoke() -> None:
"""Test invoke tokens from __ModuleName__LLM."""
llm = __ModuleName__LLM()
result = await llm.ainvoke("I'm Pickle Rick", config={"tags": ["foo"]})
assert isinstance(result, str)
def test_invoke() -> None:
"""Test invoke tokens from __ModuleName__LLM."""
llm = __ModuleName__LLM()
result = llm.invoke("I'm Pickle Rick", config=dict(tags=["foo"]))
assert isinstance(result, str)

View File

@@ -0,0 +1,24 @@
+from typing import Type
+
+from __module_name__.retrievers import __ModuleName__Retriever
+from langchain_tests.integration_tests import (
+    RetrieversIntegrationTests,
+)
+
+
+class Test__ModuleName__Retriever(RetrieversIntegrationTests):
+    @property
+    def retriever_constructor(self) -> Type[__ModuleName__Retriever]:
+        """Get an empty vectorstore for unit tests."""
+        return __ModuleName__Retriever
+
+    @property
+    def retriever_constructor_params(self) -> dict:
+        return {"k": 2}
+
+    @property
+    def retriever_query_example(self) -> str:
+        """
+        Returns a dictionary representing the "args" of an example retriever call.
+        """
+        return "example query"

View File

@@ -0,0 +1,27 @@
+from typing import Type
+
+from __module_name__.tools import __ModuleName__Tool
+from langchain_tests.integration_tests import ToolsIntegrationTests
+
+
+class TestParrotMultiplyToolIntegration(ToolsIntegrationTests):
+    @property
+    def tool_constructor(self) -> Type[__ModuleName__Tool]:
+        return __ModuleName__Tool
+
+    @property
+    def tool_constructor_params(self) -> dict:
+        # if your tool constructor instead required initialization arguments like
+        # `def __init__(self, some_arg: int):`, you would return those here
+        # as a dictionary, e.g.: `return {'some_arg': 42}`
+        return {}
+
+    @property
+    def tool_invoke_params_example(self) -> dict:
+        """
+        Returns a dictionary representing the "args" of an example tool call.
+
+        This should NOT be a ToolCall dict - i.e. it should not
+        have {"name", "id", "args"} keys.
+        """
+        return {"a": 2, "b": 3}

View File

@@ -0,0 +1,37 @@
+from typing import AsyncGenerator, Generator
+
+import pytest
+from __module_name__.vectorstores import __ModuleName__VectorStore
+from langchain_core.vectorstores import VectorStore
+from langchain_tests.integration_tests import (
+    AsyncReadWriteTestSuite,
+    ReadWriteTestSuite,
+)
+
+
+class Test__ModuleName__VectorStoreSync(ReadWriteTestSuite):
+    @pytest.fixture()
+    def vectorstore(self) -> Generator[VectorStore, None, None]:  # type: ignore
+        """Get an empty vectorstore for unit tests."""
+        store = __ModuleName__VectorStore()
+        # note: store should be EMPTY at this point
+        # if you need to delete data, you may do so here
+        try:
+            yield store
+        finally:
+            # cleanup operations, or deleting data
+            pass
+
+
+class Test__ModuleName__VectorStoreAsync(AsyncReadWriteTestSuite):
+    @pytest.fixture()
+    async def vectorstore(self) -> AsyncGenerator[VectorStore, None]:  # type: ignore
+        """Get an empty vectorstore for unit tests."""
+        store = __ModuleName__VectorStore()
+        # note: store should be EMPTY at this point
+        # if you need to delete data, you may do so here
+        try:
+            yield store
+        finally:
+            # cleanup operations, or deleting data
+            pass
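Per the commit title, the vectorstore suites are skipped when the CLI verifies that the generated tests run, since the bare __ModuleName__VectorStore has no backing store configured. One way such a skip can be expressed with plain pytest, shown only as an illustrative sketch (this diff does not show the mechanism the commit actually uses):

import pytest

# Illustrative only: a module-level skip marker; the commit may instead skip
# these suites through test-run configuration.
pytestmark = pytest.mark.skip(
    reason="vectorstore integration tests require a configured store"
)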

View File

@@ -1,8 +1,21 @@
"""Test chat model integration."""
from typing import Type
from __module_name__.chat_models import Chat__ModuleName__
from langchain_tests.unit_tests import ChatModelUnitTests
def test_initialization() -> None:
"""Test chat model initialization."""
Chat__ModuleName__()
class TestChat__ModuleName__Unit(ChatModelUnitTests):
@property
def chat_model_class(self) -> Type[Chat__ModuleName__]:
return Chat__ModuleName__
@property
def chat_model_params(self) -> dict:
# These should be parameters used to initialize your integration for testing
return {
"model": "bird-brain-001",
"temperature": 0,
"parrot_buffer_length": 50,
}

View File

@@ -1,8 +1,16 @@
"""Test embedding model integration."""
from typing import Type
from __module_name__.embeddings import __ModuleName__Embeddings
from langchain_tests.unit_tests import EmbeddingsUnitTests
def test_initialization() -> None:
"""Test embedding model initialization."""
__ModuleName__Embeddings()
class TestParrotLinkEmbeddingsUnit(EmbeddingsUnitTests):
@property
def embeddings_class(self) -> Type[__ModuleName__Embeddings]:
return __ModuleName__Embeddings
@property
def embedding_model_params(self) -> dict:
return {"model": "nest-embed-001"}

View File

@@ -1,12 +0,0 @@
-from __module_name__ import __all__
-
-EXPECTED_ALL = [
-    "__ModuleName__LLM",
-    "Chat__ModuleName__",
-    "__ModuleName__VectorStore",
-    "__ModuleName__Embeddings",
-]
-
-
-def test_all_imports() -> None:
-    assert sorted(EXPECTED_ALL) == sorted(__all__)

View File

@@ -1,8 +0,0 @@
"""Test __ModuleName__ Chat API wrapper."""
from __module_name__ import __ModuleName__LLM
def test_initialization() -> None:
"""Test integration initialization."""
__ModuleName__LLM()

View File

@@ -0,0 +1,27 @@
+from typing import Type
+
+from __module_name__.tools import __ModuleName__Tool
+from langchain_tests.unit_tests import ToolsUnitTests
+
+
+class TestParrotMultiplyToolUnit(ToolsUnitTests):
+    @property
+    def tool_constructor(self) -> Type[__ModuleName__Tool]:
+        return __ModuleName__Tool
+
+    @property
+    def tool_constructor_params(self) -> dict:
+        # if your tool constructor instead required initialization arguments like
+        # `def __init__(self, some_arg: int):`, you would return those here
+        # as a dictionary, e.g.: `return {'some_arg': 42}`
+        return {}
+
+    @property
+    def tool_invoke_params_example(self) -> dict:
+        """
+        Returns a dictionary representing the "args" of an example tool call.
+
+        This should NOT be a ToolCall dict - i.e. it should not
+        have {"name", "id", "args"} keys.
+        """
+        return {"a": 2, "b": 3}

View File

@@ -1,6 +0,0 @@
-from __module_name__.vectorstores import __ModuleName__VectorStore
-
-
-def test_initialization() -> None:
-    """Test integration vectorstore initialization."""
-    __ModuleName__VectorStore()