Erick Friis
2024-11-15 10:06:24 -08:00
parent 4b641f87ae
commit 796da053c6
6 changed files with 61 additions and 245 deletions

@@ -1,64 +0,0 @@
"""Test __ModuleName__LLM llm."""
from __module_name__.llms import __ModuleName__LLM
def test_stream() -> None:
"""Test streaming tokens from OpenAI."""
llm = __ModuleName__LLM()
for token in llm.stream("I'm Pickle Rick"):
assert isinstance(token, str)
async def test_astream() -> None:
"""Test streaming tokens from OpenAI."""
llm = __ModuleName__LLM()
async for token in llm.astream("I'm Pickle Rick"):
assert isinstance(token, str)
async def test_abatch() -> None:
"""Test streaming tokens from __ModuleName__LLM."""
llm = __ModuleName__LLM()
result = await llm.abatch(["I'm Pickle Rick", "I'm not Pickle Rick"])
for token in result:
assert isinstance(token, str)
async def test_abatch_tags() -> None:
"""Test batch tokens from __ModuleName__LLM."""
llm = __ModuleName__LLM()
result = await llm.abatch(
["I'm Pickle Rick", "I'm not Pickle Rick"], config={"tags": ["foo"]}
)
for token in result:
assert isinstance(token, str)
def test_batch() -> None:
"""Test batch tokens from __ModuleName__LLM."""
llm = __ModuleName__LLM()
result = llm.batch(["I'm Pickle Rick", "I'm not Pickle Rick"])
for token in result:
assert isinstance(token, str)
async def test_ainvoke() -> None:
"""Test invoke tokens from __ModuleName__LLM."""
llm = __ModuleName__LLM()
result = await llm.ainvoke("I'm Pickle Rick", config={"tags": ["foo"]})
assert isinstance(result, str)
def test_invoke() -> None:
"""Test invoke tokens from __ModuleName__LLM."""
llm = __ModuleName__LLM()
result = llm.invoke("I'm Pickle Rick", config=dict(tags=["foo"]))
assert isinstance(result, str)

@@ -1,8 +1,23 @@
"""Test chat model integration."""
from langchain_standard_tests.unit_tests import ChatModelUnitTests
from typing import Tuple, Type
from langchain_core.language_models import BaseChatModel
from __module_name__.chat_models import Chat__ModuleName__
def test_initialization() -> None:
"""Test chat model initialization."""
Chat__ModuleName__()
class TestChat__ModuleName__StandardUnitTests(ChatModelUnitTests):
"""Standard LangChain interface tests, applied to __ModuleName__."""
@property
def chat_model_class(self) -> Type[BaseChatModel]:
"""Return chat model class."""
return Chat__ModuleName__
@property
def chat_model_params(self) -> dict:
# TODO: Update with chat model parameters
# e.g. return {"model": "claude-3-haiku-20240307"} for ChatAnthropic
return {}
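The `chat_model_params` property supplies the keyword arguments the standard tests use to construct the model. As a rough, hypothetical sketch (the model name and parameter values below are placeholders for illustration, not part of this template), a concrete integration might override it like this:

    @property
    def chat_model_params(self) -> dict:
        # Hypothetical values for illustration only; use your integration's
        # real model identifier and any required test credentials.
        return {"model": "model-name-001", "api_key": "test api key"}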

@@ -1,8 +1,43 @@
"""Test embedding model integration."""
from typing import Tuple, Type
from langchain_core.embeddings import Embeddings
from langchain_standard_tests.unit_tests.embeddings import EmbeddingsUnitTests
from __module_name__.embeddings import __ModuleName__Embeddings
class TestFireworksStandard(EmbeddingsUnitTests):
@property
def embeddings_class(self) -> Type[Embeddings]:
return __ModuleName__Embeddings
@property
def embeddings_params(self) -> dict:
return {"api_key": "test api key"}
@property
def init_from_env_params(self) -> Tuple[dict, dict, dict]:
"""Return env vars, init args, and expected instance attrs for initializing
from env vars.
This powers tests for initializing from environment variables."""
return (
# env vars
{
"__MODULE_NAME___API_KEY": "test api key",
},
# init vars - only pass things that are required and CAN'T be set
# via env vars
{},
# expected attributes once the object has been constructed
{
"api_key": "test api key",
},
)
def test_initialization() -> None:
"""Test embedding model initialization."""
__ModuleName__Embeddings()
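For orientation, the `init_from_env_params` tuple is consumed roughly as sketched below. This is a simplified illustration of the idea, not the actual `langchain_standard_tests` implementation (which also handles details such as secret wrapping of credentials):

    import os
    from unittest import mock

    def check_init_from_env(
        env_vars: dict, init_args: dict, expected_attrs: dict
    ) -> None:
        # Sketch: export the env vars, construct the model from init args
        # alone, then check that the expected attributes were picked up.
        with mock.patch.dict(os.environ, env_vars):
            model = __ModuleName__Embeddings(**init_args)
            for attr, value in expected_attrs.items():
                assert getattr(model, attr) == value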

@@ -1,8 +0,0 @@
"""Test __ModuleName__ Chat API wrapper."""
from __module_name__ import __ModuleName__LLM
def test_initialization() -> None:
"""Test integration initialization."""
__ModuleName__LLM()