community: add standard tests for Perplexity (#29534)

ccurme
2025-02-01 17:02:57 -05:00
committed by GitHub
parent 21d8d41595
commit 16a422f3fa
2 changed files with 51 additions and 1 deletion


@@ -0,0 +1,33 @@
"""Standard LangChain interface tests"""
from typing import Type
import pytest
from langchain_core.language_models import BaseChatModel
from langchain_tests.integration_tests import ChatModelIntegrationTests
from langchain_community.chat_models import ChatPerplexity
class TestPerplexityStandard(ChatModelIntegrationTests):
@property
def chat_model_class(self) -> Type[BaseChatModel]:
return ChatPerplexity
@property
def chat_model_params(self) -> dict:
return {"model": "sonar"}
@property
def returns_usage_metadata(self) -> bool:
# TODO: add usage metadata and delete this property
# https://docs.perplexity.ai/api-reference/chat-completions#response-usage
return False
@pytest.mark.xfail(reason="TODO: handle in integration.")
def test_double_messages_conversation(self, model: BaseChatModel) -> None:
super().test_double_messages_conversation(model)
@pytest.mark.xfail(reason="Raises 400: Custom stop words not supported.")
def test_stop_sequence(self, model: BaseChatModel) -> None:
super().test_stop_sequence(model)
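
For context, a minimal usage sketch (not part of this diff) of the model configuration the tests exercise. It assumes a valid Perplexity API key is exported as PPLX_API_KEY, which ChatPerplexity reads from the environment:

# Minimal usage sketch: instantiate the same model the standard tests
# target ("sonar") and invoke it once. Assumes PPLX_API_KEY is set.
from langchain_community.chat_models import ChatPerplexity

llm = ChatPerplexity(model="sonar")
response = llm.invoke("What is LangChain?")
print(response.content)

With the key in place, the suite above runs as an ordinary pytest invocation against the live API; the two xfail-marked tests and the disabled usage-metadata check document known gaps rather than failures in the harness.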