Mirror of https://github.com/hwchase17/langchain.git (synced 2025-07-03 19:57:51 +00:00)
standard-tests[patch]: update groq and structured output test (#24781)
- Mixtral with Groq has started consistently failing tool calling tests. Here we restrict testing to llama 3.1.
- `.schema` is deprecated in pydantic proper in favor of `.model_json_schema`.
Parent: 4a05679fdb
Commit: e264ccf484
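As background for the second bullet, here is a minimal sketch (not part of the commit) of the pydantic v2 API the tests move to; the `Joke` model mirrors the one used in the structured-output test in the last hunk below:

```python
from pydantic import BaseModel


class Joke(BaseModel):
    """Joke to tell user."""

    setup: str
    punchline: str


# Pydantic v1 spelling, deprecated in pydantic v2 (emits a deprecation warning):
#     Joke.schema()
# Pydantic v2 replacement used by the updated test:
schema = Joke.model_json_schema()
print(sorted(schema["properties"]))  # ['punchline', 'setup']
```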
```diff
@@ -4,12 +4,15 @@ from typing import Type
 
 import pytest
 from langchain_core.language_models import BaseChatModel
-from langchain_standard_tests.integration_tests import (  # type: ignore[import-not-found]
-    ChatModelIntegrationTests,  # type: ignore[import-not-found]
+from langchain_core.rate_limiters import InMemoryRateLimiter
+from langchain_standard_tests.integration_tests import (
+    ChatModelIntegrationTests,
 )
 
 from langchain_groq import ChatGroq
 
+rate_limiter = InMemoryRateLimiter(requests_per_second=0.45)
+
 
 class BaseTestGroq(ChatModelIntegrationTests):
     @property
```
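For illustration, a sketch of how the new module-level rate limiter is meant to be consumed (assumes `GROQ_API_KEY` is set in the environment; the constructor kwargs mirror the `chat_model_params` in the next hunk):

```python
from langchain_core.rate_limiters import InMemoryRateLimiter
from langchain_groq import ChatGroq

# One shared limiter: 0.45 requests/second is roughly one request every 2.2 s.
rate_limiter = InMemoryRateLimiter(requests_per_second=0.45)

llm = ChatGroq(
    model="llama-3.1-8b-instant",
    temperature=0,
    rate_limiter=rate_limiter,  # the model waits on the limiter before each request
)
print(llm.invoke("Say hello in one word.").content)
```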
```diff
@@ -21,32 +24,13 @@ class BaseTestGroq(ChatModelIntegrationTests):
         super().test_tool_message_histories_list_content(model)
 
 
-class TestGroqMixtral(BaseTestGroq):
-    @property
-    def chat_model_params(self) -> dict:
-        return {
-            "temperature": 0,
-        }
-
-    @pytest.mark.xfail(
-        reason=("Fails with 'Failed to call a function. Please adjust your prompt.'")
-    )
-    def test_structured_output(self, model: BaseChatModel) -> None:
-        super().test_structured_output(model)
-
-    @pytest.mark.xfail(
-        reason=("May pass arguments: {'properties': {}, 'type': 'object'}")
-    )
-    def test_tool_calling_with_no_arguments(self, model: BaseChatModel) -> None:
-        super().test_tool_calling_with_no_arguments(model)
-
-
 class TestGroqLlama(BaseTestGroq):
     @property
     def chat_model_params(self) -> dict:
         return {
-            "model": "llama3-8b-8192",
+            "model": "llama-3.1-8b-instant",
             "temperature": 0,
+            "rate_limiter": rate_limiter,
         }
 
     @pytest.mark.xfail(
```
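For readers new to the shared suite: `chat_model_params` (together with a `chat_model_class` property returning `ChatGroq`, not shown in this hunk) is unpacked into the model constructor by the suite's fixture. A rough sketch of that pattern, stated as an assumption about `langchain_standard_tests` internals rather than its actual source:

```python
from langchain_groq import ChatGroq

# Illustrative stand-in for the suite's model fixture (assumption):
# build the model under test from the subclass-provided class and params.
chat_model_class = ChatGroq
chat_model_params = {
    "model": "llama-3.1-8b-instant",
    "temperature": 0,
}

model = chat_model_class(**chat_model_params)  # requires GROQ_API_KEY in the environment
print(type(model).__name__)  # ChatGroq
```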
```diff
@@ -237,7 +237,7 @@ class ChatModelIntegrationTests(ChatModelTests):
         assert isinstance(chunk, Joke)
 
         # Schema
-        chat = model.with_structured_output(Joke.schema())
+        chat = model.with_structured_output(Joke.model_json_schema())
         result = chat.invoke("Tell me a joke about cats.")
         assert isinstance(result, dict)
         assert set(result.keys()) == {"setup", "punchline"}
```
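The `.schema()` to `.model_json_schema()` switch matters because passing a JSON schema dict (rather than the pydantic class) makes `with_structured_output` return plain dicts, which is what the assertions above check. A sketch of the two behaviors (assumes `GROQ_API_KEY` is set):

```python
from pydantic import BaseModel

from langchain_groq import ChatGroq


class Joke(BaseModel):
    """Joke to tell user."""

    setup: str
    punchline: str


model = ChatGroq(model="llama-3.1-8b-instant", temperature=0)

# Pydantic class in: Joke instances out.
joke = model.with_structured_output(Joke).invoke("Tell me a joke about cats.")
print(isinstance(joke, Joke))  # True

# JSON schema dict in (pydantic v2 spelling): plain dicts out.
result = model.with_structured_output(Joke.model_json_schema()).invoke(
    "Tell me a joke about cats."
)
print(sorted(result))  # ['punchline', 'setup']
```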