Mirror of https://github.com/hwchase17/langchain.git, synced 2025-08-13 14:50:00 +00:00
groq[patch]: update model for integration tests (#31440)
Llama-3.1 started failing consistently with
> groq.BadRequestError: Error code: 400 - {'error': {'message': "Failed to call a function. Please adjust your prompt. See 'failed_generation' for more details.", 'type': 'invalid_request_error', 'code': 'tool_use_failed', 'failed_generation': '<function=brave_search>{"query": "Hello!"}</function>'}}
This commit is contained in:
parent 5b9394319b
commit 5bf89628bf
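For context, the failure comes from Groq's tool-use path: the standard tests bind a tool to the model and invoke it, and llama-3.1-8b-instant started returning malformed tool calls. Below is a minimal sketch of that call pattern, not the test suite itself; the `magic_function` tool is an illustrative stand-in, and it assumes langchain-groq is installed with `GROQ_API_KEY` set.

```python
# Minimal sketch of the failing call pattern -- not the actual test code.
# Assumes langchain-groq is installed and GROQ_API_KEY is set; `magic_function`
# is an illustrative stand-in for whatever tool the standard tests bind.
from langchain_core.tools import tool
from langchain_groq import ChatGroq


@tool
def magic_function(value: int) -> int:
    """Apply a magic function to an integer."""
    return value + 2


llm = ChatGroq(model="llama-3.1-8b-instant", temperature=0)
llm_with_tools = llm.bind_tools([magic_function])

# On simple inputs like "Hello!" the model would sometimes emit an invented
# tool call (e.g. `<function=brave_search>...</function>`), which Groq's API
# rejects with the 400 `tool_use_failed` error quoted above.
response = llm_with_tools.invoke("Hello!")
print(response.tool_calls)
```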
@@ -29,14 +29,10 @@ class BaseTestGroq(ChatModelIntegrationTests):
         return True


-class TestGroqLlama(BaseTestGroq):
+class TestGroqGemma(BaseTestGroq):
     @property
     def chat_model_params(self) -> dict:
-        return {
-            "model": "llama-3.1-8b-instant",
-            "temperature": 0,
-            "rate_limiter": rate_limiter,
-        }
+        return {"model": "gemma2-9b-it", "rate_limiter": rate_limiter}

     @property
     def supports_json_mode(self) -> bool:
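After the change, the Gemma-based class slots into the shared standard-test harness the same way the Llama one did. The following is a hedged sketch of that wiring (the module-level rate limiter and the `supports_json_mode` override are elided, and the class name is made up to avoid confusion with the real file); it assumes the `langchain_tests` package layout.

```python
# Hedged sketch of how chat_model_params feeds the shared test harness; the
# suite constructs the model under test roughly as
# chat_model_class(**chat_model_params), so changing this dict retargets
# every standard integration test. Not the actual repo file.
from langchain_groq import ChatGroq
from langchain_tests.integration_tests import ChatModelIntegrationTests


class TestGroqGemmaSketch(ChatModelIntegrationTests):
    @property
    def chat_model_class(self) -> type[ChatGroq]:
        return ChatGroq

    @property
    def chat_model_params(self) -> dict:
        # gemma2-9b-it replaces llama-3.1-8b-instant as the model under test.
        return {"model": "gemma2-9b-it"}
```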