From ae5695ad3210d70b5bde5be749f8657c9dc86e94 Mon Sep 17 00:00:00 2001 From: bair82 <60368178+bair82@users.noreply.github.com> Date: Mon, 30 Jan 2023 23:55:44 +0100 Subject: [PATCH] Update cohere.py (#795) When stop tokens are set in the Cohere LLM constructor, they are currently not stripped from the response, even though they should be. --- langchain/llms/cohere.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/langchain/llms/cohere.py b/langchain/llms/cohere.py index fd7b91067dd..adb50ad3cd8 100644 --- a/langchain/llms/cohere.py +++ b/langchain/llms/cohere.py @@ -122,6 +122,6 @@ class Cohere(LLM, BaseModel): text = response.generations[0].text # If stop tokens are provided, Cohere's endpoint returns them. # In order to make this consistent with other endpoints, we strip them. - if stop is not None: - text = enforce_stop_tokens(text, stop) + if stop is not None or self.stop is not None: + text = enforce_stop_tokens(text, params["stop_sequences"]) return text