From 0e462b72ef8c2ce6c4864743a8e0c146f2418bfa Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Burak=20=C3=96m=C3=BCr?=
Date: Wed, 29 Nov 2023 04:02:38 +0100
Subject: [PATCH] Update openai/create_llm_result function to consider kwargs
 (#13815)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

- **Description:** updates `create_llm_result` function within `openai.py` to consider latest `params`,
- **Issue:** #8928
- **Dependencies:** -,
- **Tag maintainer:** -
- **Twitter handle:** [burkomr](https://twitter.com/burkomr)

---------

Co-authored-by: Burak Ömür
Co-authored-by: Harrison Chase
---
 libs/langchain/langchain/llms/openai.py | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/libs/langchain/langchain/llms/openai.py b/libs/langchain/langchain/llms/openai.py
index 5cdcc680218..eca554f5571 100644
--- a/libs/langchain/langchain/llms/openai.py
+++ b/libs/langchain/langchain/llms/openai.py
@@ -467,6 +467,7 @@ class BaseOpenAI(BaseLLM):
         return self.create_llm_result(
             choices,
             prompts,
+            params,
             token_usage,
             system_fingerprint=system_fingerprint,
         )
@@ -524,6 +525,7 @@ class BaseOpenAI(BaseLLM):
         return self.create_llm_result(
             choices,
             prompts,
+            params,
             token_usage,
             system_fingerprint=system_fingerprint,
         )
@@ -555,14 +557,16 @@ class BaseOpenAI(BaseLLM):
         self,
         choices: Any,
         prompts: List[str],
+        params: Dict[str, Any],
         token_usage: Dict[str, int],
         *,
         system_fingerprint: Optional[str] = None,
     ) -> LLMResult:
         """Create the LLMResult from the choices and prompts."""
         generations = []
+        n = params.get("n", self.n)
         for i, _ in enumerate(prompts):
-            sub_choices = choices[i * self.n : (i + 1) * self.n]
+            sub_choices = choices[i * n : (i + 1) * n]
             generations.append(
                 [
                     Generation(