From 69d39e2173fcb44cdcd334cb912acaf7b148dff6 Mon Sep 17 00:00:00 2001 From: umair mehmood Date: Mon, 20 Nov 2023 06:49:55 +0500 Subject: [PATCH] fix: VLLMOpenAI -- create() got an unexpected keyword argument 'api_key' (#13517) The issue was occurring because of the `openai` update in Completions: it's not accepting `api_key` and `api_base` args. The fix is to check the openai version and, if it's at v1, remove these keys from the args before passing them to `Completion.create(...)` when sending from `VLLMOpenAI`. Fixed: #13507 @eyu @efriis @hwchase17 --------- Co-authored-by: Erick Friis --- libs/langchain/langchain/llms/vllm.py | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/libs/langchain/langchain/llms/vllm.py b/libs/langchain/langchain/llms/vllm.py index 537a9bbb6f3..f33e3cef96d 100644 --- a/libs/langchain/langchain/llms/vllm.py +++ b/libs/langchain/langchain/llms/vllm.py @@ -5,6 +5,7 @@ from langchain.llms.base import BaseLLM from langchain.llms.openai import BaseOpenAI from langchain.pydantic_v1 import Field, root_validator from langchain.schema.output import Generation, LLMResult +from langchain.utils.openai import is_openai_v1 class VLLM(BaseLLM): @@ -148,17 +149,21 @@ class VLLMOpenAI(BaseOpenAI): @property def _invocation_params(self) -> Dict[str, Any]: """Get the parameters used to invoke the model.""" - openai_creds: Dict[str, Any] = { - "api_key": self.openai_api_key, - "api_base": self.openai_api_base, - } - return { + params: Dict[str, Any] = { "model": self.model_name, - **openai_creds, **self._default_params, "logit_bias": None, } + if not is_openai_v1(): + params.update( + { + "api_key": self.openai_api_key, + "api_base": self.openai_api_base, + } + ) + + return params @property def _llm_type(self) -> str: