From 42d5d988fa8f43684bc733e4e0842e40d4227d6e Mon Sep 17 00:00:00 2001
From: Benjamin
Date: Fri, 6 Jan 2023 16:48:52 +0100
Subject: [PATCH] add openai logit bias (#553)

Add the [`logit_bias`](https://beta.openai.com/docs/api-reference/completions/create#completions/create-logit_bias) parameter to OpenAI.
See [here](https://beta.openai.com/tokenizer) for the tokenizer used to look up token IDs.

NB: I see that others (like Cohere) have the same parameter, but since I don't have access to them, I don't want to risk making a mistake there.

---

Just to make sure the default `{}` works for OpenAI:

```
from langchain.llms import OpenAI

OPENAI_API_KEY = "XXX"

llm = OpenAI(openai_api_key=OPENAI_API_KEY)
llm.generate(['Write "test":'])

llm = OpenAI(
    openai_api_key=OPENAI_API_KEY,
    logit_bias={'9288': -100, '1332': -100, '14402': -100, '6208': -100},
)
llm.generate(['Write "test":'])
```
---
 langchain/llms/openai.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/langchain/llms/openai.py b/langchain/llms/openai.py
index 22840dcb5b2..7f1d360c3ee 100644
--- a/langchain/llms/openai.py
+++ b/langchain/llms/openai.py
@@ -54,6 +54,8 @@ class BaseOpenAI(BaseLLM, BaseModel):
     """Batch size to use when passing multiple documents to generate."""
     request_timeout: Optional[Union[float, Tuple[float, float]]] = None
     """Timeout for requests to OpenAI completion API. Default is 600 seconds."""
+    logit_bias: Optional[Dict[str, float]] = Field(default_factory=dict)
+    """Adjust the probability of specific tokens being generated."""
 
     class Config:
         """Configuration for this pydantic object."""
@@ -109,6 +111,7 @@ class BaseOpenAI(BaseLLM, BaseModel):
             "n": self.n,
             "best_of": self.best_of,
             "request_timeout": self.request_timeout,
+            "logit_bias": self.logit_bias,
         }
         return {**normal_params, **self.model_kwargs}
 
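
Not part of the patch, just a sketch of usage: instead of copying token IDs off the tokenizer page, the `logit_bias` dict can be built programmatically. The snippet below assumes the `tiktoken` package is installed and that the target model uses the GPT-2/GPT-3 byte-pair encoding (`"gpt2"`); the banned word list is only an illustration.

```
# Hedged example, not part of this patch: build logit_bias with tiktoken
# instead of reading token IDs off https://beta.openai.com/tokenizer.
# Assumes the target model uses the "gpt2" byte-pair encoding; verify
# the encoding for your model before relying on the IDs.
import tiktoken

from langchain.llms import OpenAI

OPENAI_API_KEY = "XXX"

enc = tiktoken.get_encoding("gpt2")

# Cover the common surface forms of the word we want to suppress.
banned_words = ["test", " test", "Test", " Test"]
logit_bias = {
    str(token_id): -100  # -100 effectively bans the token
    for word in banned_words
    for token_id in enc.encode(word)
}

llm = OpenAI(openai_api_key=OPENAI_API_KEY, logit_bias=logit_bias)
llm.generate(['Write "test":'])
```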