Mirror of https://github.com/hwchase17/langchain.git (synced 2025-07-06 05:08:20 +00:00)
openai[patch]: add support for extra_body (#23404)
**Description:** Add support for passing the `extra_body` parameter. Some OpenAI-compatible APIs (for example [vLLM](https://docs.vllm.ai/en/latest/serving/openai_compatible_server.html#extra-parameters)) accept additional parameters that can be passed through `extra_body`. See the same question in https://github.com/openai/openai-python/issues/767.
This commit is contained in: parent c39521b70d, commit 1e3e05b0c3
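For context, here is a minimal usage sketch of the new parameter against a vLLM-style OpenAI-compatible server (the base URL, model name, and `guided_choice` option below are illustrative assumptions, not part of this change):

```python
from langchain_openai import ChatOpenAI

# Assumed local vLLM server exposing the OpenAI-compatible API; values are illustrative.
llm = ChatOpenAI(
    model="meta-llama/Llama-3-8b-Instruct",
    base_url="http://localhost:8000/v1",
    api_key="EMPTY",
    # Non-standard request properties forwarded in the request body,
    # e.g. vLLM's guided decoding options (hypothetical here).
    extra_body={"guided_choice": ["positive", "negative"]},
)
print(llm.invoke("Is this review positive or negative? 'Great product!'").content)
```

Anything placed in `extra_body` is intended to be merged into the outgoing JSON request rather than validated client-side, which makes it the natural escape hatch for server-specific options.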
@@ -346,6 +346,9 @@ class BaseChatOpenAI(BaseChatModel):
         http_client as well if you'd like a custom client for sync invocations."""
     stop: Optional[Union[List[str], str]] = Field(default=None, alias="stop_sequences")
     """Default stop sequences."""
+    extra_body: Optional[Mapping[str, Any]] = None
+    """Optional additional JSON properties to include in the request parameters when
+    making requests to OpenAI compatible APIs, such as vLLM."""
 
     class Config:
         """Configuration for this pydantic object."""
@@ -445,6 +448,9 @@ class BaseChatOpenAI(BaseChatModel):
             params["max_tokens"] = self.max_tokens
         if self.stop:
             params["stop"] = self.stop
+        if self.extra_body is not None:
+            params["extra_body"] = self.extra_body
+
         return params
 
     def _combine_llm_outputs(self, llm_outputs: List[Optional[dict]]) -> dict:
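Downstream, the merged `extra_body` entry is handed to the openai-python client, which accepts an `extra_body` argument and folds it into the HTTP request body. A rough sketch of the equivalent raw call (server URL, model, and the `guided_regex` option are assumptions, not taken from this diff):

```python
from openai import OpenAI

# Assumed local OpenAI-compatible endpoint; values are illustrative.
client = OpenAI(base_url="http://localhost:8000/v1", api_key="EMPTY")

response = client.chat.completions.create(
    model="meta-llama/Llama-3-8b-Instruct",
    messages=[{"role": "user", "content": "Give me a phone extension like 123-4567."}],
    # extra_body is merged into the JSON payload by the openai-python client.
    extra_body={"guided_regex": r"\d{3}-\d{4}"},  # hypothetical vLLM guided decoding
)
print(response.choices[0].message.content)
```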
@@ -137,6 +137,9 @@ class BaseOpenAI(BaseLLM):
     http_async_client: Union[Any, None] = None
     """Optional httpx.AsyncClient. Only used for async invocations. Must specify
         http_client as well if you'd like a custom client for sync invocations."""
+    extra_body: Optional[Mapping[str, Any]] = None
+    """Optional additional JSON properties to include in the request parameters when
+    making requests to OpenAI compatible APIs, such as vLLM."""
 
     class Config:
         """Configuration for this pydantic object."""
@@ -222,6 +225,9 @@ class BaseOpenAI(BaseLLM):
         if self.max_tokens is not None:
             normal_params["max_tokens"] = self.max_tokens
 
+        if self.extra_body is not None:
+            normal_params["extra_body"] = self.extra_body
+
         # Azure gpt-35-turbo doesn't support best_of
         # don't specify best_of if it is 1
         if self.best_of > 1:
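The completions-style wrapper gets the same treatment via its default-params dict. A minimal sketch with the legacy `OpenAI` LLM class, again assuming a local vLLM server (URL, model, and the `min_tokens` option are illustrative):

```python
from langchain_openai import OpenAI

# Assumed local OpenAI-compatible endpoint; values are illustrative.
llm = OpenAI(
    model="meta-llama/Llama-3-8b-Instruct",
    base_url="http://localhost:8000/v1",
    api_key="EMPTY",
    extra_body={"min_tokens": 16},  # hypothetical server-specific sampling option
)
print(llm.invoke("Write one sentence about the weather."))
```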