openai[patch]: openai proxy added to base embeddings (#24539)

- [ ] **PR title**: "langchain-openai: openai proxy added to base
embeddings"

- [ ] **PR message**: 
    - **Description:** 
Dear LangChain developers, you already support a proxy for the ChatOpenAI
implementation in this package. Anyone who needs a proxy for chat is likely to
need one for OpenAIEmbeddings as well, so this PR adds the same proxy support
to OpenAIEmbeddings. A minimal usage sketch follows below.
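
A rough usage sketch of what this enables (not part of the diff; the proxy URL is a placeholder and `OPENAI_API_KEY` is assumed to be set in the environment):

```python
from langchain_openai import OpenAIEmbeddings

# With this change, OpenAIEmbeddings builds an httpx.Client(proxy=...) under the
# hood when `openai_proxy` is set, mirroring the existing ChatOpenAI behavior.
embeddings = OpenAIEmbeddings(
    model="text-embedding-3-small",
    openai_proxy="http://proxy.example:8080",  # placeholder proxy URL
)
vector = embeddings.embed_query("hello world")
print(len(vector))
```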

@baskaryan

---------

Co-authored-by: karpov <karpov@dohod.ru>
Co-authored-by: Bagatur <baskaryan@gmail.com>
Pavel authored on 2024-07-28 23:54:13 +03:00; committed by GitHub
parent 821196c4ee
commit 7fcfe7c1f4
2 changed files with 49 additions and 6 deletions


@@ -423,10 +423,19 @@ class BaseChatOpenAI(BaseChatModel):
             "default_headers": values["default_headers"],
             "default_query": values["default_query"],
         }
-        openai_proxy = values["openai_proxy"]
+        if values["openai_proxy"] and (
+            values["http_client"] or values["http_async_client"]
+        ):
+            openai_proxy = values["openai_proxy"]
+            http_client = values["http_client"]
+            http_async_client = values["http_async_client"]
+            raise ValueError(
+                "Cannot specify 'openai_proxy' if one of "
+                "'http_client'/'http_async_client' is already specified. Received:\n"
+                f"{openai_proxy=}\n{http_client=}\n{http_async_client=}"
+            )
         if not values.get("client"):
-            if openai_proxy and not values["http_client"]:
+            if values["openai_proxy"] and not values["http_client"]:
                 try:
                     import httpx
                 except ImportError as e:
@@ -434,13 +443,13 @@ class BaseChatOpenAI(BaseChatModel):
                         "Could not import httpx python package. "
                         "Please install it with `pip install httpx`."
                     ) from e
-                values["http_client"] = httpx.Client(proxy=openai_proxy)
+                values["http_client"] = httpx.Client(proxy=values["openai_proxy"])
             sync_specific = {"http_client": values["http_client"]}
             values["client"] = openai.OpenAI(
                 **client_params, **sync_specific
             ).chat.completions
         if not values.get("async_client"):
-            if openai_proxy and not values["http_async_client"]:
+            if values["openai_proxy"] and not values["http_async_client"]:
                 try:
                     import httpx
                 except ImportError as e:
@@ -448,7 +457,9 @@
                         "Could not import httpx python package. "
                         "Please install it with `pip install httpx`."
                     ) from e
-                values["http_async_client"] = httpx.AsyncClient(proxy=openai_proxy)
+                values["http_async_client"] = httpx.AsyncClient(
+                    proxy=values["openai_proxy"]
+                )
             async_specific = {"http_client": values["http_async_client"]}
             values["async_client"] = openai.AsyncOpenAI(
                 **client_params, **async_specific

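The new guard above means a proxy can no longer be combined with an explicitly supplied client. A short sketch of the resulting failure mode (not part of the diff; the proxy URL is a placeholder):

```python
import httpx

from langchain_openai import ChatOpenAI

# Supplying both `openai_proxy` and an explicit `http_client` is now rejected,
# because the explicit client would silently bypass the proxy setting.
try:
    ChatOpenAI(
        openai_proxy="http://proxy.example:8080",  # placeholder proxy URL
        http_client=httpx.Client(),  # conflicts with openai_proxy
    )
except ValueError as err:
    print(err)
```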

@@ -282,12 +282,44 @@ class OpenAIEmbeddings(BaseModel, Embeddings):
             "default_headers": values["default_headers"],
             "default_query": values["default_query"],
         }
+        if values["openai_proxy"] and (
+            values["http_client"] or values["http_async_client"]
+        ):
+            openai_proxy = values["openai_proxy"]
+            http_client = values["http_client"]
+            http_async_client = values["http_async_client"]
+            raise ValueError(
+                "Cannot specify 'openai_proxy' if one of "
+                "'http_client'/'http_async_client' is already specified. Received:\n"
+                f"{openai_proxy=}\n{http_client=}\n{http_async_client=}"
+            )
         if not values.get("client"):
+            if values["openai_proxy"] and not values["http_client"]:
+                try:
+                    import httpx
+                except ImportError as e:
+                    raise ImportError(
+                        "Could not import httpx python package. "
+                        "Please install it with `pip install httpx`."
+                    ) from e
+                values["http_client"] = httpx.Client(proxy=values["openai_proxy"])
             sync_specific = {"http_client": values["http_client"]}
             values["client"] = openai.OpenAI(
                 **client_params, **sync_specific
             ).embeddings
         if not values.get("async_client"):
+            if values["openai_proxy"] and not values["http_async_client"]:
+                try:
+                    import httpx
+                except ImportError as e:
+                    raise ImportError(
+                        "Could not import httpx python package. "
+                        "Please install it with `pip install httpx`."
+                    ) from e
+                values["http_async_client"] = httpx.AsyncClient(
+                    proxy=values["openai_proxy"]
+                )
             async_specific = {"http_client": values["http_async_client"]}
             values["async_client"] = openai.AsyncOpenAI(
                 **client_params, **async_specific
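
For the async path, the same wiring produces an `httpx.AsyncClient(proxy=...)` that backs the async embeddings client. A rough usage sketch (not part of the diff; placeholder proxy URL, `OPENAI_API_KEY` assumed to be set):

```python
import asyncio

from langchain_openai import OpenAIEmbeddings


async def main() -> None:
    # `openai_proxy` now also applies to the async client, which is built as
    # httpx.AsyncClient(proxy=...) when no `http_async_client` is supplied.
    embeddings = OpenAIEmbeddings(
        openai_proxy="http://proxy.example:8080",  # placeholder proxy URL
    )
    vectors = await embeddings.aembed_documents(["first document", "second document"])
    print(len(vectors), len(vectors[0]))


asyncio.run(main())
```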