mirror of
https://github.com/hwchase17/langchain.git
synced 2025-06-26 16:43:35 +00:00
openai[patch]: increase max batch size for Azure OpenAI Embeddings API (#19532)
**Description:** Azure OpenAI has increased its maximum batch size for the Embeddings API from 16 to 2048, per this How-To [page](https://learn.microsoft.com/en-us/azure/ai-services/openai/how-to/embeddings?tabs=console#best-practices).

Co-authored-by: Bagatur <22008038+baskaryan@users.noreply.github.com>
This commit is contained in:
parent
56f4c5459b
commit
0b1e09029f
@@ -99,10 +99,10 @@ class AzureOpenAIEmbeddings(OpenAIEmbeddings):
         values["azure_ad_token"] = (
             convert_to_secret_str(azure_ad_token) if azure_ad_token else None
         )
-        # Azure OpenAI embedding models allow a maximum of 16 texts
+        # Azure OpenAI embedding models allow a maximum of 2048 texts
         # at a time in each batch
-        # See: https://learn.microsoft.com/en-us/azure/ai-services/openai/reference#embeddings
-        values["chunk_size"] = min(values["chunk_size"], 16)
+        # See: https://learn.microsoft.com/en-us/azure/ai-services/openai/how-to/embeddings?tabs=console#best-practices
+        values["chunk_size"] = min(values["chunk_size"], 2048)
         # For backwards compatibility. Before openai v1, no distinction was made
         # between azure_endpoint and base_url (openai_api_base).
         openai_api_base = values["openai_api_base"]
|
Loading…
Reference in New Issue
Block a user