docs: remove duplicate quote in AzureOpenAIEmbeddings doc (#18315)

- **Description:** Remove a duplicated quote in the `AzureOpenAIEmbeddings` and `OpenAIEmbeddings` docstrings, and remove trailing spaces.
- **Issue:** No
- **Dependencies:** No
kkdamowang 2024-03-01 03:25:50 +08:00 committed by GitHub
parent 4c62362eab
commit 6782dac420
2 changed files with 16 additions and 16 deletions

libs/partners/openai/langchain_openai/embeddings/azure.py

@@ -23,18 +23,18 @@ class AzureOpenAIEmbeddings(OpenAIEmbeddings):
             from langchain_openai import AzureOpenAIEmbeddings
-            openai = AzureOpenAIEmbeddings(model=""text-embedding-3-large")
+            openai = AzureOpenAIEmbeddings(model="text-embedding-3-large")
     """
     azure_endpoint: Union[str, None] = None
     """Your Azure endpoint, including the resource.
         Automatically inferred from env var `AZURE_OPENAI_ENDPOINT` if not provided.
         Example: `https://example-resource.azure.openai.com/`
     """
     deployment: Optional[str] = Field(default=None, alias="azure_deployment")
     """A model deployment.
         If given sets the base client URL to include `/deployments/{azure_deployment}`.
         Note: this means you won't be able to use non-deployment endpoints.
@@ -46,7 +46,7 @@ class AzureOpenAIEmbeddings(OpenAIEmbeddings):
         Automatically inferred from env var `AZURE_OPENAI_AD_TOKEN` if not provided.
         For more:
         https://www.microsoft.com/en-us/security/business/identity-access/microsoft-entra-id.
     """  # noqa: E501
     azure_ad_token_provider: Union[Callable[[], str], None] = None
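
For reference, a minimal usage sketch of the Azure fields touched above. The endpoint, deployment name, and API version are placeholders, not values taken from this commit.

```python
from langchain_openai import AzureOpenAIEmbeddings

# Placeholder values for illustration only; in practice these are usually
# supplied via env vars such as AZURE_OPENAI_ENDPOINT and AZURE_OPENAI_API_KEY.
embeddings = AzureOpenAIEmbeddings(
    model="text-embedding-3-large",
    azure_endpoint="https://example-resource.azure.openai.com/",
    azure_deployment="my-embedding-deployment",  # hypothetical deployment name
    openai_api_version="2023-05-15",             # example API version
    # azure_ad_token_provider=lambda: "<entra-id-token>",  # optional Callable[[], str]
)

vector = embeddings.embed_query("hello world")
```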

libs/partners/openai/langchain_openai/embeddings/base.py

@@ -49,7 +49,7 @@ class OpenAIEmbeddings(BaseModel, Embeddings):
             from langchain_openai import OpenAIEmbeddings
-            openai = OpenAIEmbeddings(model=""text-embedding-3-large")
+            openai = OpenAIEmbeddings(model="text-embedding-3-large")
     In order to use the library with Microsoft Azure endpoints, use
     AzureOpenAIEmbeddings.
@@ -60,7 +60,7 @@ class OpenAIEmbeddings(BaseModel, Embeddings):
     async_client: Any = Field(default=None, exclude=True)  #: :meta private:
     model: str = "text-embedding-ada-002"
     dimensions: Optional[int] = None
     """The number of dimensions the resulting output embeddings should have.
         Only supported in `text-embedding-3` and later models.
     """
@@ -71,7 +71,7 @@ class OpenAIEmbeddings(BaseModel, Embeddings):
     """Automatically inferred from env var `OPENAI_API_VERSION` if not provided."""
     # to support Azure OpenAI Service custom endpoints
     openai_api_base: Optional[str] = Field(default=None, alias="base_url")
     """Base URL path for API requests, leave blank if not using a proxy or service
         emulator."""
     # to support Azure OpenAI Service custom endpoints
     openai_api_type: Optional[str] = None
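
`openai_api_base` (alias `base_url`) is the field that points the client at a proxy or OpenAI-compatible service. A hedged sketch; the URL and key below are placeholders.

```python
from langchain_openai import OpenAIEmbeddings

# The URL and key are placeholders; a local proxy or emulator that speaks
# the OpenAI API is assumed to be running at this address.
embeddings = OpenAIEmbeddings(
    model="text-embedding-ada-002",
    base_url="http://localhost:8000/v1",
    api_key="sk-placeholder",
)
```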
@@ -92,21 +92,21 @@ class OpenAIEmbeddings(BaseModel, Embeddings):
     request_timeout: Optional[Union[float, Tuple[float, float], Any]] = Field(
         default=None, alias="timeout"
     )
     """Timeout for requests to OpenAI completion API. Can be float, httpx.Timeout or
         None."""
     headers: Any = None
     tiktoken_enabled: bool = True
     """Set this to False for non-OpenAI implementations of the embeddings API, e.g.
         the `--extensions openai` extension for `text-generation-webui`"""
     tiktoken_model_name: Optional[str] = None
     """The model name to pass to tiktoken when using this class.
         Tiktoken is used to count the number of tokens in documents to constrain
         them to be under a certain limit. By default, when set to None, this will
         be the same as the embedding model name. However, there are some cases
         where you may want to use this Embedding class with a model name not
         supported by tiktoken. This can include when using Azure embeddings or
         when using one of the many model providers that expose an OpenAI-like
         API but with different models. In those cases, in order to avoid erroring
         when tiktoken is called, you can specify a model name to use here."""
     show_progress_bar: bool = False
     """Whether to show a progress bar when embedding."""