From 57d8a3d1e8fd376d8db8772977870d01ee5e444e Mon Sep 17 00:00:00 2001
From: Myeongseop Kim <81076998+amicus-veritatis@users.noreply.github.com>
Date: Thu, 6 Jul 2023 12:36:01 +0900
Subject: [PATCH] Make tqdm for OpenAIEmbeddings optional (#7247)

- Description: I have added a `show_progress_bar` parameter (defaults to
  `False`) to the `OpenAIEmbeddings` class. If the user sets
  `show_progress_bar` to `True`, a progress bar will be displayed.
- Issue: #7246
- Dependencies: N/A
- Tag maintainer: @hwchase17, @baskaryan
- Twitter handle: N/A
---
 langchain/embeddings/openai.py | 15 +++++++++++----
 1 file changed, 11 insertions(+), 4 deletions(-)

diff --git a/langchain/embeddings/openai.py b/langchain/embeddings/openai.py
index 9194a22e476..8e028e52bdb 100644
--- a/langchain/embeddings/openai.py
+++ b/langchain/embeddings/openai.py
@@ -191,6 +191,8 @@ class OpenAIEmbeddings(BaseModel, Embeddings):
     when using one of the many model providers that expose an OpenAI-like
     API but with different models. In those cases, in order to avoid erroring
     when tiktoken is called, you can specify a model name to use here."""
+    show_progress_bar: bool = False
+    """Whether to show a progress bar when embedding."""
 
     class Config:
         """Configuration for this pydantic object."""
@@ -309,12 +311,17 @@ class OpenAIEmbeddings(BaseModel, Embeddings):
         batched_embeddings = []
         _chunk_size = chunk_size or self.chunk_size
 
-        try:
-            import tqdm
-            _iter = tqdm.tqdm(range(0, len(tokens), _chunk_size))
-        except ImportError:
+        if self.show_progress_bar:
+            try:
+                import tqdm
+
+                _iter = tqdm.tqdm(range(0, len(tokens), _chunk_size))
+            except ImportError:
+                _iter = range(0, len(tokens), _chunk_size)
+        else:
             _iter = range(0, len(tokens), _chunk_size)
+
         for i in _iter:
             response = embed_with_retry(
                 self,
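
A minimal usage sketch of the new flag, assuming a `langchain` build that includes this patch, the `openai` and `tqdm` packages installed, and `OPENAI_API_KEY` set in the environment; the input texts are illustrative only:

```python
from langchain.embeddings import OpenAIEmbeddings

# With show_progress_bar=True, the batched embedding loop displays a tqdm
# progress bar; if tqdm is not installed, it silently falls back to a plain
# range and no bar is shown. The default (False) keeps the old quiet behavior.
embeddings = OpenAIEmbeddings(show_progress_bar=True)

texts = ["first document", "second document", "third document"]
vectors = embeddings.embed_documents(texts)
print(len(vectors), len(vectors[0]))  # number of embeddings, embedding dimension
```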