From 7b585c7585264ad785f4df31d911bb4cf8dc534b Mon Sep 17 00:00:00 2001
From: Harrison Chase
Date: Wed, 5 Jul 2023 12:04:22 -0400
Subject: [PATCH] add tqdm to embeddings (#7205)

for longer running embeddings, can be helpful to visualize

---
 langchain/embeddings/openai.py | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)

diff --git a/langchain/embeddings/openai.py b/langchain/embeddings/openai.py
index f3cb66547b..260a0dc81c 100644
--- a/langchain/embeddings/openai.py
+++ b/langchain/embeddings/openai.py
@@ -298,7 +298,13 @@ class OpenAIEmbeddings(BaseModel, Embeddings):
         batched_embeddings = []
         _chunk_size = chunk_size or self.chunk_size
-        for i in range(0, len(tokens), _chunk_size):
+        try:
+            import tqdm
+
+            _iter = tqdm.tqdm(range(0, len(tokens), _chunk_size))
+        except ImportError:
+            _iter = range(0, len(tokens), _chunk_size)
+        for i in _iter:
             response = embed_with_retry(
                 self,
                 input=tokens[i : i + _chunk_size],
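
Usage note (not part of the patch): a minimal sketch of how the change would surface for a caller, assuming the standard OpenAIEmbeddings import path and embed_documents API from langchain at the time, with tqdm installed and OPENAI_API_KEY set. With many input texts, the chunked embedding loop now renders a tqdm progress bar; without tqdm installed, behavior is unchanged.

# Hypothetical usage sketch; texts and chunk_size are illustrative values.
from langchain.embeddings import OpenAIEmbeddings

embeddings = OpenAIEmbeddings(chunk_size=1000)  # one progress-bar step per chunk of tokens
texts = [f"document {i}" for i in range(5000)]  # a long-running batch where the bar is useful
vectors = embeddings.embed_documents(texts)     # tqdm bar advances as each chunk is embedded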