From b11f21c25fc6accca7a6f325c1fd3e63dd5f91ea Mon Sep 17 00:00:00 2001
From: James Braza
Date: Thu, 28 Sep 2023 19:56:42 -0700
Subject: [PATCH] Updated `LocalAIEmbeddings` docstring to better explain why
 `openai` (#10946)

Fixes my misgivings in https://github.com/langchain-ai/langchain/issues/10912
---
 libs/langchain/langchain/embeddings/localai.py | 11 +++++++----
 1 file changed, 7 insertions(+), 4 deletions(-)

diff --git a/libs/langchain/langchain/embeddings/localai.py b/libs/langchain/langchain/embeddings/localai.py
index 9eb139c962..f63a6a66a1 100644
--- a/libs/langchain/langchain/embeddings/localai.py
+++ b/libs/langchain/langchain/embeddings/localai.py
@@ -120,16 +120,19 @@ async def async_embed_with_retry(embeddings: LocalAIEmbeddings, **kwargs: Any) -
 class LocalAIEmbeddings(BaseModel, Embeddings):
     """LocalAI embedding models.
 
-    To use, you should have the ``openai`` python package installed, and the
-    environment variable ``OPENAI_API_KEY`` set to a random string. You need to
-    specify ``OPENAI_API_BASE`` to point to your LocalAI service endpoint.
+    Since LocalAI and OpenAI have 1:1 compatibility between APIs, this class
+    uses the ``openai`` Python package's ``openai.Embedding`` as its client.
+    Thus, you should have the ``openai`` python package installed, and satisfy
+    the ``OPENAI_API_KEY`` environment variable requirement by setting it to a
+    random string. You also need to specify ``OPENAI_API_BASE`` to point to
+    your LocalAI service endpoint.
 
     Example:
         .. code-block:: python
 
             from langchain.embeddings import LocalAIEmbeddings
             openai = LocalAIEmbeddings(
-                openai_api_key="random-key",
+                openai_api_key="random-string",
                 openai_api_base="http://localhost:8080"
             )
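
For reference, a minimal usage sketch of the guidance in the updated docstring; it assumes a LocalAI server is reachable at http://localhost:8080 and that, as the docstring states, ``OPENAI_API_KEY`` and ``OPENAI_API_BASE`` are picked up from the environment when not passed to the constructor:

.. code-block:: python

    import os

    from langchain.embeddings import LocalAIEmbeddings

    # The openai client requires a key to be set, but any random string
    # will do when talking to LocalAI.
    os.environ["OPENAI_API_KEY"] = "random-string"
    # Point the openai client at the LocalAI endpoint instead of OpenAI.
    os.environ["OPENAI_API_BASE"] = "http://localhost:8080"

    embeddings = LocalAIEmbeddings()
    vector = embeddings.embed_query("LocalAI is API-compatible with OpenAI")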