"""Wrappers around embedding modules."""
|
|
import logging
|
|
from typing import Any
|
|
|
|
from langchain.embeddings.aleph_alpha import (
|
|
AlephAlphaAsymmetricSemanticEmbedding,
|
|
AlephAlphaSymmetricSemanticEmbedding,
|
|
)
|
|
from langchain.embeddings.bedrock import BedrockEmbeddings
|
|
from langchain.embeddings.cohere import CohereEmbeddings
|
|
from langchain.embeddings.elasticsearch import ElasticsearchEmbeddings
|
|
from langchain.embeddings.fake import FakeEmbeddings
|
|
from langchain.embeddings.google_palm import GooglePalmEmbeddings
|
|
from langchain.embeddings.huggingface import (
|
|
HuggingFaceEmbeddings,
|
|
HuggingFaceInstructEmbeddings,
|
|
)
|
|
from langchain.embeddings.huggingface_hub import HuggingFaceHubEmbeddings
|
|
from langchain.embeddings.jina import JinaEmbeddings
|
|
from langchain.embeddings.llamacpp import LlamaCppEmbeddings
|
|
from langchain.embeddings.minimax import MiniMaxEmbeddings
|
|
from langchain.embeddings.modelscope_hub import ModelScopeEmbeddings
|
|
from langchain.embeddings.mosaicml import MosaicMLInstructorEmbeddings
|
|
from langchain.embeddings.openai import OpenAIEmbeddings
|
|
from langchain.embeddings.sagemaker_endpoint import SagemakerEndpointEmbeddings
|
|
from langchain.embeddings.self_hosted import SelfHostedEmbeddings
|
|
from langchain.embeddings.self_hosted_hugging_face import (
|
|
SelfHostedHuggingFaceEmbeddings,
|
|
SelfHostedHuggingFaceInstructEmbeddings,
|
|
)
|
|
from langchain.embeddings.sentence_transformer import SentenceTransformerEmbeddings
|
|
from langchain.embeddings.tensorflow_hub import TensorflowHubEmbeddings
|
|
from langchain.embeddings.vertexai import VertexAIEmbeddings
|
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
__all__ = [
    "OpenAIEmbeddings",
    "HuggingFaceEmbeddings",
    "CohereEmbeddings",
    "ElasticsearchEmbeddings",
    "JinaEmbeddings",
    "LlamaCppEmbeddings",
    "HuggingFaceHubEmbeddings",
    "ModelScopeEmbeddings",
    "TensorflowHubEmbeddings",
    "SagemakerEndpointEmbeddings",
    "HuggingFaceInstructEmbeddings",
    "MosaicMLInstructorEmbeddings",
    "SelfHostedEmbeddings",
    "SelfHostedHuggingFaceEmbeddings",
    "SelfHostedHuggingFaceInstructEmbeddings",
    "FakeEmbeddings",
    "AlephAlphaAsymmetricSemanticEmbedding",
    "AlephAlphaSymmetricSemanticEmbedding",
    "SentenceTransformerEmbeddings",
    "GooglePalmEmbeddings",
    "MiniMaxEmbeddings",
    "VertexAIEmbeddings",
    "BedrockEmbeddings",
]
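
# Illustrative usage sketch: the classes exported above share the common
# ``Embeddings`` interface (``embed_documents`` / ``embed_query``).
# ``FakeEmbeddings`` is used here only because it needs no credentials;
# ``size`` is assumed to be the output vector dimensionality.
#
#     from langchain.embeddings import FakeEmbeddings
#
#     embedder = FakeEmbeddings(size=1536)
#     query_vec = embedder.embed_query("hello world")       # -> List[float]
#     doc_vecs = embedder.embed_documents(["foo", "bar"])   # -> List[List[float]]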


# TODO: this is in here to maintain backwards compatibility
class HypotheticalDocumentEmbedder:
    """Deprecated import-path shim for the HyDE embedder.

    Use ``from langchain.chains import HypotheticalDocumentEmbedder`` instead.
    """

    def __new__(cls, *args: Any, **kwargs: Any) -> Any:
        # Use __new__ rather than __init__ so the constructor can hand back
        # an instance of the real class; returning a value from __init__
        # would raise a TypeError at runtime.
        logger.warning(
            "Using a deprecated class. Please use "
            "`from langchain.chains import HypotheticalDocumentEmbedder` instead"
        )
        from langchain.chains.hyde.base import HypotheticalDocumentEmbedder as H

        return H(*args, **kwargs)

    @classmethod
    def from_llm(cls, *args: Any, **kwargs: Any) -> Any:
        logger.warning(
            "Using a deprecated class. Please use "
            "`from langchain.chains import HypotheticalDocumentEmbedder` instead"
        )
        from langchain.chains.hyde.base import HypotheticalDocumentEmbedder as H

        return H.from_llm(*args, **kwargs)
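
# Migration sketch for the deprecation warning above. The positional shape
# mirrors the shim's pass-through ``from_llm(*args, **kwargs)``; ``llm`` and
# ``base_embeddings`` are hypothetical, already-constructed objects, and
# "web_search" is assumed to be one of the built-in HyDE prompt keys.
#
#     from langchain.chains import HypotheticalDocumentEmbedder
#
#     hyde = HypotheticalDocumentEmbedder.from_llm(
#         llm, base_embeddings, prompt_key="web_search"
#     )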