"""**Embedding models** are wrappers around embedding models
|
|
|
|
from different APIs and services.
|
|
|
|
|
|
|
|
**Embedding models** can be LLMs or not.
|
|
|
|
|
|
|
|
**Class hierarchy:**
|
|
|
|
|
|
|
|
.. code-block::
|
|
|
|
|
|
|
|
Embeddings --> <name>Embeddings # Examples: OpenAIEmbeddings, HuggingFaceEmbeddings
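
A minimal usage sketch (assumes the provider's credentials, e.g. an
``OPENAI_API_KEY`` environment variable, are already configured):

.. code-block:: python

    from langchain_community.embeddings import OpenAIEmbeddings

    embeddings = OpenAIEmbeddings()
    query_vector = embeddings.embed_query("What is an embedding model?")
    doc_vectors = embeddings.embed_documents(["first document", "second document"])

Any other class in this module can be dropped in the same way, since they all
implement the shared ``Embeddings`` interface.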
"""

import logging
from typing import Any

from langchain_community.embeddings.aleph_alpha import (
    AlephAlphaAsymmetricSemanticEmbedding,
    AlephAlphaSymmetricSemanticEmbedding,
)
from langchain_community.embeddings.awa import AwaEmbeddings
from langchain_community.embeddings.azure_openai import AzureOpenAIEmbeddings
from langchain_community.embeddings.baichuan import BaichuanTextEmbeddings
from langchain_community.embeddings.baidu_qianfan_endpoint import (
    QianfanEmbeddingsEndpoint,
)
from langchain_community.embeddings.bedrock import BedrockEmbeddings
from langchain_community.embeddings.bookend import BookendEmbeddings
from langchain_community.embeddings.clarifai import ClarifaiEmbeddings
from langchain_community.embeddings.cohere import CohereEmbeddings
from langchain_community.embeddings.dashscope import DashScopeEmbeddings
from langchain_community.embeddings.databricks import DatabricksEmbeddings
from langchain_community.embeddings.deepinfra import DeepInfraEmbeddings
from langchain_community.embeddings.edenai import EdenAiEmbeddings
from langchain_community.embeddings.elasticsearch import ElasticsearchEmbeddings
from langchain_community.embeddings.embaas import EmbaasEmbeddings
from langchain_community.embeddings.ernie import ErnieEmbeddings
from langchain_community.embeddings.fake import (
    DeterministicFakeEmbedding,
    FakeEmbeddings,
)
from langchain_community.embeddings.fastembed import FastEmbedEmbeddings
from langchain_community.embeddings.google_palm import GooglePalmEmbeddings
from langchain_community.embeddings.gpt4all import GPT4AllEmbeddings
from langchain_community.embeddings.gradient_ai import GradientEmbeddings
from langchain_community.embeddings.huggingface import (
    HuggingFaceBgeEmbeddings,
    HuggingFaceEmbeddings,
    HuggingFaceInferenceAPIEmbeddings,
    HuggingFaceInstructEmbeddings,
)
from langchain_community.embeddings.huggingface_hub import HuggingFaceHubEmbeddings
from langchain_community.embeddings.infinity import InfinityEmbeddings
from langchain_community.embeddings.javelin_ai_gateway import JavelinAIGatewayEmbeddings
from langchain_community.embeddings.jina import JinaEmbeddings
from langchain_community.embeddings.johnsnowlabs import JohnSnowLabsEmbeddings
from langchain_community.embeddings.llamacpp import LlamaCppEmbeddings
from langchain_community.embeddings.llm_rails import LLMRailsEmbeddings
from langchain_community.embeddings.localai import LocalAIEmbeddings
from langchain_community.embeddings.minimax import MiniMaxEmbeddings
from langchain_community.embeddings.mlflow import (
    MlflowCohereEmbeddings,
    MlflowEmbeddings,
)
from langchain_community.embeddings.mlflow_gateway import MlflowAIGatewayEmbeddings
from langchain_community.embeddings.modelscope_hub import ModelScopeEmbeddings
from langchain_community.embeddings.mosaicml import MosaicMLInstructorEmbeddings
from langchain_community.embeddings.nlpcloud import NLPCloudEmbeddings
from langchain_community.embeddings.oci_generative_ai import OCIGenAIEmbeddings
from langchain_community.embeddings.octoai_embeddings import OctoAIEmbeddings
from langchain_community.embeddings.ollama import OllamaEmbeddings
from langchain_community.embeddings.openai import OpenAIEmbeddings
from langchain_community.embeddings.sagemaker_endpoint import (
    SagemakerEndpointEmbeddings,
)
from langchain_community.embeddings.self_hosted import SelfHostedEmbeddings
from langchain_community.embeddings.self_hosted_hugging_face import (
    SelfHostedHuggingFaceEmbeddings,
    SelfHostedHuggingFaceInstructEmbeddings,
)
from langchain_community.embeddings.sentence_transformer import (
    SentenceTransformerEmbeddings,
)
from langchain_community.embeddings.spacy_embeddings import SpacyEmbeddings
from langchain_community.embeddings.tensorflow_hub import TensorflowHubEmbeddings
from langchain_community.embeddings.vertexai import VertexAIEmbeddings
from langchain_community.embeddings.volcengine import VolcanoEmbeddings
from langchain_community.embeddings.voyageai import VoyageEmbeddings
from langchain_community.embeddings.xinference import XinferenceEmbeddings

logger = logging.getLogger(__name__)

__all__ = [
    "OpenAIEmbeddings",
    "AzureOpenAIEmbeddings",
    "BaichuanTextEmbeddings",
    "ClarifaiEmbeddings",
    "CohereEmbeddings",
    "DatabricksEmbeddings",
    "ElasticsearchEmbeddings",
    "FastEmbedEmbeddings",
    "HuggingFaceEmbeddings",
    "HuggingFaceInferenceAPIEmbeddings",
    "InfinityEmbeddings",
    "GradientEmbeddings",
    "JinaEmbeddings",
    "LlamaCppEmbeddings",
    "LLMRailsEmbeddings",
    "HuggingFaceHubEmbeddings",
    "MlflowEmbeddings",
    "MlflowCohereEmbeddings",
    "MlflowAIGatewayEmbeddings",
    "ModelScopeEmbeddings",
    "TensorflowHubEmbeddings",
    "SagemakerEndpointEmbeddings",
    "HuggingFaceInstructEmbeddings",
    "MosaicMLInstructorEmbeddings",
    "SelfHostedEmbeddings",
    "SelfHostedHuggingFaceEmbeddings",
    "SelfHostedHuggingFaceInstructEmbeddings",
    "FakeEmbeddings",
    "DeterministicFakeEmbedding",
    "AlephAlphaAsymmetricSemanticEmbedding",
    "AlephAlphaSymmetricSemanticEmbedding",
    "SentenceTransformerEmbeddings",
    "GooglePalmEmbeddings",
    "MiniMaxEmbeddings",
    "VertexAIEmbeddings",
    "BedrockEmbeddings",
    "DeepInfraEmbeddings",
    "EdenAiEmbeddings",
    "DashScopeEmbeddings",
    "EmbaasEmbeddings",
    "OctoAIEmbeddings",
    "SpacyEmbeddings",
    "NLPCloudEmbeddings",
    "GPT4AllEmbeddings",
    "XinferenceEmbeddings",
    "LocalAIEmbeddings",
    "AwaEmbeddings",
    "HuggingFaceBgeEmbeddings",
    "ErnieEmbeddings",
    "JavelinAIGatewayEmbeddings",
    "OllamaEmbeddings",
    "QianfanEmbeddingsEndpoint",
    "JohnSnowLabsEmbeddings",
    "VoyageEmbeddings",
    "BookendEmbeddings",
    "VolcanoEmbeddings",
    "OCIGenAIEmbeddings",
]


# TODO: this is in here to maintain backwards compatibility
class HypotheticalDocumentEmbedder:
    # Deprecation shim: constructing this class hands back the real
    # implementation from langchain.chains. __new__ is used because __init__
    # must return None and therefore cannot return the replacement instance.
    def __new__(cls, *args: Any, **kwargs: Any) -> Any:
        logger.warning(
            "Using a deprecated class. Please use "
            "`from langchain.chains import HypotheticalDocumentEmbedder` instead"
        )
        from langchain.chains.hyde.base import HypotheticalDocumentEmbedder as H

        return H(*args, **kwargs)

    @classmethod
    def from_llm(cls, *args: Any, **kwargs: Any) -> Any:
        logger.warning(
            "Using a deprecated class. Please use "
            "`from langchain.chains import HypotheticalDocumentEmbedder` instead"
        )
        from langchain.chains.hyde.base import HypotheticalDocumentEmbedder as H

        return H.from_llm(*args, **kwargs)
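# A sketch of the recommended replacement (hypothetical `llm` and `base_embeddings`
# objects supplied by the caller; "web_search" is one of the built-in HyDE prompt keys):
#
#     from langchain.chains import HypotheticalDocumentEmbedder
#
#     hyde = HypotheticalDocumentEmbedder.from_llm(
#         llm, base_embeddings, prompt_key="web_search"
#     )
#     vector = hyde.embed_query("What is an embedding model?")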