2023-12-11 21:53:30 +00:00
|
|
|
from langchain_core.language_models.llms import BaseLLM
|
|
|
|
|
|
|
|
from langchain_community import llms
|
|
|
|
|
|
|
|
# Canonical list of every LLM class that `langchain_community.llms` is
# expected to export via `__all__`. `test_all_imports` compares this list
# (as a set, order-insensitive) against the package's actual exports, so
# adding a new integration requires updating this list as well.
EXPECT_ALL = [
    "AI21",
    "AlephAlpha",
    "AmazonAPIGateway",
    "Anthropic",
    "Anyscale",
    "Aphrodite",
    "Arcee",
    "Aviary",
    "AzureMLOnlineEndpoint",
    "AzureOpenAI",
    "BaichuanLLM",
    "Banana",
    "Baseten",
    "Beam",
    "Bedrock",
    "CTransformers",
    "CTranslate2",
    "CerebriumAI",
    "ChatGLM",
    "Clarifai",
    "Cohere",
    "Databricks",
    "DeepInfra",
    "DeepSparse",
    "EdenAI",
    "FakeListLLM",
    "Fireworks",
    "ForefrontAI",
    "Friendli",
    "GigaChat",
    "GPT4All",
    "GooglePalm",
    "GooseAI",
    "GradientLLM",
    "HuggingFaceEndpoint",
    "HuggingFaceHub",
    "HuggingFacePipeline",
    "HuggingFaceTextGenInference",
    "HumanInputLLM",
    "IpexLLM",
    "KoboldApiLLM",
    "Konko",
    "LlamaCpp",
    "Llamafile",
    "TextGen",
    "ManifestWrapper",
    "Minimax",
    "Mlflow",
    "MlflowAIGateway",
    "MLXPipeline",
    "Modal",
    "MosaicML",
    "Nebula",
    "OCIModelDeploymentTGI",
    "OCIModelDeploymentVLLM",
    "OCIGenAI",
    "NIBittensorLLM",
    "NLPCloud",
    "Ollama",
    "OpenAI",
    "OpenAIChat",
    "OpenLLM",
    "OpenLM",
    "PaiEasEndpoint",
    "Petals",
    "PipelineAI",
    "Predibase",
    "PredictionGuard",
    "PromptLayerOpenAI",
    "PromptLayerOpenAIChat",
    "OpaquePrompts",
    "RWKV",
    "Replicate",
    "SagemakerEndpoint",
    "SelfHostedHuggingFaceLLM",
    "SelfHostedPipeline",
    "StochasticAI",
    "TitanTakeoff",
    "TitanTakeoffPro",
    "Together",
    "Tongyi",
    "VertexAI",
    "VertexAIModelGarden",
    "VLLM",
    "VLLMOpenAI",
    "WeightOnlyQuantPipeline",
    "Writer",
    "OctoAIEndpoint",
    "Xinference",
    "JavelinAIGateway",
    "QianfanLLMEndpoint",
    "YandexGPT",
    "Yuan2",
    "VolcEngineMaasLLM",
    "WatsonxLLM",
    "SparkLLM",
]
|
|
|
|
|
|
|
|
|
|
|
|
def test_all_imports() -> None:
    """Verify the public LLM exports of ``langchain_community.llms``.

    Two checks: every name in ``llms.__all__`` resolves to a ``BaseLLM``
    subclass, and the set of exported names matches ``EXPECT_ALL`` exactly
    (order-insensitive).
    """
    exported = list(llms.__all__)
    for name in exported:
        candidate = getattr(llms, name)
        assert issubclass(candidate, BaseLLM)
    assert set(exported) == set(EXPECT_ALL)
|