diff --git a/docs/docs/integrations/chat/octoai.ipynb b/docs/docs/integrations/chat/octoai.ipynb new file mode 100644 index 0000000000..8c2a1bc853 --- /dev/null +++ b/docs/docs/integrations/chat/octoai.ipynb @@ -0,0 +1,112 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# ChatOctoAI\n", + "\n", + "[OctoAI](https://docs.octoai.cloud/docs) offers easy access to efficient compute and enables users to integrate their choice of AI models into applications. The `OctoAI` compute service helps you run, tune, and scale AI applications easily.\n", + "\n", + "This notebook demonstrates the use of `langchain.chat_models.ChatOctoAI` for [OctoAI endpoints](https://octoai.cloud/text).\n", + "\n", + "## Setup\n", + "\n", + "To run our example app, there are two simple steps to take:\n", + "\n", + "1. Get an API Token from [your OctoAI account page](https://octoai.cloud/settings).\n", + " \n", + "2. Paste your API token in the code cell below or use the `octoai_api_token` keyword argument.\n", + "\n", + "Note: If you want to use a different model than the [available models](https://octoai.cloud/text?selectedTags=Chat), you can containerize the model and make a custom OctoAI endpoint yourself, by following [Build a Container from Python](https://octo.ai/docs/bring-your-own-model/advanced-build-a-container-from-scratch-in-python) and [Create a Custom Endpoint from a Container](https://octo.ai/docs/bring-your-own-model/create-custom-endpoints-from-a-container/create-custom-endpoints-from-a-container) and then updating your `OCTOAI_API_BASE` environment variable.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "\n", + "os.environ[\"OCTOAI_API_TOKEN\"] = \"OCTOAI_API_TOKEN\"" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [], + "source": [ + "from langchain_community.chat_models import ChatOctoAI\n", + "from 
langchain_core.messages import HumanMessage, SystemMessage" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Example" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [], + "source": [ + "chat = ChatOctoAI(max_tokens=300, model_name=\"mixtral-8x7b-instruct\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "messages = [\n", + " SystemMessage(content=\"You are a helpful assistant.\"),\n", + " HumanMessage(content=\"Tell me about Leonardo da Vinci briefly.\"),\n", + "]\n", + "print(chat(messages).content)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Leonardo da Vinci (1452-1519) was an Italian polymath who is often considered one of the greatest painters in history. However, his genius extended far beyond art. He was also a scientist, inventor, mathematician, engineer, anatomist, geologist, and cartographer.\n", + "\n", + "Da Vinci is best known for his paintings such as the Mona Lisa, The Last Supper, and The Virgin of the Rocks. His scientific studies were ahead of his time, and his notebooks contain detailed drawings and descriptions of various machines, human anatomy, and natural phenomena.\n", + "\n", + "Despite never receiving a formal education, da Vinci's insatiable curiosity and observational skills made him a pioneer in many fields. His work continues to inspire and influence artists, scientists, and thinkers today." 
+ ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.7" + }, + "vscode": { + "interpreter": { + "hash": "97697b63fdcee0a640856f91cb41326ad601964008c341809e43189d1cab1047" + } + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/libs/community/langchain_community/chat_models/__init__.py b/libs/community/langchain_community/chat_models/__init__.py index b041d6e76a..96af77d5f0 100644 --- a/libs/community/langchain_community/chat_models/__init__.py +++ b/libs/community/langchain_community/chat_models/__init__.py @@ -234,6 +234,7 @@ _module_lookup = { "ChatMLX": "langchain_community.chat_models.mlx", "ChatMaritalk": "langchain_community.chat_models.maritalk", "ChatMlflow": "langchain_community.chat_models.mlflow", + "ChatOctoAI": "langchain_community.chat_models.octoai", "ChatOllama": "langchain_community.chat_models.ollama", "ChatOpenAI": "langchain_community.chat_models.openai", "ChatPerplexity": "langchain_community.chat_models.perplexity", diff --git a/libs/community/langchain_community/chat_models/octoai.py b/libs/community/langchain_community/chat_models/octoai.py new file mode 100644 index 0000000000..8834b86706 --- /dev/null +++ b/libs/community/langchain_community/chat_models/octoai.py @@ -0,0 +1,93 @@ +"""OctoAI Endpoints chat wrapper. 
Relies heavily on ChatOpenAI.""" +from typing import Dict, Optional + +from langchain_core.pydantic_v1 import Field, SecretStr, root_validator +from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env + +from langchain_community.chat_models.openai import ChatOpenAI +from langchain_community.utils.openai import is_openai_v1 + +DEFAULT_API_BASE = "https://text.octoai.run/v1/" +DEFAULT_MODEL = "llama-2-13b-chat" + + +class ChatOctoAI(ChatOpenAI): + """OctoAI Chat large language models. + + See https://octo.ai/ for information about OctoAI. + + To use, you should have the ``openai`` python package installed and the + environment variable ``OCTOAI_API_TOKEN`` set with your API token. + Alternatively, you can use the octoai_api_token keyword argument. + + Any parameters that are valid to be passed to the `openai.create` call can be passed + in, even if not explicitly saved on this class. + + Example: + .. code-block:: python + + from langchain_community.chat_models import ChatOctoAI + chat = ChatOctoAI(model_name="mixtral-8x7b-instruct") + """ + + octoai_api_base: str = Field(default=DEFAULT_API_BASE) + octoai_api_token: Optional[SecretStr] = Field(default=None) + model_name: str = Field(default=DEFAULT_MODEL) + + @property + def _llm_type(self) -> str: + """Return type of chat model.""" + return "octoai-chat" + + @property + def lc_secrets(self) -> Dict[str, str]: + return {"octoai_api_token": "OCTOAI_API_TOKEN"} + + @classmethod + def is_lc_serializable(cls) -> bool: + return False + + @root_validator() + def validate_environment(cls, values: Dict) -> Dict: + """Validate that api key and python package exists in environment.""" + values["octoai_api_base"] = get_from_dict_or_env( + values, + "octoai_api_base", + "OCTOAI_API_BASE", + default=DEFAULT_API_BASE, + ) + values["octoai_api_token"] = convert_to_secret_str( + get_from_dict_or_env(values, "octoai_api_token", "OCTOAI_API_TOKEN") + ) + values["model_name"] = get_from_dict_or_env( + values, + "model_name", + 
"MODEL_NAME", + default=DEFAULT_MODEL, + ) + + try: + import openai + + if is_openai_v1(): + client_params = { + "api_key": values["octoai_api_token"].get_secret_value(), + "base_url": values["octoai_api_base"], + } + if not values.get("client"): + values["client"] = openai.OpenAI(**client_params).chat.completions + if not values.get("async_client"): + values["async_client"] = openai.AsyncOpenAI( + **client_params + ).chat.completions + else: + values["openai_api_base"] = values["octoai_api_base"] + values["openai_api_key"] = values["octoai_api_token"].get_secret_value() + values["client"] = openai.ChatCompletion + except ImportError: + raise ImportError( + "Could not import openai python package. " + "Please install it with `pip install openai`." + ) + + return values diff --git a/libs/community/tests/integration_tests/chat_models/test_octoai.py b/libs/community/tests/integration_tests/chat_models/test_octoai.py new file mode 100644 index 0000000000..274cb7008a --- /dev/null +++ b/libs/community/tests/integration_tests/chat_models/test_octoai.py @@ -0,0 +1,11 @@ +from langchain_core.messages import AIMessage, HumanMessage + +from langchain_community.chat_models.octoai import ChatOctoAI + + +def test_chat_octoai() -> None: + chat = ChatOctoAI() + message = HumanMessage(content="Hello") + response = chat([message]) + assert isinstance(response, AIMessage) + assert isinstance(response.content, str) diff --git a/libs/community/tests/unit_tests/chat_models/test_imports.py b/libs/community/tests/unit_tests/chat_models/test_imports.py index 8765ef875e..59ed05f6ef 100644 --- a/libs/community/tests/unit_tests/chat_models/test_imports.py +++ b/libs/community/tests/unit_tests/chat_models/test_imports.py @@ -48,6 +48,7 @@ EXPECTED_ALL = [ "SolarChat", "QianfanChatEndpoint", "VolcEngineMaasChat", + "ChatOctoAI", ]