docs: aws docs updates (#20571)

Author: Erick Friis · commit e395115807 · parent f09bd0b75b (committed via GitHub)

@@ -3,10 +3,14 @@
 {
 "cell_type": "raw",
 "id": "fbc66410",
-"metadata": {},
+"metadata": {
+"vscode": {
+"languageId": "raw"
+}
+},
 "source": [
 "---\n",
-"sidebar_label: Bedrock Chat\n",
+"sidebar_label: Bedrock\n",
 "---"
 ]
 },
@@ -15,7 +19,7 @@
 "id": "bf733a38-db84-4363-89e2-de6735c37230",
 "metadata": {},
 "source": [
-"# BedrockChat\n",
+"# ChatBedrock\n",
 "\n",
 ">[Amazon Bedrock](https://aws.amazon.com/bedrock/) is a fully managed service that offers a choice of \n",
 "> high-performing foundation models (FMs) from leading AI companies like `AI21 Labs`, `Anthropic`, `Cohere`, \n",
@@ -30,42 +34,53 @@
 },
 {
 "cell_type": "code",
-"execution_count": null,
+"execution_count": 2,
 "id": "d51edc81",
 "metadata": {},
-"outputs": [],
+"outputs": [
+{
+"name": "stdout",
+"output_type": "stream",
+"text": [
+"Note: you may need to restart the kernel to use updated packages.\n"
+]
+}
+],
 "source": [
-"%pip install --upgrade --quiet boto3"
+"%pip install --upgrade --quiet langchain-aws"
 ]
 },
 {
 "cell_type": "code",
-"execution_count": null,
+"execution_count": 1,
 "id": "d4a7c55d-b235-4ca4-a579-c90cc9570da9",
 "metadata": {
 "tags": []
 },
 "outputs": [],
 "source": [
-"from langchain_community.chat_models import BedrockChat\n",
+"from langchain_aws import ChatBedrock\n",
 "from langchain_core.messages import HumanMessage"
 ]
 },
 {
 "cell_type": "code",
-"execution_count": 2,
+"execution_count": 11,
 "id": "70cf04e8-423a-4ff6-8b09-f11fb711c817",
 "metadata": {
 "tags": []
 },
 "outputs": [],
 "source": [
-"chat = BedrockChat(model_id=\"anthropic.claude-v2\", model_kwargs={\"temperature\": 0.1})"
+"chat = ChatBedrock(\n",
+" model_id=\"anthropic.claude-3-sonnet-20240229-v1:0\",\n",
+" model_kwargs={\"temperature\": 0.1},\n",
+")"
 ]
 },
 {
 "cell_type": "code",
-"execution_count": 3,
+"execution_count": 12,
 "id": "8199ef8f-eb8b-4253-9ea0-6c24a013ca4c",
 "metadata": {
 "tags": []
@@ -74,10 +89,10 @@
 {
 "data": {
 "text/plain": [
-"AIMessage(content=\" Voici la traduction en français : J'adore programmer.\", additional_kwargs={}, example=False)"
+"AIMessage(content=\"Voici la traduction en français :\\n\\nJ'aime la programmation.\", additional_kwargs={'usage': {'prompt_tokens': 20, 'completion_tokens': 21, 'total_tokens': 41}}, response_metadata={'model_id': 'anthropic.claude-3-sonnet-20240229-v1:0', 'usage': {'prompt_tokens': 20, 'completion_tokens': 21, 'total_tokens': 41}}, id='run-994f0362-0e50-4524-afad-3c4f5bb11328-0')"
 ]
 },
-"execution_count": 3,
+"execution_count": 12,
 "metadata": {},
 "output_type": "execute_result"
 }
@@ -88,7 +103,7 @@
 " content=\"Translate this sentence from English to French. I love programming.\"\n",
 " )\n",
 "]\n",
-"chat(messages)"
+"chat.invoke(messages)"
 ]
 },
 {
@@ -97,39 +112,30 @@
 "id": "a4a4f4d4",
 "metadata": {},
 "source": [
-"### For BedrockChat with Streaming"
-]
-},
-{
-"cell_type": "code",
-"execution_count": null,
-"id": "c253883f",
-"metadata": {},
-"outputs": [],
-"source": [
-"from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler\n",
+"### Streaming\n",
 "\n",
-"chat = BedrockChat(\n",
-" model_id=\"anthropic.claude-v2\",\n",
-" streaming=True,\n",
-" callbacks=[StreamingStdOutCallbackHandler()],\n",
-" model_kwargs={\"temperature\": 0.1},\n",
-")"
+"To stream responses, you can use the runnable `.stream()` method."
 ]
 },
 {
 "cell_type": "code",
-"execution_count": null,
+"execution_count": 14,
 "id": "d9e52838",
 "metadata": {},
-"outputs": [],
+"outputs": [
+{
+"name": "stdout",
+"output_type": "stream",
+"text": [
+"Voici la traduction en français :\n",
+"\n",
+"J'aime la programmation."
+]
+}
+],
 "source": [
-"messages = [\n",
-" HumanMessage(\n",
-" content=\"Translate this sentence from English to French. I love programming.\"\n",
-" )\n",
-"]\n",
-"chat(messages)"
+"for chunk in chat.stream(messages):\n",
+" print(chunk.content, end=\"\", flush=True)"
 ]
 }
 ],
@@ -149,7 +155,7 @@
 "name": "python",
 "nbconvert_exporter": "python",
 "pygments_lexer": "ipython3",
-"version": "3.10.12"
+"version": "3.11.4"
 }
 },
 "nbformat": 4,

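The notebook diff above amounts to a small migration: install `langchain-aws` instead of `boto3`, import `ChatBedrock` instead of `BedrockChat`, and call the runnable `invoke`/`stream` methods instead of `chat(messages)`. A minimal sketch of the resulting flow, assuming AWS credentials and Bedrock model access are already configured in the environment:

```python
from langchain_aws import ChatBedrock
from langchain_core.messages import HumanMessage

chat = ChatBedrock(
    model_id="anthropic.claude-3-sonnet-20240229-v1:0",
    model_kwargs={"temperature": 0.1},
)

messages = [
    HumanMessage(
        content="Translate this sentence from English to French. I love programming."
    )
]

# `.invoke()` replaces the deprecated `chat(messages)` call style.
print(chat.invoke(messages).content)

# Streaming now goes through the standard runnable `.stream()` method,
# with no streaming callback handler required.
for chunk in chat.stream(messages):
    print(chunk.content, end="", flush=True)
```
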
@@ -118,25 +118,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"!pip install --upgrade --force-reinstall langchain"
-]
-},
-{
-"cell_type": "code",
-"execution_count": null,
-"metadata": {},
-"outputs": [],
-"source": [
-"!pip install --upgrade --force-reinstall langchain-core"
-]
-},
-{
-"cell_type": "code",
-"execution_count": null,
-"metadata": {},
-"outputs": [],
-"source": [
-"!pip install --upgrade --force-reinstall langchain-community"
+"!pip install --upgrade --quiet langchain langchain-community langchain-aws"
 ]
 },
 {
@@ -264,7 +246,7 @@
 "source": [
 "import boto3\n",
 "from langchain.chains.graph_qa.neptune_sparql import NeptuneSparqlQAChain\n",
-"from langchain_community.chat_models import BedrockChat\n",
+"from langchain_aws import ChatBedrock\n",
 "from langchain_community.graphs import NeptuneRdfGraph\n",
 "\n",
 "host = \"<your host>\"\n",
@@ -279,7 +261,7 @@
 "\n",
 "MODEL_ID = \"anthropic.claude-v2\"\n",
 "bedrock_client = boto3.client(\"bedrock-runtime\")\n",
-"llm = BedrockChat(model_id=MODEL_ID, client=bedrock_client)\n",
+"llm = ChatBedrock(model_id=MODEL_ID, client=bedrock_client)\n",
 "\n",
 "chain = NeptuneSparqlQAChain.from_llm(\n",
 " llm=llm,\n",

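The Neptune SPARQL QA notebook above now consolidates its installs into one command and builds the chain's LLM with `ChatBedrock` on top of an explicit boto3 Bedrock runtime client. A minimal sketch of just the client wiring (running the full `NeptuneSparqlQAChain` additionally needs a live Neptune endpoint), assuming boto3 can resolve AWS credentials and a region:

```python
import boto3
from langchain_aws import ChatBedrock

# Explicit Bedrock runtime client, as in the notebook above.
bedrock_client = boto3.client("bedrock-runtime")
llm = ChatBedrock(model_id="anthropic.claude-v2", client=bedrock_client)

print(llm.invoke("Reply with a single word: ready?").content)
```
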
@@ -2,6 +2,28 @@

 The `LangChain` integrations related to [Amazon AWS](https://aws.amazon.com/) platform.

+First-party AWS integrations are available in the `langchain_aws` package.
+
+```bash
+pip install langchain-aws
+```
+
+And there are also some community integrations available in the `langchain_community` package with the `boto3` optional dependency.
+
+```bash
+pip install langchain-community boto3
+```
+
+## Chat models
+
+### Bedrock Chat
+
+See a [usage example](/docs/integrations/chat/bedrock).
+
+```python
+from langchain_aws import ChatBedrock
+```
+
 ## LLMs

 ### Bedrock
@@ -20,7 +42,7 @@ The `LangChain` integrations related to [Amazon AWS](https://aws.amazon.com/) platform.
 See a [usage example](/docs/integrations/llms/bedrock).

 ```python
-from langchain_community.llms.bedrock import Bedrock
+from langchain_aws import BedrockLLM
 ```

 ### Amazon API Gateway
@@ -53,18 +75,7 @@ We use `SageMaker` to host our model and expose it as the `SageMaker Endpoint`.
 See a [usage example](/docs/integrations/llms/sagemaker).

 ```python
-from langchain_community.llms import SagemakerEndpoint
-from langchain_community.llms.sagemaker_endpoint import LLMContentHandler
-```
-
-## Chat models
-
-### Bedrock Chat
-
-See a [usage example](/docs/integrations/chat/bedrock).
-
-```python
-from langchain_community.chat_models import BedrockChat
+from langchain_aws import SagemakerEndpoint
 ```

 ## Embedding Models
@@ -188,16 +199,16 @@ from langchain.vectorstores import DocumentDBVectorSearch
 > manuals, and websites. It supports multiple languages and can understand complex queries, synonyms, and
 > contextual meanings to provide highly relevant search results.

-We need to install the `boto3` library.
+We need to install the `langchain-aws` library.

 ```bash
-pip install boto3
+pip install langchain-aws
 ```

 See a [usage example](/docs/integrations/retrievers/amazon_kendra_retriever).

 ```python
-from langchain.retrievers import AmazonKendraRetriever
+from langchain_aws import AmazonKendraRetriever
 ```

 ### Amazon Bedrock (Knowledge Bases)
@@ -206,16 +217,16 @@ from langchain.retrievers import AmazonKendraRetriever
 > `Amazon Web Services` (`AWS`) offering which lets you quickly build RAG applications by using your
 > private data to customize foundation model response.

-We need to install the `boto3` library.
+We need to install the `langchain-aws` library.

 ```bash
-pip install boto3
+pip install langchain-aws
 ```

 See a [usage example](/docs/integrations/retrievers/bedrock).

 ```python
-from langchain.retrievers import AmazonKnowledgeBasesRetriever
+from langchain_aws import AmazonKnowledgeBasesRetriever
 ```

 ## Tools

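The platform page diff above moves the Bedrock, SageMaker, Kendra, and Knowledge Bases integrations over to `langchain_aws` imports. A hedged sketch of the retriever side of that change; the constructor arguments (`index_id`, `knowledge_base_id`, `retrieval_config`) follow the pre-existing community retrievers and are assumptions as far as this diff goes:

```python
from langchain_aws import AmazonKendraRetriever, AmazonKnowledgeBasesRetriever

# Both IDs below are placeholders for resources in your own AWS account.
kendra = AmazonKendraRetriever(index_id="<your-kendra-index-id>")

kb = AmazonKnowledgeBasesRetriever(
    knowledge_base_id="<your-knowledge-base-id>",
    retrieval_config={"vectorSearchConfiguration": {"numberOfResults": 4}},
)

docs = kb.invoke("What is Amazon Bedrock?")
for doc in docs:
    print(doc.page_content[:100])
```
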
@@ -158,9 +158,9 @@
 }
 ],
 "source": [
-"from langchain_community.chat_models import BedrockChat\n",
+"from langchain_aws import ChatBedrock\n",
 "\n",
-"llm = BedrockChat(model_id=\"anthropic.claude-v2\")\n",
+"llm = ChatBedrock(model_id=\"anthropic.claude-v2\")\n",
 "msg = llm.invoke([(\"human\", \"What's the oldest known example of cuneiform\")])\n",
 "msg.response_metadata"
 ]

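This hunk switches the response-metadata example over to `ChatBedrock`. As the `AIMessage` output earlier in this diff shows, Bedrock responses carry a `model_id` and a `usage` dict in `response_metadata`; a small sketch of pulling those fields out, assuming the same Bedrock model access as above:

```python
from langchain_aws import ChatBedrock

llm = ChatBedrock(model_id="anthropic.claude-v2")
msg = llm.invoke([("human", "What's the oldest known example of cuneiform")])

meta = msg.response_metadata
print(meta.get("model_id"))
# `usage` holds the prompt/completion/total token counts for the call.
print(meta.get("usage", {}).get("total_tokens"))
```
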
@@ -307,7 +307,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 19,
+"execution_count": 1,
 "id": "4a3eced5-2ff7-49a7-a48b-768af8658323",
 "metadata": {},
 "outputs": [
@@ -315,33 +315,41 @@
 "name": "stdout",
 "output_type": "stream",
 "text": [
-"Tokens Used: 79\n",
-"\tPrompt Tokens: 26\n",
-"\tCompletion Tokens: 53\n",
+"Tokens Used: 0\n",
+"\tPrompt Tokens: 0\n",
+"\tCompletion Tokens: 0\n",
 "Successful Requests: 2\n",
-"Total Cost (USD): $0.00148\n"
+"Total Cost (USD): $0.0\n"
 ]
 }
 ],
 "source": [
-"# !pip install boto3\n",
+"# !pip install langchain-aws\n",
+"from langchain_aws import ChatBedrock\n",
 "from langchain_community.callbacks.manager import get_bedrock_anthropic_callback\n",
-"from langchain_community.chat_models import BedrockChat\n",
 "\n",
-"llm = BedrockChat(model_id=\"anthropic.claude-v2\")\n",
+"llm = ChatBedrock(model_id=\"anthropic.claude-v2\")\n",
 "\n",
 "with get_bedrock_anthropic_callback() as cb:\n",
 " result = llm.invoke(\"Tell me a joke\")\n",
 " result2 = llm.invoke(\"Tell me a joke\")\n",
 " print(cb)"
 ]
+},
+{
+"cell_type": "code",
+"execution_count": null,
+"id": "bb40375d",
+"metadata": {},
+"outputs": [],
+"source": []
 }
 ],
 "metadata": {
 "kernelspec": {
-"display_name": "poetry-venv-2",
+"display_name": ".venv",
 "language": "python",
-"name": "poetry-venv-2"
+"name": "python3"
 },
 "language_info": {
 "codemirror_mode": {
@@ -353,7 +361,7 @@
 "name": "python",
 "nbconvert_exporter": "python",
 "pygments_lexer": "ipython3",
-"version": "3.9.1"
+"version": "3.11.4"
 }
 },
 "nbformat": 4,

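The token-usage notebook above now pairs `get_bedrock_anthropic_callback` with `ChatBedrock` (note that the recorded output reports zero tokens and cost across two successful requests). A sketch of reading the same figures as attributes instead of printing the callback; the attribute names mirror the printed fields and are an assumption here:

```python
from langchain_aws import ChatBedrock
from langchain_community.callbacks.manager import get_bedrock_anthropic_callback

llm = ChatBedrock(model_id="anthropic.claude-v2")

with get_bedrock_anthropic_callback() as cb:
    llm.invoke("Tell me a joke")
    llm.invoke("Tell me a joke")

# Assumed attribute names, matching the fields shown in the cell output above.
print(cb.total_tokens, cb.prompt_tokens, cb.completion_tokens)
print(cb.successful_requests, cb.total_cost)
```
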
@@ -2,6 +2,7 @@ import re
 from collections import defaultdict
 from typing import Any, Dict, Iterator, List, Optional, Tuple, Union

+from langchain_core._api.deprecation import deprecated
 from langchain_core.callbacks import (
 CallbackManagerForLLMRun,
 )
@@ -195,6 +196,9 @@ class ChatPromptAdapter:
 _message_type_lookups = {"human": "user", "ai": "assistant"}


+@deprecated(
+    since="0.0.34", removal="0.3", alternative_import="langchain_aws.ChatBedrock"
+)
 class BedrockChat(BaseChatModel, BedrockBase):
 """Chat model that uses the Bedrock API."""

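With this decorator in place, the community `BedrockChat` keeps working but steers callers toward `langchain_aws.ChatBedrock`. A sketch of what that looks like from user code, assuming this version of `langchain-community` is installed and AWS credentials plus a region are available so the underlying client can be constructed:

```python
import warnings

from langchain_community.chat_models import BedrockChat

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    # Instantiating the deprecated class should emit a deprecation warning
    # that names langchain_aws.ChatBedrock as the replacement.
    BedrockChat(model_id="anthropic.claude-v2", region_name="us-east-1")

for w in caught:
    print(w.category.__name__, w.message)
```
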
@@ -14,6 +14,7 @@ from typing import (
 Tuple,
 )

+from langchain_core._api.deprecation import deprecated
 from langchain_core.callbacks import (
 AsyncCallbackManagerForLLMRun,
 CallbackManagerForLLMRun,
@@ -711,6 +712,9 @@ class BedrockBase(BaseModel, ABC):
 run_manager.on_llm_new_token(chunk.text, chunk=chunk)  # type: ignore[unused-coroutine]


+@deprecated(
+    since="0.0.34", removal="0.3", alternative_import="langchain_aws.BedrockLLM"
+)
 class Bedrock(LLM, BedrockBase):
 """Bedrock models.

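The decorator on the community `Bedrock` LLM points at `langchain_aws.BedrockLLM` as its replacement. A minimal sketch of that replacement for a text-completion call, assuming Bedrock access to `anthropic.claude-v2` is configured:

```python
from langchain_aws import BedrockLLM

llm = BedrockLLM(model_id="anthropic.claude-v2")

# Claude text-completion models expect the Human/Assistant prompt format.
print(llm.invoke("\n\nHuman: Write a haiku about rivers.\n\nAssistant:"))
```
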
@@ -235,10 +235,10 @@ SERIALIZABLE_MAPPING: Dict[Tuple[str, ...], Tuple[str, ...]] = {
 "AzureChatOpenAI",
 ),
 ("langchain", "chat_models", "bedrock", "BedrockChat"): (
-"langchain",
+"langchain_aws",
 "chat_models",
 "bedrock",
-"BedrockChat",
+"ChatBedrock",
 ),
 ("langchain", "chat_models", "anthropic", "ChatAnthropic"): (
 "langchain_anthropic",
@@ -311,10 +311,10 @@ SERIALIZABLE_MAPPING: Dict[Tuple[str, ...], Tuple[str, ...]] = {
 "BaseOpenAI",
 ),
 ("langchain", "llms", "bedrock", "Bedrock"): (
-"langchain",
+"langchain_aws",
 "llms",
 "bedrock",
-"Bedrock",
+"BedrockLLM",
 ),
 ("langchain", "llms", "fireworks", "Fireworks"): (
 "langchain",

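These mapping entries let objects serialized under the old `langchain.chat_models.bedrock.BedrockChat` and `langchain.llms.bedrock.Bedrock` ids resolve to the new `langchain_aws` classes on load. A purely illustrative sketch of that redirection; the `resolve` helper below is hypothetical and not part of `langchain_core`:

```python
from importlib import import_module
from typing import Any, Dict, Tuple

# Trimmed copy of the two updated entries.
SERIALIZABLE_MAPPING: Dict[Tuple[str, ...], Tuple[str, ...]] = {
    ("langchain", "chat_models", "bedrock", "BedrockChat"): (
        "langchain_aws", "chat_models", "bedrock", "ChatBedrock",
    ),
    ("langchain", "llms", "bedrock", "Bedrock"): (
        "langchain_aws", "llms", "bedrock", "BedrockLLM",
    ),
}


def resolve(serialized_id: Tuple[str, ...]) -> Any:
    """Hypothetical helper: map an old serialized id to the class at its new home."""
    package, *_, class_name = SERIALIZABLE_MAPPING.get(serialized_id, serialized_id)
    # langchain_aws re-exports these classes at the top level.
    return getattr(import_module(package), class_name)


print(resolve(("langchain", "chat_models", "bedrock", "BedrockChat")).__name__)
```
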
@@ -1,6 +1,6 @@
 import os

-from langchain_community.chat_models import BedrockChat
+from langchain_aws import ChatBedrock
 from langchain_core.prompts import ChatPromptTemplate
 from langchain_core.runnables import ConfigurableField
@@ -16,11 +16,11 @@ _model_kwargs = {
 # Full list of base model IDs is available at
 # https://docs.aws.amazon.com/bedrock/latest/userguide/model-ids-arns.html
 _model_alts = {
-"claude_2_1": BedrockChat(
+"claude_2_1": ChatBedrock(
 model_id="anthropic.claude-v2:1", model_kwargs=_model_kwargs
 ),
-"claude_1": BedrockChat(model_id="anthropic.claude-v1", model_kwargs=_model_kwargs),
-"claude_instant_1": BedrockChat(
+"claude_1": ChatBedrock(model_id="anthropic.claude-v1", model_kwargs=_model_kwargs),
+"claude_instant_1": ChatBedrock(
 model_id="anthropic.claude-instant-v1", model_kwargs=_model_kwargs
 ),
 }
@@ -34,7 +34,7 @@ _prompt = ChatPromptTemplate.from_messages(
 ]
 )
-_model = BedrockChat(
+_model = ChatBedrock(
 model_id="anthropic.claude-v2", model_kwargs=_model_kwargs
 ).configurable_alternatives(
 which=ConfigurableField(

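In the template above, the default model and all of its alternatives now build on `ChatBedrock`, exposed through `configurable_alternatives`. A sketch of selecting an alternative at runtime; the `ConfigurableField` id (`"model"`), the default key, and the prompt are assumptions, since the diff cuts off before the field definition:

```python
from langchain_aws import ChatBedrock
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.runnables import ConfigurableField

_model_kwargs = {"temperature": 0.1}

_model = ChatBedrock(
    model_id="anthropic.claude-v2", model_kwargs=_model_kwargs
).configurable_alternatives(
    which=ConfigurableField(id="model"),  # assumed field id
    default_key="claude_2",  # assumed default key
    claude_instant_1=ChatBedrock(
        model_id="anthropic.claude-instant-v1", model_kwargs=_model_kwargs
    ),
)

_prompt = ChatPromptTemplate.from_messages([("human", "{question}")])
chain = _prompt | _model

# Pick the alternative per call via with_config.
chain.with_config(configurable={"model": "claude_instant_1"}).invoke(
    {"question": "What is Amazon Bedrock?"}
)
```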