docs: aws docs updates (#20571)

pull/20577/head
Erick Friis 4 weeks ago committed by GitHub
parent f09bd0b75b
commit e395115807

@ -3,10 +3,14 @@
{
"cell_type": "raw",
"id": "fbc66410",
"metadata": {},
"metadata": {
"vscode": {
"languageId": "raw"
}
},
"source": [
"---\n",
"sidebar_label: Bedrock Chat\n",
"sidebar_label: Bedrock\n",
"---"
]
},
@ -15,7 +19,7 @@
"id": "bf733a38-db84-4363-89e2-de6735c37230",
"metadata": {},
"source": [
"# BedrockChat\n",
"# ChatBedrock\n",
"\n",
">[Amazon Bedrock](https://aws.amazon.com/bedrock/) is a fully managed service that offers a choice of \n",
"> high-performing foundation models (FMs) from leading AI companies like `AI21 Labs`, `Anthropic`, `Cohere`, \n",
@ -30,42 +34,53 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 2,
"id": "d51edc81",
"metadata": {},
"outputs": [],
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Note: you may need to restart the kernel to use updated packages.\n"
]
}
],
"source": [
"%pip install --upgrade --quiet boto3"
"%pip install --upgrade --quiet langchain-aws"
]
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 1,
"id": "d4a7c55d-b235-4ca4-a579-c90cc9570da9",
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"from langchain_community.chat_models import BedrockChat\n",
"from langchain_aws import ChatBedrock\n",
"from langchain_core.messages import HumanMessage"
]
},
{
"cell_type": "code",
"execution_count": 2,
"execution_count": 11,
"id": "70cf04e8-423a-4ff6-8b09-f11fb711c817",
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"chat = BedrockChat(model_id=\"anthropic.claude-v2\", model_kwargs={\"temperature\": 0.1})"
"chat = ChatBedrock(\n",
" model_id=\"anthropic.claude-3-sonnet-20240229-v1:0\",\n",
" model_kwargs={\"temperature\": 0.1},\n",
")"
]
},
{
"cell_type": "code",
"execution_count": 3,
"execution_count": 12,
"id": "8199ef8f-eb8b-4253-9ea0-6c24a013ca4c",
"metadata": {
"tags": []
@ -74,10 +89,10 @@
{
"data": {
"text/plain": [
"AIMessage(content=\" Voici la traduction en français : J'adore programmer.\", additional_kwargs={}, example=False)"
"AIMessage(content=\"Voici la traduction en français :\\n\\nJ'aime la programmation.\", additional_kwargs={'usage': {'prompt_tokens': 20, 'completion_tokens': 21, 'total_tokens': 41}}, response_metadata={'model_id': 'anthropic.claude-3-sonnet-20240229-v1:0', 'usage': {'prompt_tokens': 20, 'completion_tokens': 21, 'total_tokens': 41}}, id='run-994f0362-0e50-4524-afad-3c4f5bb11328-0')"
]
},
"execution_count": 3,
"execution_count": 12,
"metadata": {},
"output_type": "execute_result"
}
@ -88,7 +103,7 @@
" content=\"Translate this sentence from English to French. I love programming.\"\n",
" )\n",
"]\n",
"chat(messages)"
"chat.invoke(messages)"
]
},
{
@ -97,39 +112,30 @@
"id": "a4a4f4d4",
"metadata": {},
"source": [
"### For BedrockChat with Streaming"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "c253883f",
"metadata": {},
"outputs": [],
"source": [
"from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler\n",
"### Streaming\n",
"\n",
"chat = BedrockChat(\n",
" model_id=\"anthropic.claude-v2\",\n",
" streaming=True,\n",
" callbacks=[StreamingStdOutCallbackHandler()],\n",
" model_kwargs={\"temperature\": 0.1},\n",
")"
"To stream responses, you can use the runnable `.stream()` method."
]
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 14,
"id": "d9e52838",
"metadata": {},
"outputs": [],
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Voici la traduction en français :\n",
"\n",
"J'aime la programmation."
]
}
],
"source": [
"messages = [\n",
" HumanMessage(\n",
" content=\"Translate this sentence from English to French. I love programming.\"\n",
" )\n",
"]\n",
"chat(messages)"
"for chunk in chat.stream(messages):\n",
" print(chunk.content, end=\"\", flush=True)"
]
}
],
@ -149,7 +155,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.12"
"version": "3.11.4"
}
},
"nbformat": 4,

@ -118,25 +118,7 @@
"metadata": {},
"outputs": [],
"source": [
"!pip install --upgrade --force-reinstall langchain"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"!pip install --upgrade --force-reinstall langchain-core"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"!pip install --upgrade --force-reinstall langchain-community"
"!pip install --upgrade --quiet langchain langchain-community langchain-aws"
]
},
{
@ -264,7 +246,7 @@
"source": [
"import boto3\n",
"from langchain.chains.graph_qa.neptune_sparql import NeptuneSparqlQAChain\n",
"from langchain_community.chat_models import BedrockChat\n",
"from langchain_aws import ChatBedrock\n",
"from langchain_community.graphs import NeptuneRdfGraph\n",
"\n",
"host = \"<your host>\"\n",
@ -279,7 +261,7 @@
"\n",
"MODEL_ID = \"anthropic.claude-v2\"\n",
"bedrock_client = boto3.client(\"bedrock-runtime\")\n",
"llm = BedrockChat(model_id=MODEL_ID, client=bedrock_client)\n",
"llm = ChatBedrock(model_id=MODEL_ID, client=bedrock_client)\n",
"\n",
"chain = NeptuneSparqlQAChain.from_llm(\n",
" llm=llm,\n",

@ -2,6 +2,28 @@
The `LangChain` integrations related to the [Amazon AWS](https://aws.amazon.com/) platform.
First-party AWS integrations are available in the `langchain_aws` package.
```bash
pip install langchain-aws
```
Some community integrations are also available in the `langchain_community` package, which requires the optional `boto3` dependency.
```bash
pip install langchain-community boto3
```
## Chat models
### Bedrock Chat
See a [usage example](/docs/integrations/chat/bedrock).
```python
from langchain_aws import ChatBedrock
```
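
A minimal usage sketch of the new import (the model ID below is only an example; it assumes AWS credentials are configured and your account has Bedrock model access):

```python
from langchain_aws import ChatBedrock

# Example model ID; any Bedrock chat model you have access to works.
chat = ChatBedrock(model_id="anthropic.claude-3-sonnet-20240229-v1:0")
print(chat.invoke("Say hello in French.").content)
```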
## LLMs
### Bedrock
@ -20,7 +42,7 @@ The `LangChain` integrations related to [Amazon AWS](https://aws.amazon.com/) pl
See a [usage example](/docs/integrations/llms/bedrock).
```python
from langchain_community.llms.bedrock import Bedrock
from langchain_aws import BedrockLLM
```
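
A rough sketch of the completion-style API (the model ID is illustrative and requires Bedrock access in your account):

```python
from langchain_aws import BedrockLLM

# Completion-style model; invoke() returns a plain string.
llm = BedrockLLM(model_id="anthropic.claude-v2")
print(llm.invoke("Tell me a one-line joke."))
```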
### Amazon API Gateway
@ -53,18 +75,7 @@ We use `SageMaker` to host our model and expose it as the `SageMaker Endpoint`.
See a [usage example](/docs/integrations/llms/sagemaker).
```python
from langchain_community.llms import SagemakerEndpoint
from langchain_community.llms.sagemaker_endpoint import LLMContentHandler
```
## Chat models
### Bedrock Chat
See a [usage example](/docs/integrations/chat/bedrock).
```python
from langchain_community.chat_models import BedrockChat
from langchain_aws import SagemakerEndpoint
```
## Embedding Models
@ -188,16 +199,16 @@ from langchain.vectorstores import DocumentDBVectorSearch
> manuals, and websites. It supports multiple languages and can understand complex queries, synonyms, and
> contextual meanings to provide highly relevant search results.
We need to install the `boto3` library.
We need to install the `langchain-aws` library.
```bash
pip install boto3
pip install langchain-aws
```
See a [usage example](/docs/integrations/retrievers/amazon_kendra_retriever).
```python
from langchain.retrievers import AmazonKendraRetriever
from langchain_aws import AmazonKendraRetriever
```
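
A minimal sketch, assuming you already have a Kendra index (the index ID is a placeholder):

```python
from langchain_aws import AmazonKendraRetriever

# Placeholder index ID; credentials and region come from your AWS configuration.
retriever = AmazonKendraRetriever(index_id="<your-kendra-index-id>")
docs = retriever.invoke("How do I rotate my access keys?")
```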
### Amazon Bedrock (Knowledge Bases)
@ -206,16 +217,16 @@ from langchain.retrievers import AmazonKendraRetriever
> `Amazon Web Services` (`AWS`) offering which lets you quickly build RAG applications by using your
> private data to customize foundation model responses.
We need to install the `boto3` library.
We need to install the `langchain-aws` library.
```bash
pip install boto3
pip install langchain-aws
```
See a [usage example](/docs/integrations/retrievers/bedrock).
```python
from langchain.retrievers import AmazonKnowledgeBasesRetriever
from langchain_aws import AmazonKnowledgeBasesRetriever
```
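
A minimal sketch, assuming an existing knowledge base (the ID and retrieval settings below are placeholders):

```python
from langchain_aws import AmazonKnowledgeBasesRetriever

# Placeholder knowledge base ID; numberOfResults is an illustrative setting.
retriever = AmazonKnowledgeBasesRetriever(
    knowledge_base_id="<your-knowledge-base-id>",
    retrieval_config={"vectorSearchConfiguration": {"numberOfResults": 4}},
)
docs = retriever.invoke("What does the runbook say about failover?")
```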
## Tools

@ -158,9 +158,9 @@
}
],
"source": [
"from langchain_community.chat_models import BedrockChat\n",
"from langchain_aws import ChatBedrock\n",
"\n",
"llm = BedrockChat(model_id=\"anthropic.claude-v2\")\n",
"llm = ChatBedrock(model_id=\"anthropic.claude-v2\")\n",
"msg = llm.invoke([(\"human\", \"What's the oldest known example of cuneiform\")])\n",
"msg.response_metadata"
]

@ -307,7 +307,7 @@
},
{
"cell_type": "code",
"execution_count": 19,
"execution_count": 1,
"id": "4a3eced5-2ff7-49a7-a48b-768af8658323",
"metadata": {},
"outputs": [
@ -315,33 +315,41 @@
"name": "stdout",
"output_type": "stream",
"text": [
"Tokens Used: 79\n",
"\tPrompt Tokens: 26\n",
"\tCompletion Tokens: 53\n",
"Tokens Used: 0\n",
"\tPrompt Tokens: 0\n",
"\tCompletion Tokens: 0\n",
"Successful Requests: 2\n",
"Total Cost (USD): $0.00148\n"
"Total Cost (USD): $0.0\n"
]
}
],
"source": [
"# !pip install boto3\n",
"# !pip install langchain-aws\n",
"from langchain_aws import ChatBedrock\n",
"from langchain_community.callbacks.manager import get_bedrock_anthropic_callback\n",
"from langchain_community.chat_models import BedrockChat\n",
"\n",
"llm = BedrockChat(model_id=\"anthropic.claude-v2\")\n",
"llm = ChatBedrock(model_id=\"anthropic.claude-v2\")\n",
"\n",
"with get_bedrock_anthropic_callback() as cb:\n",
" result = llm.invoke(\"Tell me a joke\")\n",
" result2 = llm.invoke(\"Tell me a joke\")\n",
" print(cb)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "bb40375d",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "poetry-venv-2",
"display_name": ".venv",
"language": "python",
"name": "poetry-venv-2"
"name": "python3"
},
"language_info": {
"codemirror_mode": {
@ -353,7 +361,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.1"
"version": "3.11.4"
}
},
"nbformat": 4,

@ -2,6 +2,7 @@ import re
from collections import defaultdict
from typing import Any, Dict, Iterator, List, Optional, Tuple, Union
from langchain_core._api.deprecation import deprecated
from langchain_core.callbacks import (
CallbackManagerForLLMRun,
)
@ -195,6 +196,9 @@ class ChatPromptAdapter:
_message_type_lookups = {"human": "user", "ai": "assistant"}
@deprecated(
since="0.0.34", removal="0.3", alternative_import="langchain_aws.ChatBedrock"
)
class BedrockChat(BaseChatModel, BedrockBase):
"""Chat model that uses the Bedrock API."""

@ -14,6 +14,7 @@ from typing import (
Tuple,
)
from langchain_core._api.deprecation import deprecated
from langchain_core.callbacks import (
AsyncCallbackManagerForLLMRun,
CallbackManagerForLLMRun,
@ -711,6 +712,9 @@ class BedrockBase(BaseModel, ABC):
run_manager.on_llm_new_token(chunk.text, chunk=chunk) # type: ignore[unused-coroutine]
@deprecated(
since="0.0.34", removal="0.3", alternative_import="langchain_aws.BedrockLLM"
)
class Bedrock(LLM, BedrockBase):
"""Bedrock models.

@ -235,10 +235,10 @@ SERIALIZABLE_MAPPING: Dict[Tuple[str, ...], Tuple[str, ...]] = {
"AzureChatOpenAI",
),
("langchain", "chat_models", "bedrock", "BedrockChat"): (
"langchain",
"langchain_aws",
"chat_models",
"bedrock",
"BedrockChat",
"ChatBedrock",
),
("langchain", "chat_models", "anthropic", "ChatAnthropic"): (
"langchain_anthropic",
@ -311,10 +311,10 @@ SERIALIZABLE_MAPPING: Dict[Tuple[str, ...], Tuple[str, ...]] = {
"BaseOpenAI",
),
("langchain", "llms", "bedrock", "Bedrock"): (
"langchain",
"langchain_aws",
"llms",
"bedrock",
"Bedrock",
"BedrockLLM",
),
("langchain", "llms", "fireworks", "Fireworks"): (
"langchain",

@ -1,6 +1,6 @@
import os
from langchain_community.chat_models import BedrockChat
from langchain_aws import ChatBedrock
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.runnables import ConfigurableField
@ -16,11 +16,11 @@ _model_kwargs = {
# Full list of base model IDs is available at
# https://docs.aws.amazon.com/bedrock/latest/userguide/model-ids-arns.html
_model_alts = {
"claude_2_1": BedrockChat(
"claude_2_1": ChatBedrock(
model_id="anthropic.claude-v2:1", model_kwargs=_model_kwargs
),
"claude_1": BedrockChat(model_id="anthropic.claude-v1", model_kwargs=_model_kwargs),
"claude_instant_1": BedrockChat(
"claude_1": ChatBedrock(model_id="anthropic.claude-v1", model_kwargs=_model_kwargs),
"claude_instant_1": ChatBedrock(
model_id="anthropic.claude-instant-v1", model_kwargs=_model_kwargs
),
}
@ -34,7 +34,7 @@ _prompt = ChatPromptTemplate.from_messages(
]
)
_model = BedrockChat(
_model = ChatBedrock(
model_id="anthropic.claude-v2", model_kwargs=_model_kwargs
).configurable_alternatives(
which=ConfigurableField(
