cohere, docs: update imports and installs to langchain_cohere (#19918)

cohere: update imports and installs to langchain_cohere

---------

Co-authored-by: Harry M <127103098+harry-cohere@users.noreply.github.com>
Co-authored-by: Erick Friis <erick@langchain.dev>

@@ -149,7 +149,7 @@ llm = ChatAnthropic(anthropic_api_key="...")
First we'll need to install the Cohere integration package.
```shell
pip install cohere
pip install langchain-cohere
```
Accessing the API requires an API key, which you can get by creating an account and heading [here](https://dashboard.cohere.com/api-keys). Once we have a key we'll want to set it as an environment variable by running:
@@ -161,7 +161,7 @@ export COHERE_API_KEY="..."
We can then initialize the model:
```python
from langchain_community.chat_models import ChatCohere
from langchain_cohere import ChatCohere
llm = ChatCohere()
```
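Once initialized, the model can be used like any other LangChain chat model. A minimal sketch, assuming `COHERE_API_KEY` is set in the environment (the prompt text is illustrative):
```python
from langchain_core.messages import HumanMessage
from langchain_cohere import ChatCohere

llm = ChatCohere()
# Invoke the chat model with a single human message and print the reply text.
response = llm.invoke([HumanMessage(content="Knock knock")])
print(response.content)
```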
@@ -169,7 +169,7 @@ llm = ChatCohere()
If you'd prefer not to set an environment variable, you can pass the key in directly via the `cohere_api_key` named parameter when initializing the Cohere LLM class:
```python
from langchain_community.chat_models import ChatCohere
from langchain_cohere import ChatCohere
llm = ChatCohere(cohere_api_key="...")
```

@@ -29,10 +29,10 @@
"source": [
"## Setup\n",
"\n",
"The integration lives in the `langchain-community` package. We also need to install the `cohere` package itself. We can install these with:\n",
"The integration lives in the `langchain-cohere` package. We can install these with:\n",
"\n",
"```bash\n",
"pip install -U langchain-community langchain-cohere\n",
"pip install -U langchain-cohere\n",
"```\n",
"\n",
"We'll also need to get a [Cohere API key](https://cohere.com/) and set the `COHERE_API_KEY` environment variable:"
@@ -40,7 +40,7 @@
},
{
"cell_type": "code",
"execution_count": 1,
"execution_count": 11,
"id": "2108b517-1e8d-473d-92fa-4f930e8072a7",
"metadata": {},
"outputs": [],
@@ -61,7 +61,7 @@
},
{
"cell_type": "code",
"execution_count": 2,
"execution_count": 12,
"id": "7f11de02",
"metadata": {},
"outputs": [],
@@ -82,7 +82,7 @@
},
{
"cell_type": "code",
"execution_count": 3,
"execution_count": 13,
"id": "d4a7c55d-b235-4ca4-a579-c90cc9570da9",
"metadata": {
"tags": []
@@ -95,19 +95,19 @@
},
{
"cell_type": "code",
"execution_count": 4,
"execution_count": 14,
"id": "70cf04e8-423a-4ff6-8b09-f11fb711c817",
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"chat = ChatCohere(model=\"command\", max_tokens=256, temperature=0.75)"
"chat = ChatCohere(model=\"command\")"
]
},
{
"cell_type": "code",
"execution_count": 5,
"execution_count": 15,
"id": "8199ef8f-eb8b-4253-9ea0-6c24a013ca4c",
"metadata": {
"tags": []
@@ -116,10 +116,10 @@
{
"data": {
"text/plain": [
"AIMessage(content=\"4! That's one, two, three, four. Keep adding and we'll reach new heights!\", response_metadata={'documents': None, 'citations': None, 'search_results': None, 'search_queries': None, 'token_count': {'prompt_tokens': 73, 'response_tokens': 21, 'total_tokens': 94, 'billed_tokens': 25}})"
"AIMessage(content='4 && 5 \\n6 || 7 \\n\\nWould you like to play a game of odds and evens?', additional_kwargs={'documents': None, 'citations': None, 'search_results': None, 'search_queries': None, 'is_search_required': None, 'generation_id': '2076b614-52b3-4082-a259-cc92cd3d9fea', 'token_count': {'prompt_tokens': 68, 'response_tokens': 23, 'total_tokens': 91, 'billed_tokens': 77}}, response_metadata={'documents': None, 'citations': None, 'search_results': None, 'search_queries': None, 'is_search_required': None, 'generation_id': '2076b614-52b3-4082-a259-cc92cd3d9fea', 'token_count': {'prompt_tokens': 68, 'response_tokens': 23, 'total_tokens': 91, 'billed_tokens': 77}}, id='run-3475e0c8-c89b-4937-9300-e07d652455e1-0')"
]
},
"execution_count": 5,
"execution_count": 15,
"metadata": {},
"output_type": "execute_result"
}
@@ -131,7 +131,7 @@
},
{
"cell_type": "code",
"execution_count": 6,
"execution_count": 16,
"id": "c5fac0e9-05a4-4fc1-a3b3-e5bbb24b971b",
"metadata": {
"tags": []
@@ -140,10 +140,10 @@
{
"data": {
"text/plain": [
"AIMessage(content='4! According to the rules of addition, 1 + 2 equals 3, and 3 + 3 equals 6.', response_metadata={'documents': None, 'citations': None, 'search_results': None, 'search_queries': None, 'token_count': {'prompt_tokens': 73, 'response_tokens': 28, 'total_tokens': 101, 'billed_tokens': 32}})"
"AIMessage(content='4 && 5', additional_kwargs={'documents': None, 'citations': None, 'search_results': None, 'search_queries': None, 'is_search_required': None, 'generation_id': 'f0708a92-f874-46ee-9b93-334d616ad92e', 'token_count': {'prompt_tokens': 68, 'response_tokens': 3, 'total_tokens': 71, 'billed_tokens': 57}}, response_metadata={'documents': None, 'citations': None, 'search_results': None, 'search_queries': None, 'is_search_required': None, 'generation_id': 'f0708a92-f874-46ee-9b93-334d616ad92e', 'token_count': {'prompt_tokens': 68, 'response_tokens': 3, 'total_tokens': 71, 'billed_tokens': 57}}, id='run-1635e63e-2994-4e7f-986e-152ddfc95777-0')"
]
},
"execution_count": 6,
"execution_count": 16,
"metadata": {},
"output_type": "execute_result"
}
@@ -154,7 +154,7 @@
},
{
"cell_type": "code",
"execution_count": 7,
"execution_count": 17,
"id": "025be980-e50d-4a68-93dc-c9c7b500ce34",
"metadata": {
"tags": []
@@ -164,7 +164,7 @@
"name": "stdout",
"output_type": "stream",
"text": [
"4! It's a pleasure to be of service in this mathematical game."
"4 && 5"
]
}
],
@@ -175,17 +175,17 @@
},
{
"cell_type": "code",
"execution_count": 8,
"execution_count": 18,
"id": "064288e4-f184-4496-9427-bcf148fa055e",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"[AIMessage(content='4! According to the rules of addition, 1 + 2 equals 3, and 3 + 3 equals 6.', response_metadata={'documents': None, 'citations': None, 'search_results': None, 'search_queries': None, 'token_count': {'prompt_tokens': 73, 'response_tokens': 28, 'total_tokens': 101, 'billed_tokens': 32}})]"
"[AIMessage(content='4 && 5', additional_kwargs={'documents': None, 'citations': None, 'search_results': None, 'search_queries': None, 'is_search_required': None, 'generation_id': '6770ca86-f6c3-4ba3-a285-c4772160612f', 'token_count': {'prompt_tokens': 68, 'response_tokens': 3, 'total_tokens': 71, 'billed_tokens': 57}}, response_metadata={'documents': None, 'citations': None, 'search_results': None, 'search_queries': None, 'is_search_required': None, 'generation_id': '6770ca86-f6c3-4ba3-a285-c4772160612f', 'token_count': {'prompt_tokens': 68, 'response_tokens': 3, 'total_tokens': 71, 'billed_tokens': 57}}, id='run-8d6fade2-1b39-4e31-ab23-4be622dd0027-0')]"
]
},
"execution_count": 8,
"execution_count": 18,
"metadata": {},
"output_type": "execute_result"
}
@@ -206,7 +206,7 @@
},
{
"cell_type": "code",
"execution_count": 9,
"execution_count": 19,
"id": "0851b103",
"metadata": {},
"outputs": [],
@@ -219,17 +219,17 @@
},
{
"cell_type": "code",
"execution_count": 10,
"execution_count": 20,
"id": "ae950c0f-1691-47f1-b609-273033cae707",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"AIMessage(content='What do you call a bear with no teeth? A gummy bear!', response_metadata={'documents': None, 'citations': None, 'search_results': None, 'search_queries': None, 'token_count': {'prompt_tokens': 72, 'response_tokens': 14, 'total_tokens': 86, 'billed_tokens': 20}})"
"AIMessage(content='What color socks do bears wear?\\n\\nThey dont wear socks, they have bear feet. \\n\\nHope you laughed! If not, maybe this will help: laughter is the best medicine, and a good sense of humor is infectious!', additional_kwargs={'documents': None, 'citations': None, 'search_results': None, 'search_queries': None, 'is_search_required': None, 'generation_id': '6edccf44-9bc8-4139-b30e-13b368f3563c', 'token_count': {'prompt_tokens': 68, 'response_tokens': 51, 'total_tokens': 119, 'billed_tokens': 108}}, response_metadata={'documents': None, 'citations': None, 'search_results': None, 'search_queries': None, 'is_search_required': None, 'generation_id': '6edccf44-9bc8-4139-b30e-13b368f3563c', 'token_count': {'prompt_tokens': 68, 'response_tokens': 51, 'total_tokens': 119, 'billed_tokens': 108}}, id='run-ef7f9789-0d4d-43bf-a4f7-f2a0e27a5320-0')"
]
},
"execution_count": 10,
"execution_count": 20,
"metadata": {},
"output_type": "execute_result"
}

@@ -6,7 +6,7 @@
## Installation and Setup
- Install the Cohere integration package:
```bash
pip install cohere
pip install langchain-cohere
```
Get a [Cohere API key](https://dashboard.cohere.ai/) and set it as an environment variable (`COHERE_API_KEY`).
@@ -15,10 +15,10 @@ Get a [Cohere api key](https://dashboard.cohere.ai/) and set it as an environmen
|API|description|Endpoint docs|Import|Example usage|
|---|---|---|---|---|
|Chat|Build chat bots|[chat](https://docs.cohere.com/reference/chat)|`from langchain_community.chat_models import ChatCohere`|[cohere.ipynb](/docs/integrations/chat/cohere)|
|LLM|Generate text|[generate](https://docs.cohere.com/reference/generate)|`from langchain_community.llms import Cohere`|[cohere.ipynb](/docs/integrations/llms/cohere)|
|Chat|Build chat bots|[chat](https://docs.cohere.com/reference/chat)|`from langchain_cohere import ChatCohere`|[cohere.ipynb](/docs/integrations/chat/cohere)|
|LLM|Generate text|[generate](https://docs.cohere.com/reference/generate)|`from langchain_cohere import Cohere`|[cohere.ipynb](/docs/integrations/llms/cohere)|
|RAG Retriever|Connect to external data sources|[chat + rag](https://docs.cohere.com/reference/chat)|`from langchain.retrievers import CohereRagRetriever`|[cohere.ipynb](/docs/integrations/retrievers/cohere)|
|Text Embedding|Embed strings to vectors|[embed](https://docs.cohere.com/reference/embed)|`from langchain_community.embeddings import CohereEmbeddings`|[cohere.ipynb](/docs/integrations/text_embedding/cohere)|
|Text Embedding|Embed strings to vectors|[embed](https://docs.cohere.com/reference/embed)|`from langchain_cohere import CohereEmbeddings`|[cohere.ipynb](/docs/integrations/text_embedding/cohere)|
|Rerank Retriever|Rank strings based on relevance|[rerank](https://docs.cohere.com/reference/rerank)|`from langchain.retrievers.document_compressors import CohereRerank`|[cohere.ipynb](/docs/integrations/retrievers/cohere-reranker)|
## Quick copy examples
@@ -26,7 +26,7 @@ Get a [Cohere api key](https://dashboard.cohere.ai/) and set it as an environmen
### Chat
```python
from langchain_community.chat_models import ChatCohere
from langchain_cohere import ChatCohere
from langchain_core.messages import HumanMessage
chat = ChatCohere()
messages = [HumanMessage(content="knock knock")]
@@ -37,17 +37,45 @@ print(chat(messages))
```python
from langchain_community.llms import Cohere
from langchain_cohere import Cohere
llm = Cohere(model="command")
print(llm.invoke("Come up with a pet name"))
```
### ReAct Agent
```python
from langchain_community.tools.tavily_search import TavilySearchResults
from langchain_cohere import ChatCohere, create_cohere_react_agent
from langchain.prompts import ChatPromptTemplate
from langchain.agents import AgentExecutor
llm = ChatCohere()
internet_search = TavilySearchResults(max_results=4)
internet_search.name = "internet_search"
internet_search.description = "Route a user query to the internet"
prompt = ChatPromptTemplate.from_template("{input}")
agent = create_cohere_react_agent(
llm,
[internet_search],
prompt
)
agent_executor = AgentExecutor(agent=agent, tools=[internet_search], verbose=True)
agent_executor.invoke({
"input": "In what year was the company that was founded as Sound of Music added to the S&P 500?",
})
```
### RAG Retriever
```python
from langchain_community.chat_models import ChatCohere
from langchain_cohere import ChatCohere
from langchain.retrievers import CohereRagRetriever
from langchain_core.documents import Document
@@ -58,7 +86,7 @@ print(rag.get_relevant_documents("What is cohere ai?"))
### Text Embedding
```python
from langchain_community.embeddings import CohereEmbeddings
from langchain_cohere import CohereEmbeddings
embeddings = CohereEmbeddings(model="embed-english-light-v3.0")
print(embeddings.embed_documents(["This is a test document."]))

@@ -55,7 +55,7 @@ embeddings_model = OpenAIEmbeddings()
To start we'll need to install the Cohere integration package:
```bash
pip install cohere
pip install langchain-cohere
```
Accessing the API requires an API key, which you can get by creating an account and heading [here](https://dashboard.cohere.com/api-keys). Once we have a key we'll want to set it as an environment variable by running:
@@ -67,14 +67,14 @@ export COHERE_API_KEY="..."
If you'd prefer not to set an environment variable, you can pass the key in directly via the `cohere_api_key` named parameter when initializing the Cohere embeddings class:
```python
from langchain_community.embeddings import CohereEmbeddings
from langchain_cohere import CohereEmbeddings
embeddings_model = CohereEmbeddings(cohere_api_key="...")
```
Otherwise you can initialize without any params:
```python
from langchain_community.embeddings import CohereEmbeddings
from langchain_cohere import CohereEmbeddings
embeddings_model = CohereEmbeddings()
```
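As a quick sanity check, you can embed a query and a few documents once the model is initialized. A minimal sketch, assuming `COHERE_API_KEY` is set and the package's default embedding model; the strings are illustrative:
```python
from langchain_cohere import CohereEmbeddings

embeddings_model = CohereEmbeddings()
# embed_query returns a single vector (a list of floats) for one string.
query_vector = embeddings_model.embed_query("What is Cohere?")
# embed_documents returns one vector per input string.
doc_vectors = embeddings_model.embed_documents(
    ["Cohere builds large language models.", "LangChain is a framework for LLM applications."]
)
print(len(query_vector), len(doc_vectors))
```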

@@ -99,7 +99,7 @@ chat_model = ChatAnthropic(anthropic_api_key="...")
First we'll need to install their partner package:
```shell
pip install cohere
pip install langchain-cohere
```
Accessing the API requires an API key, which you can get by creating an account and heading [here](https://dashboard.cohere.com/api-keys). Once we have a key we'll want to set it as an environment variable by running:
@@ -111,7 +111,7 @@ export COHERE_API_KEY="..."
We can then initialize the model:
```python
from langchain_community.chat_models import ChatCohere
from langchain_cohere import ChatCohere
chat_model = ChatCohere()
```
@@ -119,7 +119,7 @@ chat_model = ChatCohere()
If you'd prefer not to set an environment variable, you can pass the key in directly via the `cohere_api_key` named parameter when initializing the Cohere chat model class:
```python
from langchain_community.chat_models import ChatCohere
from langchain_cohere import ChatCohere
chat_model = ChatCohere(cohere_api_key="...")
```

@@ -1 +1,93 @@
# langchain-cohere
# Cohere
> [Cohere](https://cohere.ai/about) is a Canadian startup that provides natural language processing models
> that help companies improve human-machine interactions.
## Installation and Setup
- Install the Cohere integration package:
```bash
pip install langchain-cohere
```
Get a [Cohere API key](https://dashboard.cohere.ai/) and set it as an environment variable (`COHERE_API_KEY`).
## Cohere LangChain integrations
| API | description | Endpoint docs | Import | Example usage |
| ---------------- | -------------------------------- | ------------------------------------------------------ | -------------------------------------------------------------------- | ------------------------------------------------------------- |
| Chat | Build chat bots | [chat](https://docs.cohere.com/reference/chat) | `from langchain_cohere import ChatCohere` | [cohere.ipynb](/docs/integrations/chat/cohere) |
| LLM | Generate text | [generate](https://docs.cohere.com/reference/generate) | `from langchain_cohere import Cohere` | [cohere.ipynb](/docs/integrations/llms/cohere) |
| RAG Retriever | Connect to external data sources | [chat + rag](https://docs.cohere.com/reference/chat) | `from langchain.retrievers import CohereRagRetriever` | [cohere.ipynb](/docs/integrations/retrievers/cohere) |
| Text Embedding | Embed strings to vectors | [embed](https://docs.cohere.com/reference/embed) | `from langchain_cohere import CohereEmbeddings` | [cohere.ipynb](/docs/integrations/text_embedding/cohere) |
| Rerank Retriever | Rank strings based on relevance | [rerank](https://docs.cohere.com/reference/rerank) | `from langchain.retrievers.document_compressors import CohereRerank` | [cohere.ipynb](/docs/integrations/retrievers/cohere-reranker) |
## Quick copy examples
### Chat
```python
from langchain_cohere import ChatCohere
from langchain_core.messages import HumanMessage
chat = ChatCohere()
messages = [HumanMessage(content="knock knock")]
print(chat(messages))
```
### LLM
```python
from langchain_cohere import Cohere
llm = Cohere(model="command")
print(llm.invoke("Come up with a pet name"))
```
### ReAct Agent
```python
from langchain_community.tools.tavily_search import TavilySearchResults
from langchain_cohere import ChatCohere, create_cohere_react_agent
from langchain.prompts import ChatPromptTemplate
from langchain.agents import AgentExecutor
llm = ChatCohere()
internet_search = TavilySearchResults(max_results=4)
internet_search.name = "internet_search"
internet_search.description = "Route a user query to the internet"
prompt = ChatPromptTemplate.from_template("{input}")
agent = create_cohere_react_agent(
llm,
[internet_search],
prompt
)
agent_executor = AgentExecutor(agent=agent, tools=[internet_search], verbose=True)
agent_executor.invoke({
"input": "In what year was the company that was founded as Sound of Music added to the S&P 500?",
})
```
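Note that `TavilySearchResults` is just one example of a search tool; it lives in `langchain_community` and reads its own API key from the environment. A minimal setup sketch, assuming you have keys for both services (the placeholder values are illustrative):
```python
import os

# Replace the placeholders with real keys, or export these variables
# in your shell before running the agent example above.
os.environ["COHERE_API_KEY"] = "..."
os.environ["TAVILY_API_KEY"] = "..."
```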
### RAG Retriever
```python
from langchain_cohere import ChatCohere
from langchain.retrievers import CohereRagRetriever
from langchain_core.documents import Document
rag = CohereRagRetriever(llm=ChatCohere())
print(rag.get_relevant_documents("What is cohere ai?"))
```
### Text Embedding
```python
from langchain_cohere import CohereEmbeddings
embeddings = CohereEmbeddings(model="embed-english-light-v3.0")
print(embeddings.embed_documents(["This is a test document."]))
```
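### Rerank
The rerank endpoint from the table above doesn't have a quick-copy snippet yet; a minimal sketch, assuming `COHERE_API_KEY` is set and using a couple of in-memory documents (the texts and query are illustrative):
```python
from langchain.retrievers.document_compressors import CohereRerank
from langchain_core.documents import Document

reranker = CohereRerank()
docs = [
    Document(page_content="Cohere is an AI company that builds large language models."),
    Document(page_content="Bears hibernate during the winter."),
]
# compress_documents scores each document against the query and returns them
# ordered by relevance, with the score stored in each document's metadata.
for doc in reranker.compress_documents(docs, query="What is Cohere?"):
    print(doc.metadata.get("relevance_score"), doc.page_content)
```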

@@ -99,8 +99,7 @@
"from langchain.agents import AgentExecutor\n",
"from langchain.retrievers import WikipediaRetriever\n",
"from langchain.tools.retriever import create_retriever_tool\n",
"from langchain_cohere import create_cohere_tools_agent\n",
"from langchain_cohere.chat_models import ChatCohere\n",
"from langchain_cohere import create_cohere_tools_agent, ChatCohere\n",
"from langchain_core.prompts import ChatPromptTemplate"
]
},
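For context, a rough sketch of how those imports fit together in a tools agent; the tool name, description, prompt, and question below are illustrative assumptions rather than the notebook's own values:
```python
from langchain.agents import AgentExecutor
from langchain.retrievers import WikipediaRetriever
from langchain.tools.retriever import create_retriever_tool
from langchain_cohere import create_cohere_tools_agent, ChatCohere
from langchain_core.prompts import ChatPromptTemplate

# Wrap a Wikipedia retriever as a tool the agent is allowed to call.
wiki_tool = create_retriever_tool(
    WikipediaRetriever(),
    name="wikipedia",
    description="Look up encyclopedic facts on Wikipedia.",
)

llm = ChatCohere()
prompt = ChatPromptTemplate.from_template("{input}")

# create_cohere_tools_agent binds the tools to the model and returns a runnable agent.
agent = create_cohere_tools_agent(llm, [wiki_tool], prompt)
agent_executor = AgentExecutor(agent=agent, tools=[wiki_tool], verbose=True)
agent_executor.invoke({"input": "Who painted the Mona Lisa?"})
```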

@@ -42,7 +42,7 @@ class CohereEmbeddings(BaseModel, Embeddings):
"""Maximum number of retries to make when generating."""
request_timeout: Optional[float] = None
"""Timeout in seconds for the Cohere API request."""
user_agent: str = "langchain"
user_agent: str = "langchain:partner"
"""Identifier for the application making the request."""
base_url: Optional[str] = None
