…tch]: import models from community
ran
```bash
git grep -l 'from langchain\.chat_models' | xargs -L 1 sed -i '' "s/from\ langchain\.chat_models/from\ langchain_community.chat_models/g"
git grep -l 'from langchain\.llms' | xargs -L 1 sed -i '' "s/from\ langchain\.llms/from\ langchain_community.llms/g"
git grep -l 'from langchain\.embeddings' | xargs -L 1 sed -i '' "s/from\ langchain\.embeddings/from\ langchain_community.embeddings/g"
git checkout master libs/langchain/tests/unit_tests/llms
git checkout master libs/langchain/tests/unit_tests/chat_models
git checkout master libs/langchain/tests/unit_tests/embeddings/test_imports.py
make format
cd libs/langchain; make format
cd ../experimental; make format
cd ../core; make format
```
" \"You are a planner who is an expert at coming up with a todo list for a given objective. Come up with a todo list for this objective: {objective}\"\n",
" \"You are a planner who is an expert at coming up with a todo list for a given objective. Come up with a todo list for this objective: {objective}\"\n",
"- To use Azure embeddings with OpenAI V1, you'll need to use the new `AzureOpenAIEmbeddings` instead of the existing `OpenAIEmbeddings`. `OpenAIEmbeddings` continue to work when using Azure with `openai<1`.\n",
"- To use Azure embeddings with OpenAI V1, you'll need to use the new `AzureOpenAIEmbeddings` instead of the existing `OpenAIEmbeddings`. `OpenAIEmbeddings` continue to work when using Azure with `openai<1`.\n",
from langchain_community.chat_models import ChatOpenAI
llm = ChatOpenAI()
llm = ChatOpenAI()
```
```
@@ -93,7 +93,7 @@ llm = ChatOpenAI()
If you'd prefer not to set an environment variable you can pass the key in directly via the `openai_api_key` named parameter when initiating the OpenAI LLM class:
If you'd prefer not to set an environment variable you can pass the key in directly via the `openai_api_key` named parameter when initiating the OpenAI LLM class:
```python
```python
from langchain.chat_models import ChatOpenAI
from langchain_community.chat_models import ChatOpenAI
llm = ChatOpenAI(openai_api_key="...")
llm = ChatOpenAI(openai_api_key="...")
```
```
@@ -110,7 +110,7 @@ First, follow [these instructions](https://github.com/jmorganca/ollama) to set u
Then, make sure the Ollama server is running. After that, you can do:
Then, make sure the Ollama server is running. After that, you can do:
```python
```python
from langchain.llms import Ollama
from langchain_community.llms import Ollama
llm = Ollama(model="llama2")
llm = Ollama(model="llama2")
```
```
@@ -412,7 +412,7 @@ pip install langchainhub
Now we can use it to get a predefined prompt
Now we can use it to get a predefined prompt
```python
```python
from langchain.chat_models import ChatOpenAI
from langchain_community.chat_models import ChatOpenAI
from langchain import hub
from langchain import hub
from langchain.agents import create_openai_functions_agent
from langchain.agents import create_openai_functions_agent
from langchain.agents import AgentExecutor
from langchain.agents import AgentExecutor
@@ -476,14 +476,14 @@ from typing import List
from fastapi import FastAPI
from fastapi import FastAPI
from langchain.prompts import ChatPromptTemplate
from langchain.prompts import ChatPromptTemplate
from langchain.chat_models import ChatOpenAI
from langchain_community.chat_models import ChatOpenAI
from langchain_community.document_loaders import WebBaseLoader
from langchain_community.document_loaders import WebBaseLoader
from langchain_community.embeddings import OpenAIEmbeddings
from langchain_community.embeddings import OpenAIEmbeddings
from langchain_community.vectorstores import DocArrayInMemorySearch
from langchain_community.vectorstores import DocArrayInMemorySearch
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.tools.retriever import create_retriever_tool
from langchain.tools.retriever import create_retriever_tool
from langchain_community.tools.tavily_search import TavilySearchResults
from langchain_community.tools.tavily_search import TavilySearchResults
from langchain.chat_models import ChatOpenAI
from langchain_community.chat_models import ChatOpenAI
from langchain import hub
from langchain import hub
from langchain.agents import create_openai_functions_agent
from langchain.agents import create_openai_functions_agent