diff --git a/docs/docs/integrations/chat/google_vertex_ai_palm.ipynb b/docs/docs/integrations/chat/google_vertex_ai_palm.ipynb index 696add6701..8d194eb048 100644 --- a/docs/docs/integrations/chat/google_vertex_ai_palm.ipynb +++ b/docs/docs/integrations/chat/google_vertex_ai_palm.ipynb @@ -5,7 +5,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "# GCP Vertex AI \n", + "# Google Cloud Vertex AI \n", "\n", "Note: This is separate from the Google PaLM integration. Google has chosen to offer an enterprise version of PaLM through GCP, and this supports the models made available through there. \n", "\n", @@ -31,7 +31,7 @@ }, "outputs": [], "source": [ - "#!pip install langchain google-cloud-aiplatform" + "#!pip install langchain google-cloud-aiplatform\n" ] }, { @@ -41,7 +41,7 @@ "outputs": [], "source": [ "from langchain.chat_models import ChatVertexAI\n", - "from langchain.prompts import ChatPromptTemplate" + "from langchain.prompts import ChatPromptTemplate\n" ] }, { @@ -50,7 +50,7 @@ "metadata": {}, "outputs": [], "source": [ - "chat = ChatVertexAI()" + "chat = ChatVertexAI()\n" ] }, { @@ -64,7 +64,7 @@ "prompt = ChatPromptTemplate.from_messages(\n", " [(\"system\", system), (\"human\", human)]\n", ")\n", - "messages = prompt.format_messages()" + "messages = prompt.format_messages()\n" ] }, { @@ -84,7 +84,7 @@ } ], "source": [ - "chat(messages)" + "chat(messages)\n" ] }, { @@ -104,7 +104,7 @@ "human = \"{text}\"\n", "prompt = ChatPromptTemplate.from_messages(\n", " [(\"system\", system), (\"human\", human)]\n", - ")" + ")\n" ] }, { @@ -127,7 +127,7 @@ "chain = prompt | chat\n", "chain.invoke(\n", " {\"input_language\": \"English\", \"output_language\": \"Japanese\", \"text\": \"I love programming\"}\n", - ")" + ")\n" ] }, { @@ -161,7 +161,7 @@ " model_name=\"codechat-bison\",\n", " max_output_tokens=1000,\n", " temperature=0.5\n", - ")" + ")\n" ] }, { @@ -189,7 +189,7 @@ ], "source": [ "# For simple string in string out usage, we can use the `predict` method:\n", - "print(chat.predict(\"Write a Python function to identify all prime numbers\"))" + "print(chat.predict(\"Write a Python function to identify all prime numbers\"))\n" ] }, { @@ -209,7 +209,7 @@ "source": [ "import asyncio\n", "# import nest_asyncio\n", - "# nest_asyncio.apply()" + "# nest_asyncio.apply()\n" ] }, { @@ -237,7 +237,7 @@ " top_k=40,\n", ")\n", "\n", - "asyncio.run(chat.agenerate([messages]))" + "asyncio.run(chat.agenerate([messages]))\n" ] }, { @@ -257,7 +257,7 @@ } ], "source": [ - "asyncio.run(chain.ainvoke({\"input_language\": \"English\", \"output_language\": \"Sanskrit\", \"text\": \"I love programming\"}))" + "asyncio.run(chain.ainvoke({\"input_language\": \"English\", \"output_language\": \"Sanskrit\", \"text\": \"I love programming\"}))\n" ] }, { @@ -275,7 +275,7 @@ "metadata": {}, "outputs": [], "source": [ - "import sys" + "import sys\n" ] }, { @@ -310,7 +310,7 @@ "messages = prompt.format_messages()\n", "for chunk in chat.stream(messages):\n", " sys.stdout.write(chunk.content)\n", - " sys.stdout.flush()" + " sys.stdout.flush()\n" ] } ], diff --git a/docs/docs/integrations/llms/google_vertex_ai_palm.ipynb b/docs/docs/integrations/llms/google_vertex_ai_palm.ipynb index 40435cf37b..96d328f1b2 100644 --- a/docs/docs/integrations/llms/google_vertex_ai_palm.ipynb +++ b/docs/docs/integrations/llms/google_vertex_ai_palm.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "# GCP Vertex AI\n", + "# Google Cloud Vertex AI\n", "\n", "**Note:** This is separate from the `Google PaLM` 
integration, it exposes [Vertex AI PaLM API](https://cloud.google.com/vertex-ai/docs/generative-ai/learn/overview) on `Google Cloud`. \n" ] @@ -41,7 +41,7 @@ }, "outputs": [], "source": [ - "#!pip install langchain google-cloud-aiplatform" + "#!pip install langchain google-cloud-aiplatform\n" ] }, { @@ -50,7 +50,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.llms import VertexAI" + "from langchain.llms import VertexAI\n" ] }, { @@ -74,7 +74,7 @@ ], "source": [ "llm = VertexAI()\n", - "print(llm(\"What are some of the pros and cons of Python as a programming language?\"))" + "print(llm(\"What are some of the pros and cons of Python as a programming language?\"))\n" ] }, { @@ -90,7 +90,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.prompts import PromptTemplate" + "from langchain.prompts import PromptTemplate\n" ] }, { @@ -102,7 +102,7 @@ "template = \"\"\"Question: {question}\n", "\n", "Answer: Let's think step by step.\"\"\"\n", - "prompt = PromptTemplate.from_template(template)" + "prompt = PromptTemplate.from_template(template)\n" ] }, { @@ -111,7 +111,7 @@ "metadata": {}, "outputs": [], "source": [ - "chain = prompt | llm" + "chain = prompt | llm\n" ] }, { @@ -130,7 +130,7 @@ ], "source": [ "question = \"Who was the president in the year Justin Beiber was born?\"\n", - "print(chain.invoke({\"question\": question}))" + "print(chain.invoke({\"question\": question}))\n" ] }, { @@ -159,7 +159,7 @@ }, "outputs": [], "source": [ - "llm = VertexAI(model_name=\"code-bison\", max_output_tokens=1000, temperature=0.3)" + "llm = VertexAI(model_name=\"code-bison\", max_output_tokens=1000, temperature=0.3)\n" ] }, { @@ -168,7 +168,7 @@ "metadata": {}, "outputs": [], "source": [ - "question = \"Write a python function that checks if a string is a valid email address\"" + "question = \"Write a python function that checks if a string is a valid email address\"\n" ] }, { @@ -193,7 +193,7 @@ } ], "source": [ - "print(llm(question))" + "print(llm(question))\n" ] }, { @@ -223,7 +223,7 @@ ], "source": [ "result = llm.generate([question])\n", - "result.generations" + "result.generations\n" ] }, { @@ -243,7 +243,7 @@ "source": [ "# If running in a Jupyter notebook you'll need to install nest_asyncio\n", "\n", - "# !pip install nest_asyncio" + "# !pip install nest_asyncio\n" ] }, { @@ -254,7 +254,7 @@ "source": [ "import asyncio\n", "# import nest_asyncio\n", - "# nest_asyncio.apply()" + "# nest_asyncio.apply()\n" ] }, { @@ -274,7 +274,7 @@ } ], "source": [ - "asyncio.run(llm.agenerate([question]))" + "asyncio.run(llm.agenerate([question]))\n" ] }, { @@ -292,7 +292,7 @@ "metadata": {}, "outputs": [], "source": [ - "import sys" + "import sys\n" ] }, { @@ -337,7 +337,7 @@ "source": [ "for chunk in llm.stream(question):\n", " sys.stdout.write(chunk)\n", - " sys.stdout.flush()" + " sys.stdout.flush()\n" ] }, { @@ -360,7 +360,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.llms import VertexAIModelGarden" + "from langchain.llms import VertexAIModelGarden\n" ] }, { @@ -372,7 +372,7 @@ "llm = VertexAIModelGarden(\n", " project=\"YOUR PROJECT\",\n", " endpoint_id=\"YOUR ENDPOINT_ID\"\n", - ")" + ")\n" ] }, { @@ -381,7 +381,7 @@ "metadata": {}, "outputs": [], "source": [ - "print(llm(\"What is the meaning of life?\"))" + "print(llm(\"What is the meaning of life?\"))\n" ] }, { @@ -397,7 +397,7 @@ "metadata": {}, "outputs": [], "source": [ - "prompt = PromptTemplate.from_template(\"What is the meaning of {thing}?\")" + "prompt = PromptTemplate.from_template(\"What is 
the meaning of {thing}?\")\n" ] }, { @@ -407,7 +407,7 @@ "outputs": [], "source": [ "chian = prompt | llm\n", - "print(chain.invoke({\"thing\": \"life\"}))" + "print(chain.invoke({\"thing\": \"life\"}))\n" ] } ], diff --git a/docs/docs/integrations/platforms/google.mdx b/docs/docs/integrations/platforms/google.mdx index bcfb812686..b5df554e4e 100644 --- a/docs/docs/integrations/platforms/google.mdx +++ b/docs/docs/integrations/platforms/google.mdx @@ -30,7 +30,6 @@ Access PaLM chat models like `chat-bison` and `codechat-bison` via Google Cloud. from langchain.chat_models import ChatVertexAI ``` - ## Document Loader ### Google BigQuery @@ -51,7 +50,7 @@ from langchain.document_loaders import BigQueryLoader ### Google Cloud Storage ->[Google Cloud Storage](https://en.wikipedia.org/wiki/Google_Cloud_Storage) is a managed service for storing unstructured data. +> [Google Cloud Storage](https://en.wikipedia.org/wiki/Google_Cloud_Storage) is a managed service for storing unstructured data. First, we need to install `google-cloud-storage` python package. @@ -74,11 +73,11 @@ from langchain.document_loaders import GCSFileLoader ### Google Drive ->[Google Drive](https://en.wikipedia.org/wiki/Google_Drive) is a file storage and synchronization service developed by Google. +> [Google Drive](https://en.wikipedia.org/wiki/Google_Drive) is a file storage and synchronization service developed by Google. Currently, only `Google Docs` are supported. -First, we need to install several python package. +First, we need to install several python packages. ```bash pip install google-api-python-client google-auth-httplib2 google-auth-oauthlib @@ -91,10 +90,11 @@ from langchain.document_loaders import GoogleDriveLoader ``` ## Vector Store -### Google Vertex AI MatchingEngine +### Google Vertex AI Vector Search -> [Google Vertex AI Matching Engine](https://cloud.google.com/vertex-ai/docs/matching-engine/overview) provides -> the industry's leading high-scale low latency vector database. These vector databases are commonly +> [Google Vertex AI Vector Search](https://cloud.google.com/vertex-ai/docs/matching-engine/overview), +> formerly known as Vertex AI Matching Engine, provides the industry's leading high-scale +> low latency vector database. These vector databases are commonly > referred to as vector similarity-matching or an approximate nearest neighbor (ANN) service. We need to install several python packages. @@ -181,14 +181,28 @@ There exists a `GoogleSearchAPIWrapper` utility which wraps this API. To import ```python from langchain.utilities import GoogleSearchAPIWrapper ``` + For a more detailed walkthrough of this wrapper, see [this notebook](/docs/integrations/tools/google_search.html). We can easily load this wrapper as a Tool (to use with an Agent). We can do this with: + ```python from langchain.agents import load_tools tools = load_tools(["google-search"]) ``` +### Google Places + +See a [usage example](/docs/integrations/tools/google_places). + +``` +pip install googlemaps +``` + +```python +from langchain.tools import GooglePlacesTool +``` + ## Document Transformer ### Google Document AI @@ -216,3 +230,40 @@ See a [usage example](/docs/integrations/document_transformers/docai). from langchain.document_loaders.blob_loaders import Blob from langchain.document_loaders.parsers import DocAIParser ``` + +## Chat loaders +### Gmail + +> [Gmail](https://en.wikipedia.org/wiki/Gmail) is a free email service provided by Google. + +First, we need to install several python packages. 
+ +```bash +pip install --upgrade google-auth google-auth-oauthlib google-auth-httplib2 google-api-python-client +``` + +See a [usage example and authorizing instructions](/docs/integrations/chat_loaders/gmail). + +```python +from langchain.chat_loaders.gmail import GMailLoader +``` + +## Agents and Toolkits +### Gmail + +See a [usage example and authorizing instructions](/docs/integrations/toolkits/gmail). + +```python +from langchain.agents.agent_toolkits import GmailToolkit + +toolkit = GmailToolkit() +``` + +### Google Drive + +See a [usage example and authorizing instructions](/docs/integrations/toolkits/google_drive). + +```python +from langchain_googledrive.utilities.google_drive import GoogleDriveAPIWrapper +from langchain_googledrive.tools.google_drive.tool import GoogleDriveSearchTool +``` diff --git a/docs/docs/integrations/providers/google_document_ai.mdx b/docs/docs/integrations/providers/google_document_ai.mdx deleted file mode 100644 index 431cc4ac5f..0000000000 --- a/docs/docs/integrations/providers/google_document_ai.mdx +++ /dev/null @@ -1,28 +0,0 @@ -# Google Document AI - ->[Document AI](https://cloud.google.com/document-ai/docs/overview) is a `Google Cloud Platform` -> service to transform unstructured data from documents into structured data, making it easier -> to understand, analyze, and consume. - - -## Installation and Setup - -You need to set up a [`GCS` bucket and create your own OCR processor](https://cloud.google.com/document-ai/docs/create-processor) -The `GCS_OUTPUT_PATH` should be a path to a folder on GCS (starting with `gs://`) -and a processor name should look like `projects/PROJECT_NUMBER/locations/LOCATION/processors/PROCESSOR_ID`. -You can get it either programmatically or copy from the `Prediction endpoint` section of the `Processor details` -tab in the Google Cloud Console. - -```bash -pip install google-cloud-documentai -pip install google-cloud-documentai-toolbox -``` - -## Document Transformer - -See a [usage example](/docs/integrations/document_transformers/docai). - -```python -from langchain.document_loaders.blob_loaders import Blob -from langchain.document_loaders.parsers import DocAIParser -``` diff --git a/docs/docs/integrations/providers/google_serper.mdx b/docs/docs/integrations/providers/google_serper.mdx index 8fd535c57f..74cf86d8e2 100644 --- a/docs/docs/integrations/providers/google_serper.mdx +++ b/docs/docs/integrations/providers/google_serper.mdx @@ -1,4 +1,4 @@ -# Google Serper +# Serper - Google Search API This page covers how to use the [Serper](https://serper.dev) Google Search API within LangChain. Serper is a low-cost Google Search API that can be used to add answer box, knowledge graph, and organic results data from Google Search. It is broken into two parts: setup, and then references to the specific Google Serper wrapper. 
diff --git a/docs/docs/integrations/vectorstores/matchingengine.ipynb b/docs/docs/integrations/vectorstores/matchingengine.ipynb index faee307c34..a086be2cf0 100644 --- a/docs/docs/integrations/vectorstores/matchingengine.ipynb +++ b/docs/docs/integrations/vectorstores/matchingengine.ipynb @@ -5,11 +5,11 @@ "id": "655b8f55-2089-4733-8b09-35dea9580695", "metadata": {}, "source": [ - "# Google Vertex AI MatchingEngine\n", + "# Google Vertex AI Vector Search\n", "\n", - "This notebook shows how to use functionality related to the `GCP Vertex AI MatchingEngine` vector database.\n", + "This notebook shows how to use functionality related to the `Google Cloud Vertex AI Vector Search` vector database.\n", "\n", - "> Vertex AI [Matching Engine](https://cloud.google.com/vertex-ai/docs/matching-engine/overview) provides the industry's leading high-scale low latency vector database. These vector databases are commonly referred to as vector similarity-matching or an approximate nearest neighbor (ANN) service.\n", + "> [Google Vertex AI Vector Search](https://cloud.google.com/vertex-ai/docs/matching-engine/overview), formerly known as Vertex AI Matching Engine, provides the industry's leading high-scale low latency vector database. These vector databases are commonly referred to as vector similarity-matching or an approximate nearest neighbor (ANN) service.\n", "\n", "**Note**: This module expects an endpoint and deployed index already created as the creation time takes close to one hour. To see how to create an index refer to the section [Create Index and deploy it to an Endpoint](#create-index-and-deploy-it-to-an-endpoint)" ] @@ -29,7 +29,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.vectorstores import MatchingEngine" + "from langchain.vectorstores import MatchingEngine\n" ] }, { @@ -61,7 +61,7 @@ "\n", "vector_store.add_texts(texts=texts)\n", "\n", - "vector_store.similarity_search(\"lunch\", k=2)" + "vector_store.similarity_search(\"lunch\", k=2)\n" ] }, { @@ -93,7 +93,7 @@ "!pip install tensorflow \\\n", " google-cloud-aiplatform \\\n", " tensorflow-hub \\\n", - " tensorflow-text " + " tensorflow-text \n" ] }, { @@ -108,7 +108,7 @@ "\n", "from google.cloud import aiplatform\n", "import tensorflow_hub as hub\n", - "import tensorflow_text" + "import tensorflow_text\n" ] }, { @@ -137,7 +137,7 @@ "VPC_NETWORK_FULL = f\"projects/{PROJECT_NUMBER}/global/networks/{VPC_NETWORK}\"\n", "\n", "# Change this if you need the VPC to be created.\n", - "CREATE_VPC = False" + "CREATE_VPC = False\n" ] }, { @@ -148,7 +148,7 @@ "outputs": [], "source": [ "# Set the project id\n", - "! gcloud config set project {PROJECT_ID}" + "! gcloud config set project {PROJECT_ID}\n" ] }, { @@ -177,7 +177,7 @@ "\n", " # Set up peering with service networking\n", " # Your account must have the \"Compute Network Admin\" role to run the following.\n", - " ! gcloud services vpc-peerings connect --service=servicenetworking.googleapis.com --network={VPC_NETWORK} --ranges={PEERING_RANGE_NAME} --project={PROJECT_ID}" + " ! gcloud services vpc-peerings connect --service=servicenetworking.googleapis.com --network={VPC_NETWORK} --ranges={PEERING_RANGE_NAME} --project={PROJECT_ID}\n" ] }, { @@ -188,7 +188,7 @@ "outputs": [], "source": [ "# Creating bucket.\n", - "! gsutil mb -l $REGION -p $PROJECT_ID $BUCKET_URI" + "! 
gsutil mb -l $REGION -p $PROJECT_ID $BUCKET_URI\n" ] }, { @@ -208,7 +208,7 @@ "source": [ "# Load the Universal Sentence Encoder module\n", "module_url = \"https://tfhub.dev/google/universal-sentence-encoder-multilingual/3\"\n", - "model = hub.load(module_url)" + "model = hub.load(module_url)\n" ] }, { @@ -219,7 +219,7 @@ "outputs": [], "source": [ "# Generate embeddings for each word\n", - "embeddings = model([\"banana\"])" + "embeddings = model([\"banana\"])\n" ] }, { @@ -245,7 +245,7 @@ "with open(\"data.json\", \"w\") as f:\n", " json.dump(initial_config, f)\n", "\n", - "!gsutil cp data.json {EMBEDDING_DIR}/file.json" + "!gsutil cp data.json {EMBEDDING_DIR}/file.json\n" ] }, { @@ -255,7 +255,7 @@ "metadata": {}, "outputs": [], "source": [ - "aiplatform.init(project=PROJECT_ID, location=REGION, staging_bucket=BUCKET_URI)" + "aiplatform.init(project=PROJECT_ID, location=REGION, staging_bucket=BUCKET_URI)\n" ] }, { @@ -279,7 +279,7 @@ " dimensions=DIMENSIONS,\n", " approximate_neighbors_count=150,\n", " distance_measure_type=\"DOT_PRODUCT_DISTANCE\",\n", - ")" + ")\n" ] }, { @@ -300,7 +300,7 @@ "my_index_endpoint = aiplatform.MatchingEngineIndexEndpoint.create(\n", " display_name=f\"{DISPLAY_NAME}-endpoint\",\n", " network=VPC_NETWORK_FULL,\n", - ")" + ")\n" ] }, { @@ -322,7 +322,7 @@ " index=my_index, deployed_index_id=DEPLOYED_INDEX_ID\n", ")\n", "\n", - "my_index_endpoint.deployed_indexes" + "my_index_endpoint.deployed_indexes\n" ] } ], diff --git a/docs/vercel.json b/docs/vercel.json index 65a7811996..44177f8404 100644 --- a/docs/vercel.json +++ b/docs/vercel.json @@ -3850,6 +3850,10 @@ { "source": "/docs/integrations/retrievers/google_cloud_enterprise_search", "destination": "/docs/integrations/retrievers/google_vertex_ai_search" + }, + { + "source": "/docs/integrations/providers/google_document_ai", + "destination": "/docs/integrations/platforms/google#google-document-ai" } ] }
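The platform page updated above lists the new Gmail toolkit and Google Places tool only as bare imports. The sketch below is one way those pieces could be wired to the `ChatVertexAI` model from the chat notebook; it is a hedged illustration rather than part of the docs. It assumes authorized Gmail credentials on disk, a `GPLACES_API_KEY` environment variable, and the `googlemaps` and `google-api-python-client` packages installed; the agent type and the prompt are arbitrary choices for demonstration.

```python
# A sketch only: combining the Gmail toolkit and Google Places tool with ChatVertexAI.
# Assumes authorized Gmail credentials and a GPLACES_API_KEY environment variable;
# the agent type and prompt are illustrative, not prescribed by the docs.
from langchain.agents import AgentType, initialize_agent
from langchain.agents.agent_toolkits import GmailToolkit
from langchain.chat_models import ChatVertexAI
from langchain.tools import GooglePlacesTool

llm = ChatVertexAI(model_name="chat-bison", temperature=0)

# GmailToolkit bundles search/read/draft tools; GooglePlacesTool queries the Places API.
tools = GmailToolkit().get_tools() + [GooglePlacesTool()]

agent = initialize_agent(
    tools,
    llm,
    agent=AgentType.STRUCTURED_CHAT_ZERO_SHOT_REACT_DESCRIPTION,
    verbose=True,
)
agent.run(
    "Find a coffee shop near Mountain View and draft an email inviting the team there."
)
```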
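Similarly, the Vector Search notebook stops at `similarity_search`. One natural follow-on, sketched here under the assumption that `vector_store` is the `MatchingEngine` instance built with `MatchingEngine.from_components(...)` earlier in that notebook (with the index and endpoint already deployed), is to expose it as a retriever behind a question-answering chain:

```python
# A sketch only: reusing the MatchingEngine-backed `vector_store` from the
# notebook above as the retriever for a RetrievalQA chain. The chain type
# defaults to "stuff"; k=2 mirrors the similarity_search call in the notebook.
from langchain.chains import RetrievalQA
from langchain.llms import VertexAI

retriever = vector_store.as_retriever(search_kwargs={"k": 2})

qa = RetrievalQA.from_chain_type(llm=VertexAI(), retriever=retriever)
print(qa.run("What do people usually eat for lunch?"))
```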