diff --git a/docs/modules/utils/examples/bash.ipynb b/docs/modules/utils/examples/bash.ipynb
index 7e7337b5..e16e930f 100644
--- a/docs/modules/utils/examples/bash.ipynb
+++ b/docs/modules/utils/examples/bash.ipynb
@@ -1,85 +1,85 @@
 {
- "cells": [
-  {
-   "cell_type": "markdown",
-   "id": "8f210ec3",
-   "metadata": {},
-   "source": [
-    "# Bash\n",
-    "It can often be useful to have an LLM generate bash commands, and then run them. A common use case this is for letting it interact with your local file system. We provide an easy util to execute bash commands."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 1,
-   "id": "f7b3767b",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from langchain.utilities import BashProcess"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 2,
-   "id": "cf1c92f0",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "bash = BashProcess()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 3,
-   "id": "2fa952fc",
-   "metadata": {},
-   "outputs": [
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "id": "8f210ec3",
+   "metadata": {},
+   "source": [
+    "# Bash\n",
+    "It can often be useful to have an LLM generate bash commands, and then run them. A common use case for this is letting the LLM interact with your local file system. We provide an easy utility to execute bash commands."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "id": "f7b3767b",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from langchain.utilities import BashProcess"
+   ]
+  },
   {
-    "name": "stdout",
-    "output_type": "stream",
-    "text": [
-     "bash.ipynb\n",
-     "google_search.ipynb\n",
-     "python.ipynb\n",
-     "requests.ipynb\n",
-     "serpapi.ipynb\n",
-     "\n"
-    ]
+   "cell_type": "code",
+   "execution_count": 2,
+   "id": "cf1c92f0",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "bash = BashProcess()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 3,
+   "id": "2fa952fc",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "bash.ipynb\n",
+      "google_search.ipynb\n",
+      "python.ipynb\n",
+      "requests.ipynb\n",
+      "serpapi.ipynb\n",
+      "\n"
+     ]
+    }
+   ],
+   "source": [
+    "print(bash.run(\"ls\"))"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "851fee9f",
+   "metadata": {},
+   "outputs": [],
+   "source": []
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3 (ipykernel)",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.10.9"
  }
-   ],
-   "source": [
-    "print(bash.run(\"ls\"))"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "851fee9f",
-   "metadata": {},
-   "outputs": [],
-   "source": []
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Python 3 (ipykernel)",
-   "language": "python",
-   "name": "python3"
 },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.10.9"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 5
+ "nbformat": 4,
+ "nbformat_minor": 5
 }
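
The notebook above only runs a hard-coded `ls`. As a minimal sketch of the workflow it introduces, an LLM proposing a bash command that is then executed, something like the following should work. The `OpenAI` class, its `temperature` argument, and the prompt wording are illustrative assumptions, not part of this diff.

```python
# Sketch: ask an LLM for a bash command, then execute it with BashProcess.
# The OpenAI LLM and the prompt text are assumptions for illustration only.
from langchain.llms import OpenAI
from langchain.utilities import BashProcess

llm = OpenAI(temperature=0)
bash = BashProcess()

# Ask for exactly one command; in real use, review or sandbox whatever the
# model returns before executing it.
command = llm(
    "Write a single bash command that lists all Jupyter notebooks "
    "in the current directory. Reply with only the command."
)
print(bash.run(command.strip()))
```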
diff --git a/docs/modules/utils/key_concepts.md b/docs/modules/utils/key_concepts.md
index cbb0af9c..7b2e3d37 100644
--- a/docs/modules/utils/key_concepts.md
+++ b/docs/modules/utils/key_concepts.md
@@ -1,29 +1,35 @@
 # Key Concepts
 
 ## Python REPL
-Sometimes, for complex calculations, rather than have an LLM generate the answer directly,
-it can be better to have the LLM generate code to calculate the answer, and then run that code to get the answer.
+
+Sometimes, for complex calculations, rather than have an LLM generate the answer directly,
+it can be better to have the LLM generate code to calculate the answer, and then run that code to get the answer.
 In order to easily do that, we provide a simple Python REPL to execute commands in.
-This interface will only return things that are printed -
+This interface will only return things that are printed;
 therefore, if you want to use it to calculate an answer, make sure to have it print out the answer.
 
 ## Bash
-It can often be useful to have an LLM generate bash commands, and then run them.
-A common use case this is for letting it interact with your local file system.
+
+It can often be useful to have an LLM generate bash commands, and then run them.
+A common use case for this is letting the LLM interact with your local file system.
 We provide an easy component to execute bash commands.
 
 ## Requests Wrapper
-The web contains a lot of information that LLMs do not have access to.
-In order to easily let LLMs interact with that information,
+
+The web contains a lot of information that LLMs do not have access to.
+In order to easily let LLMs interact with that information,
 we provide a wrapper around the Python Requests module that takes in a URL and fetches data from that URL.
 
 ## Google Search
+
 This uses the official Google Search API to look up information on the web.
 
 ## SerpAPI
+
 This uses SerpAPI, a third-party search API, to interact with Google Search.
 
 ## Searx Search
+
 This uses the Searx (SearxNG fork) meta search engine API to look up information
-on the web. It supports 139 search engines and is easy to self-host
+on the web. It supports 139 search engines and is easy to self-host,
 which makes it a good choice for privacy-conscious users.
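
Since key_concepts.md stresses that the Python REPL only returns what is printed, a short example makes that concrete. This is a hedged sketch assuming the utility is exposed as `langchain.utilities.PythonREPL` with a `run()` method analogous to `BashProcess.run()`.

```python
# Sketch of the Python REPL utility; the import path and run() signature
# are assumptions modeled on the BashProcess API shown above.
from langchain.utilities import PythonREPL

repl = PythonREPL()

# Only printed output is returned, so the snippet itself must call print();
# a bare expression like "3 ** 12" would come back empty.
result = repl.run("print(3 ** 12)")
print(result)  # -> 531441
```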
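Likewise, a sketch of the Requests wrapper, assuming it is exposed as `langchain.utilities.RequestsWrapper` with a `get()` method that returns the response body as text; the URL is a placeholder.

```python
# Sketch of the Requests wrapper; the class name, get() method, and URL
# are assumptions for illustration.
from langchain.utilities import RequestsWrapper

requests_wrapper = RequestsWrapper()
page = requests_wrapper.get("https://example.com")
print(page[:200])  # first 200 characters of the fetched page
```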