diff --git a/docs/modules/utils/combine_docs_examples/hyde.ipynb b/docs/modules/utils/combine_docs_examples/hyde.ipynb
index 66325be5..430db5ba 100644
--- a/docs/modules/utils/combine_docs_examples/hyde.ipynb
+++ b/docs/modules/utils/combine_docs_examples/hyde.ipynb
@@ -37,6 +37,12 @@
     "llm = OpenAI()"
    ]
   },
+  {
+   "cell_type": "markdown",
+   "id": "33bd6905",
+   "metadata": {},
+   "source": []
+  },
   {
    "cell_type": "code",
    "execution_count": 3,
@@ -99,6 +105,7 @@
    ]
   },
   {
+   "attachments": {},
    "cell_type": "markdown",
    "id": "1da90437",
    "metadata": {},
@@ -106,7 +113,7 @@
     "## Using our own prompts\n",
     "Besides using preconfigured prompts, we can also easily construct our own prompts and use those in the LLMChain that is generating the documents. This can be useful if we know the domain our queries will be in, as we can condition the prompt to generate text more similar to that.\n",
     "\n",
-    "In the example below, let's condition it generate text about a state of the union address (because we will use that in the next example)."
+    "In the example below, let's condition it to generate text about a state of the union address (because we will use that in the next example)."
    ]
   },
   {
@@ -229,7 +236,7 @@
  ],
  "metadata": {
   "kernelspec": {
-   "display_name": "Python 3 (ipykernel)",
+   "display_name": "Python 3",
    "language": "python",
    "name": "python3"
   },
@@ -243,11 +250,11 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.9.1"
+   "version": "3.9.12 (main, Mar 26 2022, 15:51:15) \n[Clang 13.1.6 (clang-1316.0.21.2)]"
   },
   "vscode": {
    "interpreter": {
-    "hash": "9dd01537e9ab68cf47cb0398488d182358f774f73101197b3bd1b5502c6ec7f9"
+    "hash": "aee8b7b246df8f9039afb4144a1f6fd8d2ca17a180786b69acc140d282b71a49"
    }
   }
  },
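
For reference, the "Using our own prompts" cell edited above describes wrapping a custom prompt in an LLMChain and handing it to the HyDE embedder. A minimal sketch of that flow is below; it assumes the LangChain API of this notebook's era (`HypotheticalDocumentEmbedder` accepting `llm_chain` and `base_embeddings` keyword arguments, plus the `OpenAI`/`OpenAIEmbeddings` setup from earlier cells), and the prompt text is illustrative rather than the notebook's exact cell contents.

```python
from langchain.chains import LLMChain, HypotheticalDocumentEmbedder
from langchain.embeddings import OpenAIEmbeddings
from langchain.llms import OpenAI
from langchain.prompts import PromptTemplate

# Base LLM and embeddings, as set up earlier in the notebook.
base_embeddings = OpenAIEmbeddings()
llm = OpenAI()

# Condition the hypothetical document on the state-of-the-union domain,
# as the edited markdown cell suggests (illustrative prompt text).
prompt_template = (
    "Please answer the user's question about the most recent state of the union address\n"
    "Question: {question}\n"
    "Answer:"
)
prompt = PromptTemplate(input_variables=["question"], template=prompt_template)
llm_chain = LLMChain(llm=llm, prompt=prompt)

# Build the HyDE embedder from the custom chain instead of a preconfigured prompt.
embeddings = HypotheticalDocumentEmbedder(
    llm_chain=llm_chain, base_embeddings=base_embeddings
)

# The query is embedded via the hypothetical answer the chain generates.
result = embeddings.embed_query(
    "What did the president say about Ketanji Brown Jackson"
)
```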