From 8d2344db43bb6956ff8fb2ecb8b7c83ba6539041 Mon Sep 17 00:00:00 2001 From: Danny Davenport Date: Mon, 31 Jul 2023 20:15:29 -0400 Subject: [PATCH] updates some spelling mistakes (#8537) Just updating some spelling / grammar issues in the documentation. No code changes. --------- Co-authored-by: Harrison Chase --- .../prompt_templates/connecting_to_a_feature_store.ipynb | 2 +- docs/snippets/get_started/quickstart/llm_chain.mdx | 2 +- .../modules/model_io/prompts/prompt_templates/get_started.mdx | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/extras/modules/model_io/prompts/prompt_templates/connecting_to_a_feature_store.ipynb b/docs/extras/modules/model_io/prompts/prompt_templates/connecting_to_a_feature_store.ipynb index 4a690db635..5ba7988f80 100644 --- a/docs/extras/modules/model_io/prompts/prompt_templates/connecting_to_a_feature_store.ipynb +++ b/docs/extras/modules/model_io/prompts/prompt_templates/connecting_to_a_feature_store.ipynb @@ -25,7 +25,7 @@ "\n", "To start, we will use the popular open source feature store framework [Feast](https://github.com/feast-dev/feast).\n", "\n", - "This assumes you have already run the steps in the README around getting started. We will build of off that example in getting started, and create and LLMChain to write a note to a specific driver regarding their up-to-date statistics." + "This assumes you have already run the steps in the README around getting started. We will build off of that example in getting started, and create an LLMChain to write a note to a specific driver regarding their up-to-date statistics." 
] }, { diff --git a/docs/snippets/get_started/quickstart/llm_chain.mdx b/docs/snippets/get_started/quickstart/llm_chain.mdx index 88091cbef3..7a4d5f1b1c 100644 --- a/docs/snippets/get_started/quickstart/llm_chain.mdx +++ b/docs/snippets/get_started/quickstart/llm_chain.mdx @@ -17,7 +17,7 @@ class CommaSeparatedListOutputParser(BaseOutputParser): return text.strip().split(", ") template = """You are a helpful assistant who generates comma separated lists. -A user will pass in a category, and you should generated 5 objects in that category in a comma separated list. +A user will pass in a category, and you should generate 5 objects in that category in a comma separated list. ONLY return a comma separated list, and nothing more.""" system_message_prompt = SystemMessagePromptTemplate.from_template(template) human_template = "{text}" diff --git a/docs/snippets/modules/model_io/prompts/prompt_templates/get_started.mdx b/docs/snippets/modules/model_io/prompts/prompt_templates/get_started.mdx index 47ba6c321e..38d770c326 100644 --- a/docs/snippets/modules/model_io/prompts/prompt_templates/get_started.mdx +++ b/docs/snippets/modules/model_io/prompts/prompt_templates/get_started.mdx @@ -74,7 +74,7 @@ These chat messages differ from raw string (which you would pass into a [LLM](/d For example, in OpenAI [Chat Completion API](https://platform.openai.com/docs/guides/chat/introduction), a chat message can be associated with the AI, human or system role. The model is supposed to follow instruction from system chat message more closely. -LangChain provides several prompt templates to make constructing and working with prompts easily. You are encouraged to use these chat related prompt templates instead of `PromptTemplate` when querying chat models to fully exploit the potential of underlying chat model. +LangChain provides several prompt templates to make constructing and working with prompts easy. 
You are encouraged to use these chat-related prompt templates instead of `PromptTemplate` when querying chat models to fully utilize the potential of the underlying chat model.