diff --git a/examples/Chat_finetuning_data_prep.ipynb b/examples/Chat_finetuning_data_prep.ipynb index 31236ee2..a0318d55 100644 --- a/examples/Chat_finetuning_data_prep.ipynb +++ b/examples/Chat_finetuning_data_prep.ipynb @@ -9,7 +9,9 @@ "# Data preparation and analysis for chat model fine-tuning\n", "\n", "This notebook serves as a tool to preprocess and analyze the chat dataset used for fine-tuning a chat model. \n", - "It checks for format errors, provides basic statistics, and estimates token counts for fine-tuning costs.\n" + "It checks for format errors, provides basic statistics, and estimates token counts for fine-tuning costs.\n", + "The method shown here corresponds to the [current fine-tuning method](https://platform.openai.com/docs/guides/fine-tuning) for chat models such as gpt-3.5-turbo, which use the chat (messages) data format.\n", + "For legacy fine-tuning of completion models like babbage-002 and davinci-002, see [the legacy fine-tuning guide](https://platform.openai.com/docs/guides/legacy-fine-tuning)." ] }, {