From 79ba591fbc7dfaa33f349712e4a232d6fc9c592e Mon Sep 17 00:00:00 2001 From: rasbt Date: Sun, 15 Sep 2024 08:05:04 -0500 Subject: [PATCH] Clarify API usage limits in bonus content --- ch07/05_dataset-generation/reflection-gpt4.ipynb | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/ch07/05_dataset-generation/reflection-gpt4.ipynb b/ch07/05_dataset-generation/reflection-gpt4.ipynb index 013df29..00b538b 100644 --- a/ch07/05_dataset-generation/reflection-gpt4.ipynb +++ b/ch07/05_dataset-generation/reflection-gpt4.ipynb @@ -57,6 +57,15 @@ "```" ] }, + { "cell_type": "markdown", "id": "86ac82b4-e3d5-4ed5-8f46-6c97a9313463", "metadata": {}, "source": [ "> Please note that this notebook reproduces the approach from the paper in which the authors used the GPT API to enhance existing datasets. However, it's important to be aware that GPT API-generated data may not be used to develop models that compete with OpenAI, as specified in the [OpenAI Terms of Use](https://openai.com/policies/row-terms-of-use/): \"What you cannot do... Use Output to develop models that compete with OpenAI.\"\n", "You can find a relevant discussion [here](https://www.reddit.com/r/LocalLLaMA/comments/17vbg1f/does_openai_tos_prohibit_generating_datasets_for/)." ] }, { "cell_type": "code", "execution_count": 1, @@ -1049,7 +1058,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.4" + "version": "3.10.6" } }, "nbformat": 4,