diff --git a/ch06/01_main-chapter-code/ch06.ipynb b/ch06/01_main-chapter-code/ch06.ipynb index dda54d9..d3df2b0 100644 --- a/ch06/01_main-chapter-code/ch06.ipynb +++ b/ch06/01_main-chapter-code/ch06.ipynb @@ -1822,7 +1822,7 @@ "- In this section, we define and use the training function to improve the classification accuracy of the model\n", "- The `train_classifier_simple` function below is practically the same as the `train_model_simple` function we used for pretraining the model in chapter 5\n", "- The only two differences are that we now \n", - " 1. track the number of training examples seen (`examples_seen`) instead of the number of tokens seen\n", + " 1. track the number of training examples seen (`examples_seen`) instead of the number of tokens seen\n", " 2. calculate the accuracy after each epoch instead of printing a sample text after each epoch" ] }, @@ -1846,7 +1846,7 @@ "# Overall the same as `train_model_simple` in chapter 5\n", "def train_classifier_simple(model, train_loader, val_loader, optimizer, device, num_epochs,\n", " eval_freq, eval_iter, tokenizer):\n", - " # Initialize lists to track losses and tokens seen\n", + " # Initialize lists to track losses and examples seen\n", " train_losses, val_losses, train_accs, val_accs = [], [], [], []\n", " examples_seen, global_step = 0, -1\n", "\n", @@ -2003,7 +2003,7 @@ " ax1.set_ylabel(label.capitalize())\n", " ax1.legend()\n", "\n", - " # Create a second x-axis for tokens seen\n", + " # Create a second x-axis for examples seen\n", " ax2 = ax1.twiny() # Create a second x-axis that shares the same y-axis\n", " ax2.plot(examples_seen, train_values, alpha=0) # Invisible plot for aligning ticks\n", " ax2.set_xlabel(\"Examples seen\"),