use torch no grad for loss (#119)

Sebastian Raschka 2024-04-14 09:13:07 -04:00 committed by GitHub
parent a3a5574758
commit 155ac03f61

@@ -1135,8 +1135,10 @@
     "\n",
     "\n",
     "torch.manual_seed(123) # For reproducibility due to the shuffling in the data loader\n",
-    "train_loss = calc_loss_loader(train_loader, model, device)\n",
-    "val_loss = calc_loss_loader(val_loader, model, device)\n",
+    "\n",
+    "with torch.no_grad(): # Disable gradient tracking for efficiency because we are not training, yet\n",
+    "    train_loss = calc_loss_loader(train_loader, model, device)\n",
+    "    val_loss = calc_loss_loader(val_loader, model, device)\n",
     "\n",
     "print(\"Training loss:\", train_loss)\n",
     "print(\"Validation loss:\", val_loss)"