diff --git a/ch02/01_main-chapter-code/ch02.ipynb b/ch02/01_main-chapter-code/ch02.ipynb index 143b977..3b2e7db 100644 --- a/ch02/01_main-chapter-code/ch02.ipynb +++ b/ch02/01_main-chapter-code/ch02.ipynb @@ -1788,7 +1788,10 @@ ], "source": [ "token_embeddings = token_embedding_layer(inputs)\n", - "print(token_embeddings.shape)" + "print(token_embeddings.shape)\n", + "\n", + "# uncomment & execute the following line to see what the embeddings look like\n", + "# print(token_embeddings)" ] }, { @@ -1807,7 +1810,10 @@ "outputs": [], "source": [ "context_length = max_length\n", - "pos_embedding_layer = torch.nn.Embedding(context_length, output_dim)" + "pos_embedding_layer = torch.nn.Embedding(context_length, output_dim)\n", + "\n", + "# uncomment & execute the following line to see what the embedding layer weights look like\n", + "# print(pos_embedding_layer.weight)" ] }, { @@ -1826,7 +1832,10 @@ ], "source": [ "pos_embeddings = pos_embedding_layer(torch.arange(max_length))\n", - "print(pos_embeddings.shape)" + "print(pos_embeddings.shape)\n", + "\n", + "# uncomment & execute the following line to see what the embeddings look like\n", + "# print(pos_embeddings)" ] }, { @@ -1853,7 +1862,10 @@ ], "source": [ "input_embeddings = token_embeddings + pos_embeddings\n", - "print(input_embeddings.shape)" + "print(input_embeddings.shape)\n", + "\n", + "# uncomment & execute the following line to see what the embeddings look like\n", + "# print(input_embeddings)" ] }, {