Use block_size in positional embedding

rasbt 2024-01-15 08:15:33 -06:00
parent 9e85f13ba9
commit dfe2c3b46f


@@ -88,7 +88,7 @@
 "\n",
 "\n",
 "token_embedding_layer = nn.Embedding(vocab_size, output_dim)\n",
-"pos_embedding_layer = nn.Embedding(vocab_size, output_dim)\n",
+"pos_embedding_layer = torch.nn.Embedding(block_size, output_dim)\n",
 "\n",
 "max_length = 4\n",
 "dataloader = create_dataloader(raw_text, batch_size=8, max_length=max_length, stride=5)"
@@ -324,6 +324,14 @@
 "\n",
 "print(\"context_vecs.shape:\", context_vecs.shape)"
 ]
-}
+},
+{
+"cell_type": "code",
+"execution_count": null,
+"id": "f8d4be84-28bb-41d5-996c-4936acffd411",
+"metadata": {},
+"outputs": [],
+"source": []
+}
 ],
 "metadata": {