Mirror of https://github.com/rasbt/LLMs-from-scratch.git (synced 2025-12-12 15:31:40 +00:00)
use blocksize in positional embedding
parent 9e85f13ba9
commit dfe2c3b46f
@@ -88,7 +88,7 @@
     "\n",
     "\n",
     "token_embedding_layer = nn.Embedding(vocab_size, output_dim)\n",
-    "pos_embedding_layer = nn.Embedding(vocab_size, output_dim)\n",
+    "pos_embedding_layer = torch.nn.Embedding(block_size, output_dim)\n",
     "\n",
     "max_length = 4\n",
     "dataloader = create_dataloader(raw_text, batch_size=8, max_length=max_length, stride=5)"
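Why this change matters: token IDs index the vocabulary, so the token embedding table needs vocab_size rows, but position IDs only run from 0 to block_size - 1 (the maximum context length), so the positional embedding table only needs block_size rows. Sizing it with vocab_size does not raise an error, since valid position indices still fall in range, but it allocates a vastly oversized lookup table. A minimal sketch of the corrected setup; vocab_size=50257 and output_dim=256 are assumed illustrative values following the chapter's setup, and only max_length=4 appears in the hunk above:

```python
import torch

# Assumed illustrative values; only max_length=4 is taken from the diff above.
vocab_size = 50257   # BPE vocabulary size used in the chapter (assumption)
output_dim = 256     # embedding dimension (assumption)
block_size = 4       # maximum context length, matching max_length here

token_embedding_layer = torch.nn.Embedding(vocab_size, output_dim)
pos_embedding_layer = torch.nn.Embedding(block_size, output_dim)

# Position IDs only ever take values 0 .. block_size-1, so block_size rows
# suffice for the positional lookup table:
pos_embeddings = pos_embedding_layer(torch.arange(block_size))
print("pos_embeddings.shape:", pos_embeddings.shape)  # torch.Size([4, 256])
```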
@@ -324,6 +324,14 @@
     "\n",
     "print(\"context_vecs.shape:\", context_vecs.shape)"
    ]
-  }
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "f8d4be84-28bb-41d5-996c-4936acffd411",
+   "metadata": {},
+   "outputs": [],
+   "source": []
+  }
  ],
  "metadata": {
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user