Mirror of https://github.com/rasbt/LLMs-from-scratch.git, synced 2025-12-02 18:10:37 +00:00
fix code comment (#834)
This commit is contained in:
parent b6cd0a312f
commit bfc6389fab
@@ -1844,7 +1844,7 @@
 " # As in `CausalAttention`, for inputs where `num_tokens` exceeds `context_length`, \n",
 " # this will result in errors in the mask creation further below. \n",
 " # In practice, this is not a problem since the LLM (chapters 4-7) ensures that inputs \n",
-" # do not exceed `context_length` before reaching this forwar\n",
+" # do not exceed `context_length` before reaching this forward method.\n",
 "\n",
 " keys = self.W_key(x) # Shape: (b, num_tokens, d_out)\n",
 " queries = self.W_query(x)\n",
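The comment being fixed sits at the top of the `forward` method of the chapter 3 multi-head attention class. The sketch below is an illustrative approximation, not the notebook's exact code (the class name `MultiHeadAttentionSketch`, argument names, and layout are assumptions); it shows why the mask handling in the forward pass breaks once `num_tokens` exceeds `context_length`: the registered mask is only `context_length x context_length`, so the slice taken for the scores no longer matches the attention-score shape.

```python
import torch
import torch.nn as nn


class MultiHeadAttentionSketch(nn.Module):
    """Minimal causal multi-head attention sketch (illustrative, assumed names)."""

    def __init__(self, d_in, d_out, context_length, num_heads, dropout=0.0):
        super().__init__()
        assert d_out % num_heads == 0, "d_out must be divisible by num_heads"
        self.num_heads = num_heads
        self.head_dim = d_out // num_heads
        self.W_query = nn.Linear(d_in, d_out, bias=False)
        self.W_key = nn.Linear(d_in, d_out, bias=False)
        self.W_value = nn.Linear(d_in, d_out, bias=False)
        self.out_proj = nn.Linear(d_out, d_out)
        self.dropout = nn.Dropout(dropout)
        # Upper-triangular causal mask, sized to `context_length`
        self.register_buffer(
            "mask",
            torch.triu(torch.ones(context_length, context_length), diagonal=1),
        )

    def forward(self, x):
        b, num_tokens, _ = x.shape

        keys = self.W_key(x)       # Shape: (b, num_tokens, d_out)
        queries = self.W_query(x)  # Shape: (b, num_tokens, d_out)
        values = self.W_value(x)   # Shape: (b, num_tokens, d_out)

        # Split into heads: (b, num_heads, num_tokens, head_dim)
        keys = keys.view(b, num_tokens, self.num_heads, self.head_dim).transpose(1, 2)
        queries = queries.view(b, num_tokens, self.num_heads, self.head_dim).transpose(1, 2)
        values = values.view(b, num_tokens, self.num_heads, self.head_dim).transpose(1, 2)

        attn_scores = queries @ keys.transpose(2, 3)

        # If num_tokens > context_length, this slice tops out at
        # context_length rows/columns, so the masked_fill_ below fails with a
        # shape mismatch -- the error the corrected comment refers to.
        mask_bool = self.mask.bool()[:num_tokens, :num_tokens]
        attn_scores.masked_fill_(mask_bool, -torch.inf)

        attn_weights = torch.softmax(attn_scores / self.head_dim**0.5, dim=-1)
        attn_weights = self.dropout(attn_weights)

        # Merge heads back: (b, num_tokens, d_out)
        context = (attn_weights @ values).transpose(1, 2).reshape(b, num_tokens, -1)
        return self.out_proj(context)


# Usage example (hypothetical sizes): works while num_tokens <= context_length
torch.manual_seed(123)
x = torch.randn(2, 6, 16)  # (batch, num_tokens, d_in)
mha = MultiHeadAttentionSketch(d_in=16, d_out=32, context_length=8, num_heads=4)
print(mha(x).shape)  # torch.Size([2, 6, 32])
```

As the corrected comment notes, the LLM code in chapters 4-7 truncates or chunks inputs to `context_length` before they reach this forward method, so the mismatch does not occur in practice.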