From 7d92267170b7f0553aefff63a3080718cacd3862 Mon Sep 17 00:00:00 2001
From: casinca <47400729+casinca@users.noreply.github.com>
Date: Sun, 9 Nov 2025 21:22:52 +0100
Subject: [PATCH] fix(GatedDeltaNet): Init param A from log of a uniform distrib (#906)

---
 ch04/08_deltanet/README.md | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/ch04/08_deltanet/README.md b/ch04/08_deltanet/README.md
index ca50fe5..257e15f 100644
--- a/ch04/08_deltanet/README.md
+++ b/ch04/08_deltanet/README.md
@@ -166,7 +166,8 @@ class GatedDeltaNet(nn.Module):
         # A_log + W_alpha(x) + dt_bias
         self.W_alpha = nn.Linear(d_in, num_heads, bias=False)
         self.dt_bias = nn.Parameter(torch.ones(num_heads))
-        self.A_log = nn.Parameter(torch.zeros(num_heads))
+        A_init = torch.empty(num_heads).uniform_(0, 16)
+        self.A_log = nn.Parameter(torch.log(A_init))
         # We could implement this as
         # W_alpha = nn.Linear(d_in, num_heads, bias=True)
         # but the bias is separate for interpretability and
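
For context, a minimal sketch of what this init change does (assumptions: PyTorch, and num_heads = 8 is just an illustration value, not taken from the patch). With the old zero init, every head starts from the same decay magnitude A = exp(A_log) = 1; the patched init draws A uniformly from (0, 16) and stores its log, so heads start with different decay magnitudes.

    import torch

    num_heads = 8  # arbitrary illustration value

    # Old init: A_log starts at zero, so every head shares A = exp(A_log) = 1
    A_log_old = torch.zeros(num_heads)

    # Patched init: draw A uniformly from (0, 16) and store its log,
    # so each head gets its own initial decay magnitude A = exp(A_log)
    A_init = torch.empty(num_heads).uniform_(0, 16)
    A_log_new = torch.log(A_init)

    print(torch.exp(A_log_old))  # all ones -> identical heads at init
    print(torch.exp(A_log_new))  # values spread over (0, 16) -> diverse heads at init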