added dropout to ffn

commit 3b590b3ce7
parent 957aad2239
Author: k
Date:   2026-01-06 21:26:51 -05:00

@@ -15,6 +15,7 @@ class MultiHeadAttention:
         q = q.view(B, T, self.n_heads, self.head_size).transpose(1, 2)
         k = k.view(B, T, self.n_heads, self.head_size).transpose(1, 2)
         v = v.view(B, T, self.n_heads, self.head_size).transpose(1, 2)
         # B H T S
+        # TODO: attention free transformer
         out = q.scaled_dot_product_attention(k, v, is_causal=True, dropout_p=0.01)
         out = out.transpose(1, 2).view(B, T, C)
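
This first hunk is mostly context around the attention path: q, k and v are reshaped from (B, T, C) into per-head (B, n_heads, T, head_size) before a causal scaled-dot-product attention with attention dropout, then merged back. A minimal sketch of that shape flow, assuming tinygrad's Tensor API (which the .view/.silu/.dropout style of this file suggests); all sizes are illustrative, not from the commit:

from tinygrad import Tensor

B, T, C, n_heads = 2, 8, 64, 4              # illustrative sizes
head_size = C // n_heads

x = Tensor.randn(B, T, C)
# stand-ins for the projected q/k/v; the real projections are outside this hunk
q = x.view(B, T, n_heads, head_size).transpose(1, 2)  # (B, H, T, S)
k = x.view(B, T, n_heads, head_size).transpose(1, 2)
v = x.view(B, T, n_heads, head_size).transpose(1, 2)

out = q.scaled_dot_product_attention(k, v, is_causal=True, dropout_p=0.01)
out = out.transpose(1, 2).view(B, T, C)               # back to (B, T, C)
print(out.shape)                                      # (2, 8, 64)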
@@ -34,7 +35,7 @@ class FeedForwardNetwork:
         self.down = nn.Linear(hidden_size, embed_size, bias=False)
     def __call__(self, x):
         x = self.norm(x)
-        return self.down(self.gate(x).silu() * self.up(x))
+        return self.down(self.gate(x).silu() * self.up(x)).dropout(0.01)
     def cast(self, dtype):
         self.gate.weight = self.gate.weight.cast(dtype)
         self.up.weight = self.up.weight.cast(dtype)
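
The actual change is the trailing .dropout(0.01) on the gated (SwiGLU-style) FFN output, applied just before the result rejoins the residual stream. In tinygrad, Tensor.dropout only acts while Tensor.training is True and is an identity otherwise, so this regularizes training without affecting inference. A self-contained sketch of the class as the hunk leaves it; the norm layer and the sizes are assumptions, since neither appears in the hunk:

from tinygrad import Tensor
import tinygrad.nn as nn

class FeedForwardNetwork:
    def __init__(self, embed_size, hidden_size):
        self.norm = nn.RMSNorm(embed_size)  # assumed; the norm's type is not shown
        self.gate = nn.Linear(embed_size, hidden_size, bias=False)
        self.up = nn.Linear(embed_size, hidden_size, bias=False)
        self.down = nn.Linear(hidden_size, embed_size, bias=False)
    def __call__(self, x):
        x = self.norm(x)
        # output dropout from this commit: active only while Tensor.training is True
        return self.down(self.gate(x).silu() * self.up(x)).dropout(0.01)

Tensor.training = True                       # without this, dropout is a no-op
ffn = FeedForwardNetwork(64, 256)            # illustrative sizes
print(ffn(Tensor.randn(2, 8, 64)).shape)     # (2, 8, 64)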