Fix FeedForwardNetwork.cast: qualify weight references with self

This commit is contained in:
k
2026-01-06 19:40:45 -05:00
parent d6b9f45fcc
commit c4e5e332ba

View File

@@ -10,19 +10,20 @@ class MultiHeadAttention:
class FeedForwardNetwork:
    """SwiGLU-style feed-forward block with pre-normalization.

    Projects the input up through parallel `gate` and `up` linears,
    applies SiLU gating, and projects back down to the embedding size.
    The `ratio` default of 8/3 matches the common LLaMA-style sizing.
    """

    def __init__(self, embed_size, ratio=(8 / 3)):
        # Hidden width is embed_size * ratio, truncated to an int.
        hidden_size = int(embed_size * ratio)
        self.norm = nn.RMSNorm(embed_size)
        # bias=False on all three projections, as is conventional for
        # gated FFNs paired with RMSNorm.
        self.gate = nn.Linear(embed_size, hidden_size, bias=False)
        self.up = nn.Linear(embed_size, hidden_size, bias=False)
        self.down = nn.Linear(hidden_size, embed_size, bias=False)

    def __call__(self, x):
        # Pre-norm, then SwiGLU: down(silu(gate(x)) * up(x)).
        x = self.norm(x)
        return self.down(self.gate(x).silu() * self.up(x))

    def cast(self, dtype):
        """Cast the three projection weights to `dtype` in place.

        Returns self so calls can be chained. Fix: the weights must be
        read via `self.` — the previous bare `gate.weight` names were
        undefined and raised NameError.
        NOTE(review): `norm`'s weight is not cast here — presumably kept
        in higher precision deliberately; confirm against callers.
        """
        self.gate.weight = self.gate.weight.cast(dtype)
        self.up.weight = self.up.weight.cast(dtype)
        self.down.weight = self.down.weight.cast(dtype)
        return self
class Block:
def __init__(self):