Fixed embedding mistake

k committed 2026-03-05 15:13:34 -05:00
parent 8d8fb8c212
commit 98282675b0


@@ -68,7 +68,7 @@ class Transformer():
     def __init__(self,vocab_size,embed_size,n_heads,n_blocks,block_size):
         self.tok_embed = nn.Embedding(vocab_size,embed_size)
         self.pos_embed = nn.Embedding(block_size,embed_size)
-        self.pos_idx = Tensor.arange(block_size, requires_grad=False).sin()
+        self.pos_idx = Tensor.arange(block_size, requires_grad=False)
         self.blocks = [Block(embed_size,n_heads,i%4==0) for i in range(n_blocks)]
         self.norm = nn.RMSNorm(embed_size)
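
For context, a minimal sketch of why the old line was likely a bug, assuming tinygrad's Tensor and nn.Embedding as used in the diff (the block_size and embed_size values and the standalone usage below are illustrative, not from the commit). nn.Embedding is an integer-indexed table lookup, so applying .sin() maps the positions 0..block_size-1 to floats in [-1, 1], which no longer match any row of the table; the fix keeps the raw integer positions.

from tinygrad import Tensor, nn

# Illustrative values, not from the commit.
block_size, embed_size = 8, 16
pos_embed = nn.Embedding(block_size, embed_size)

# Old, buggy line: .sin() turns integer positions into floats in [-1, 1],
# which are not valid rows of the embedding table.
bad_idx = Tensor.arange(block_size, requires_grad=False).sin()

# Fixed line from the commit: raw integer positions index the table directly.
pos_idx = Tensor.arange(block_size, requires_grad=False)
print(pos_embed(pos_idx).shape)  # (8, 16): one learned vector per position

With the fix, pos_embed(pos_idx) yields a learned vector per position, which is presumably added to tok_embed's output in the forward pass; sinusoidal encodings, if intended, would be computed directly rather than passed through an Embedding lookup.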