Compare commits

...

1 Commits

Author SHA1 Message Date
k
98282675b0 Fixed embedding mistake 2026-03-05 15:13:34 -05:00

View File

@@ -68,7 +68,7 @@ class Transformer():
def __init__(self,vocab_size,embed_size,n_heads,n_blocks,block_size):
self.tok_embed = nn.Embedding(vocab_size,embed_size)
self.pos_embed = nn.Embedding(block_size,embed_size)
self.pos_idx = Tensor.arange(block_size, requires_grad=False).sin()
self.pos_idx = Tensor.arange(block_size, requires_grad=False)
self.blocks = [Block(embed_size,n_heads,i%4==0) for i in range(n_blocks)]
self.norm = nn.RMSNorm(embed_size)