From 98282675b073e7f045be5bc7f163fa792f70903b Mon Sep 17 00:00:00 2001
From: k
Date: Thu, 5 Mar 2026 15:13:34 -0500
Subject: [PATCH] Fixed embedding mistake

---
 model.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/model.py b/model.py
index e484430..95b7678 100644
--- a/model.py
+++ b/model.py
@@ -68,7 +68,7 @@ class Transformer():
   def __init__(self,vocab_size,embed_size,n_heads,n_blocks,block_size):
     self.tok_embed = nn.Embedding(vocab_size,embed_size)
     self.pos_embed = nn.Embedding(block_size,embed_size)
-    self.pos_idx = Tensor.arange(block_size, requires_grad=False).sin()
+    self.pos_idx = Tensor.arange(block_size, requires_grad=False)
     self.blocks = [Block(embed_size,n_heads,i%4==0) for i in range(n_blocks)]
     self.norm = nn.RMSNorm(embed_size)
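
For context, a minimal sketch of why the .sin() call was a bug, assuming the model is built on tinygrad (suggested by the Tensor / nn.Embedding / nn.RMSNorm names) and that pos_idx is later used to index pos_embed in the forward pass, which this hunk does not show. nn.Embedding is a learned lookup table, so it expects integer positions 0..block_size-1; taking the sine of the arange turns them into fractional values that are not valid row indices. Every name below that is not in the patch is illustrative only.

  from tinygrad import Tensor, nn

  block_size, embed_size = 8, 16                    # tiny toy sizes, just for illustration
  pos_embed = nn.Embedding(block_size, embed_size)  # learned positional table, as in the patch

  # Before the fix: sines of the positions are fractional values in [-1, 1],
  # so they do not name rows of the embedding table (kept only to show the old behaviour).
  bad_idx = Tensor.arange(block_size, requires_grad=False).sin()

  # After the fix: plain integer positions 0..block_size-1, a valid table lookup.
  pos_idx = Tensor.arange(block_size, requires_grad=False)
  pos_vectors = pos_embed(pos_idx.reshape(1, block_size))
  print(pos_vectors.shape)                          # (1, block_size, embed_size)

In the forward pass (outside this hunk) the fixed tensor would presumably be sliced to the current sequence length, looked up through pos_embed, and added to the token embeddings.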