Fixed embedding mistake
model.py (2 lines changed: +1 −1)
@@ -68,7 +68,7 @@ class Transformer():
   def __init__(self,vocab_size,embed_size,n_heads,n_blocks,block_size):
     self.tok_embed = nn.Embedding(vocab_size,embed_size)
     self.pos_embed = nn.Embedding(block_size,embed_size)
-    self.pos_idx = Tensor.arange(block_size, requires_grad=False).sin()
+    self.pos_idx = Tensor.arange(block_size, requires_grad=False)
 
     self.blocks = [Block(embed_size,n_heads,i%4==0) for i in range(n_blocks)]
     self.norm = nn.RMSNorm(embed_size)
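For context (not part of the commit): nn.Embedding is an integer-indexed table lookup, so pos_idx must hold raw positions 0..block_size-1. Applying .sin() mapped them to floats in [-1, 1], which are meaningless as table indices. Below is a minimal sketch of the corrected setup, assuming tinygrad; the __call__ body and the token-plus-position summation are a conventional learned-positional-embedding forward pass assumed for illustration, not taken from this repo.

from tinygrad import Tensor, nn

class TransformerSketch:
  # trimmed to the embedding setup shown in the diff
  def __init__(self, vocab_size, embed_size, block_size):
    self.tok_embed = nn.Embedding(vocab_size, embed_size)  # token id -> vector
    self.pos_embed = nn.Embedding(block_size, embed_size)  # position -> vector
    # raw integer positions 0..block_size-1; applying .sin() here would
    # have indexed the embedding table with floats, breaking the lookup
    self.pos_idx = Tensor.arange(block_size, requires_grad=False)

  def __call__(self, tokens: Tensor) -> Tensor:
    # assumed forward pass: add learned positional embeddings for the
    # first T positions to the token embeddings
    T = tokens.shape[-1]
    return self.tok_embed(tokens) + self.pos_embed(self.pos_idx[:T])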