Fixed embedding mistake
This commit is contained in:
parent
8d8fb8c212
commit
98282675b0
1 changed file with 1 addition and 1 deletion
2
model.py
2
model.py
|
|
@ -68,7 +68,7 @@ class Transformer():
|
|||
def __init__(self, vocab_size, embed_size, n_heads, n_blocks, block_size):
    """Build the Transformer's submodules.

    Args:
        vocab_size: number of distinct tokens (rows of tok_embed).
        embed_size: embedding / model width.
        n_heads: attention heads passed to each Block.
        n_blocks: number of stacked Blocks.
        block_size: maximum sequence length (rows of pos_embed).
    """
    # Token- and position-embedding tables, both of width embed_size.
    self.tok_embed = nn.Embedding(vocab_size, embed_size)
    self.pos_embed = nn.Embedding(block_size, embed_size)
    # Position indices 0..block_size-1 used to look up pos_embed.
    # Fix: the earlier `.sin()` variant was a mistake — indices must stay
    # integral to index an embedding table — and the superseded duplicate
    # assignment is removed rather than left to be silently overwritten.
    self.pos_idx = Tensor.arange(block_size, requires_grad=False)
    # Every 4th block (i % 4 == 0) receives a True flag — presumably an
    # alternate block variant; confirm against Block's constructor.
    self.blocks = [Block(embed_size, n_heads, i % 4 == 0) for i in range(n_blocks)]
    self.norm = nn.RMSNorm(embed_size)
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue