CosineAnnealing with optimizer Group
optm.py (new file) | 28 ++++++++++++++++++++++++++++
@@ -0,0 +1,28 @@
import math
from tinygrad import Tensor, nn

class CosineLR:
    # Cosine-annealing learning-rate schedule wrapping a tinygrad optimizer
    # (or an nn.optim.OptimizerGroup, which has no .lr of its own).
    def __init__(self, optm, totalSteps, minlr):
        self.optm = optm
        self.opts = getattr(optm, "optimizers", [optm])  # member optimizers to schedule
        self.maxlr = self.opts[0].lr.item()  # tinygrad stores lr as a Tensor
        self.minlr = minlr
        self.totalSteps = totalSteps
        self.steps = 0

    def step(self):
        # anneal from maxlr at steps=0 (cos(0)=1) down to minlr at steps=totalSteps (cos(pi)=-1)
        lr = self.minlr + 0.5 * (self.maxlr - self.minlr) * (1 + math.cos((self.steps / self.totalSteps) * math.pi))
        for o in self.opts:
            o.lr.assign(Tensor.full_like(o.lr, lr)).realize()  # push the new lr into each optimizer
        self.optm.step()
        self.steps += 1

    def zero_grad(self):
        self.optm.zero_grad()


def llmOptimizer(params, steps, minlr, hypr):
    # hypr: hyperparameter dict supplying the starting learning rate
    muon_params = [p for p in params if len(p.shape) >= 2]   # matrices -> Muon
    adamw_params = [p for p in params if len(p.shape) < 2]   # vectors/scalars -> AdamW

    o1 = nn.optim.Muon(muon_params, lr=hypr["starting_lr"])
    o2 = nn.optim.AdamW(adamw_params, lr=hypr["starting_lr"])
    optimizer = nn.optim.OptimizerGroup(o1, o2)  # OptimizerGroup takes optimizers as varargs, not a list
    return CosineLR(optimizer, steps, minlr)
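
A minimal usage sketch for this scheduler (assumptions: model and the batches iterator are hypothetical stand-ins for a tinygrad module and a data loader, and the hypr values are placeholders):

    from tinygrad import Tensor, nn
    from optm import llmOptimizer

    hypr = {"starting_lr": 1e-3}             # assumed hyperparameters
    params = nn.state.get_parameters(model)  # model: a hypothetical tinygrad module
    opt = llmOptimizer(params, steps=10000, minlr=1e-5, hypr=hypr)

    with Tensor.train():
        for X, Y in batches:                 # hypothetical data iterator
            opt.zero_grad()
            loss = model(X).sparse_categorical_crossentropy(Y)
            loss.backward()
            opt.step()                       # sets the cosine-annealed lr, then updates

Since CosineLR forwards step() and zero_grad() to the wrapped OptimizerGroup, it drops into a training loop in place of a bare optimizer.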