Optimizer.py
class TransformerOptimizer(object):
    """Learning-rate scheduler wrapping the optimizer used in
    'Attention Is All You Need' (warmup followed by inverse-square-root decay).

    Args:
        optimizer: Adam optimizer specified in the paper
        warmup_steps: number of warmup steps for the learning-rate schedule
        d_model: dimension of the Transformer model
    """

    def __init__(self, optimizer, warmup_steps, d_model=512):
        self.base_optim = optimizer
        self.warmup_steps = warmup_steps
        # Base scale: the learning rate is proportional to d_model^(-0.5).
        self.init_lr = d_model ** (-0.5)
        self.lr = 0
        self._step = 0

    def step(self):
        self._step += 1
        if self._step <= self.warmup_steps:
            # Warmup phase: lr grows linearly with the step count.
            self.lr = self.init_lr * self._step * self.warmup_steps ** (-1.5)
        else:
            # Decay phase: lr decays with the inverse square root of the step.
            self.lr = self.init_lr * self._step ** (-0.5)
        # Push the new learning rate into every parameter group, then step.
        for param_group in self.base_optim.param_groups:
            param_group['lr'] = self.lr
        self.base_optim.step()
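

# --- Minimal usage sketch (not part of the original file) ---
# A hedged example of how this wrapper might be driven in a training loop.
# It assumes PyTorch is available; the placeholder `nn.Linear` model, the
# warmup_steps value of 4000, and the Adam settings betas=(0.9, 0.98),
# eps=1e-9 follow the paper but are illustrative, not taken from this repo.
if __name__ == "__main__":
    import torch
    import torch.nn as nn

    model = nn.Linear(512, 512)  # placeholder standing in for a Transformer
    base = torch.optim.Adam(model.parameters(), lr=0,
                            betas=(0.9, 0.98), eps=1e-9)
    optimizer = TransformerOptimizer(base, warmup_steps=4000, d_model=512)

    loss = model(torch.randn(8, 512)).pow(2).mean()  # dummy loss
    optimizer.base_optim.zero_grad()
    loss.backward()
    optimizer.step()  # updates lr per the warmup schedule, then steps Adam
    print(optimizer.lr)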