Unverified commit c71fff8c, authored by FDInSky, committed by GitHub

Add some other lr scheduler (#609)

* test=master add some other lr scheduler

* test=master fix something and validate them
Parent df3c8cd7
@@ -39,12 +39,18 @@ class PiecewiseDecay(object):
     Multi step learning rate decay
     Args:
-        gamma (float): decay factor
+        gamma (float | list): decay factor
         milestones (list): steps at which to decay learning rate
     """
-    def __init__(self, gamma=0.1, milestones=[60000, 80000], values=None):
+    def __init__(self, gamma=[0.1, 0.1], milestones=[60000, 80000],
+                 values=None):
         super(PiecewiseDecay, self).__init__()
-        self.gamma = gamma
+        if type(gamma) is not list:
+            self.gamma = []
+            for i in range(len(milestones)):
+                self.gamma.append(gamma / 10**i)
+        else:
+            self.gamma = gamma
         self.milestones = milestones
         self.values = values
@@ -54,13 +60,56 @@ class PiecewiseDecay(object):
             return fluid.layers.piecewise_decay(self.milestones, self.values)
         assert base_lr is not None, "either base LR or values should be provided"
         values = [base_lr]
-        lr = base_lr
-        for _ in self.milestones:
-            lr *= self.gamma
-            values.append(lr)
+        for g in self.gamma:
+            new_lr = base_lr * g
+            values.append(new_lr)
         return fluid.layers.piecewise_decay(self.milestones, values)
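For reference, here is a minimal pure-Python sketch of the boundary values the rewritten __call__ produces, assuming fluid.layers.piecewise_decay(boundaries, values) holds values[i] until the step count reaches boundaries[i]; the helper name piecewise_values is illustrative and not part of the commit.

    def piecewise_values(base_lr, gamma, milestones):
        # Mirrors the new logic: a scalar gamma expands to
        # [gamma, gamma / 10, gamma / 100, ...], one entry per milestone.
        if not isinstance(gamma, list):
            gamma = [gamma / 10**i for i in range(len(milestones))]
        # Each decayed value is base_lr * gamma[i], used after milestones[i].
        return [base_lr] + [base_lr * g for g in gamma]

    # base_lr=0.01, gamma=0.1, milestones=[60000, 80000]
    # -> [0.01, 0.001, 0.0001]: the lr drops at steps 60000 and 80000.
    print(piecewise_values(0.01, 0.1, [60000, 80000]))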
+@serializable
+class PolynomialDecay(object):
+    """
+    Applies polynomial decay to the initial learning rate.
+    Args:
+        max_iter (int): The learning rate decay steps.
+        end_lr (float): End learning rate.
+        power (float): Polynomial attenuation coefficient.
+    """
+
+    def __init__(self, max_iter=180000, end_lr=0.0001, power=1.0):
+        super(PolynomialDecay).__init__()
+        self.max_iter = max_iter
+        self.end_lr = end_lr
+        self.power = power
+
+    def __call__(self, base_lr=None, learning_rate=None):
+        assert base_lr is not None, "either base LR or values should be provided"
+        lr = fluid.layers.polynomial_decay(base_lr, self.max_iter, self.end_lr,
+                                           self.power)
+        return lr
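This class wraps fluid.layers.polynomial_decay. As a sanity check, a plain-Python sketch of the schedule that op is documented to compute without cycling; the function name below is illustrative, not from the commit.

    def polynomial_lr(step, base_lr, max_iter=180000, end_lr=0.0001, power=1.0):
        # With power=1.0 this is a linear ramp from base_lr down to end_lr.
        step = min(step, max_iter)
        return (base_lr - end_lr) * (1 - float(step) / max_iter)**power + end_lr

    # base_lr=0.01: step 0 -> 0.01, step 90000 -> 0.00505, step 180000 -> 0.0001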
+@serializable
+class ExponentialDecay(object):
+    """
+    Applies exponential decay to the learning rate.
+    Args:
+        max_iter (int): The learning rate decay steps.
+        decay_rate (float): The learning rate decay rate.
+    """
+
+    def __init__(self, max_iter, decay_rate):
+        super(ExponentialDecay).__init__()
+        self.max_iter = max_iter
+        self.decay_rate = decay_rate
+
+    def __call__(self, base_lr=None, learning_rate=None):
+        assert base_lr is not None, "either base LR or values should be provided"
+        lr = fluid.layers.exponential_decay(base_lr, self.max_iter,
+                                            self.decay_rate)
+        return lr
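Likewise, a plain-Python sketch of what fluid.layers.exponential_decay computes with its default staircase=False; the helper name and the numbers in the comment are illustrative only.

    def exponential_lr(step, base_lr, max_iter, decay_rate):
        # The lr is multiplied by decay_rate once every max_iter steps,
        # interpolated smoothly in between.
        return base_lr * decay_rate**(float(step) / max_iter)

    # base_lr=0.01, max_iter=10000, decay_rate=0.9:
    # step 0 -> 0.01, step 10000 -> 0.009, step 20000 -> 0.0081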
 @serializable
 class CosineDecay(object):
     """
@@ -78,6 +127,7 @@ class CosineDecay(object):
     def __call__(self, base_lr=None, learning_rate=None):
         assert base_lr is not None, "either base LR or values should be provided"
         lr = fluid.layers.cosine_decay(base_lr, 1, self.max_iters)
+        return lr
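The only change in this hunk is that the built lr op is now returned. With step_each_epoch=1 as passed above, fluid.layers.cosine_decay is documented to follow a half-cosine anneal; a plain-Python sketch, illustrative only.

    import math

    def cosine_lr(step, base_lr, max_iters):
        # Anneals from base_lr at step 0 down to 0 at step max_iters.
        return base_lr * 0.5 * (math.cos(step * math.pi / max_iters) + 1)

    # base_lr=0.01, max_iters=180000:
    # step 0 -> 0.01, step 90000 -> 0.005, step 180000 -> 0.0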
 @serializable
......