Unverified commit 4a3a2d6b, authored by guguguzi, committed by GitHub

Add api MultiplicativeDecay (#38250)

* delete the modification of dygraph

* CI

* check CI

* modify the return value of get_lr
Parent c8fbd3cd
@@ -205,6 +205,13 @@ def lambda_lr(epoch_num, learning_rate, lr_lambda, verbose=False):
     return learning_rate * lr_lambda(epoch_num)
 
 
+def multiplicative_lr(epoch_num, learning_rate, lr_lambda, verbose=False):
+    latest_lr = learning_rate
+    for i in range(epoch_num):
+        latest_lr = latest_lr * lr_lambda(i + 1)
+    return latest_lr
+
+
 def piecewise_lr(epoch_num, boundaries, values, verbose=False):
     assert len(boundaries) + 1 == len(values)
     for i in range(len(boundaries)):
@@ -519,6 +526,10 @@ class TestLRScheduler(unittest.TestCase):
                 "learning_rate": 0.5,
                 "lr_lambda": lambda x: 0.95**x,
                 "verbose": True
+            }), (multiplicative_lr, paddle.optimizer.lr.MultiplicativeDecay, {
+                "learning_rate": 0.5,
+                "lr_lambda": lambda x: 0.95,
+                "verbose": True
             }), (cosine_annealing_lr, paddle.optimizer.lr.CosineAnnealingDecay, {
                 "learning_rate": 0.5,
                 "T_max": 10,
...
The diff for the remaining file is collapsed.
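The multiplicative_lr helper in the test diff above is the pure-Python reference that the parametrized test checks paddle.optimizer.lr.MultiplicativeDecay against: starting from the initial learning rate, each epoch multiplies the current rate by lr_lambda(epoch). A minimal usage sketch of the new scheduler follows; the Linear layer, SGD optimizer, random input, and the constant 0.95 factor are illustrative choices, not part of this commit:

import paddle

# Build a toy model and attach the new scheduler to an optimizer.
linear = paddle.nn.Linear(10, 10)
scheduler = paddle.optimizer.lr.MultiplicativeDecay(
    learning_rate=0.5, lr_lambda=lambda x: 0.95, verbose=True)
sgd = paddle.optimizer.SGD(learning_rate=scheduler,
                           parameters=linear.parameters())

for epoch in range(5):
    x = paddle.uniform([4, 10])
    loss = paddle.mean(linear(x))
    loss.backward()
    sgd.step()
    sgd.clear_grad()
    # Each scheduler.step() multiplies the current lr by lr_lambda(epoch),
    # so after n epochs the lr is 0.5 * 0.95**n, which matches
    # multiplicative_lr(n, 0.5, lambda x: 0.95) from the test.
    scheduler.step()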