未验证 提交 e2d6d6c8 编写于 作者: D dyning 提交者: GitHub

Merge pull request #350 from littletomatodonkey/add_l2_decay_int

add l2 decay interface
@@ -15,6 +15,8 @@ from __future__ import absolute_import
 from __future__ import division
 from __future__ import print_function
 import paddle.fluid as fluid
+from paddle.fluid.regularizer import L2Decay
 from ppocr.utils.utility import initial_logger
 logger = initial_logger()
@@ -31,6 +33,8 @@ def AdamDecay(params, parameter_list=None):
     base_lr = params['base_lr']
     beta1 = params['beta1']
     beta2 = params['beta2']
+    l2_decay = params.get("l2_decay", 0.0)
     if 'decay' in params:
         params = params['decay']
         decay_mode = params['function']
@@ -47,5 +51,6 @@ def AdamDecay(params, parameter_list=None):
         learning_rate=base_lr,
         beta1=beta1,
         beta2=beta2,
+        regularization=L2Decay(regularization_coeff=l2_decay),
         parameter_list=parameter_list)
     return optimizer
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册