提交 9ecfc348 编写于 作者: 文幕地方

rm const decay

上级 d8571bdb
......@@ -36,7 +36,7 @@ Optimizer:
lr:
learning_rate: 0.00005
regularizer:
name: L2
factor: 0.00000
PostProcess:
......
......@@ -38,7 +38,7 @@ Optimizer:
epochs: *epoch_num
warmup_epoch: 2
regularizer:
name: L2
factor: 0.00000
PostProcess:
......
......@@ -38,7 +38,7 @@ Optimizer:
epochs: *epoch_num
warmup_epoch: 2
regularizer:
name: L2
factor: 0.00000
PostProcess:
......
class L1Decay(object):
    """
    L1 Weight Decay Regularization, which encourages the weights to be sparse.

    Args:
        factor (float): regularization coefficient. Default: 0.0.
    """

    def __init__(self, factor=0.0):
        super(L1Decay, self).__init__()
        # Post-commit name is `coeff`; the former `regularization_coeff`
        # alias was dropped together with the ConstDecay class.
        self.coeff = factor

    def __call__(self):
        # NOTE(review): assumes `paddle` is imported at module level of
        # the original file — confirm against the full source.
        reg = paddle.regularizer.L1Decay(self.coeff)
        return reg
class L2Decay(object):
    """
    L2 Weight Decay Regularization, which helps to prevent the model
    over-fitting.

    Args:
        factor (float): regularization coefficient. Default: 0.0.
    """

    def __init__(self, factor=0.0):
        super(L2Decay, self).__init__()
        # Post-commit name is `coeff`; `regularization_coeff` was removed.
        self.coeff = factor

    def __call__(self):
        # Return the raw coefficient instead of a paddle regularizer
        # object — this is the post-commit behavior (the ConstDecay
        # class that used to do this was removed and folded in here).
        return self.coeff
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册