Commit 9ecfc348 authored by 文幕地方

rm const decay

Parent d8571bdb
@@ -36,7 +36,7 @@ Optimizer:
   lr:
     learning_rate: 0.00005
   regularizer:
-    name: Const
+    name: L2
     factor: 0.00000
 
 PostProcess:
...
@@ -38,7 +38,7 @@ Optimizer:
     epochs: *epoch_num
     warmup_epoch: 2
   regularizer:
-    name: Const
+    name: L2
     factor: 0.00000
 
 PostProcess:
...
@@ -38,7 +38,7 @@ Optimizer:
     epochs: *epoch_num
     warmup_epoch: 2
   regularizer:
-    name: Const
+    name: L2
     factor: 0.00000
 
 PostProcess:
...
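All three config hunks above make the same one-line change: the regularizer named Const is replaced by L2. A minimal sketch (not part of this commit) of what the new setting resolves to via Paddle's public paddle.regularizer API; since the factor is 0.00000, the L2 penalty is a no-op, so the swap should not change training behavior:

import paddle

# Assumed resolution of `name: L2` / `factor: 0.00000` from the configs
# above onto paddle.regularizer. A zero coefficient makes the L2 term a
# no-op, which is why the removed Const regularizer can be replaced by
# L2 here without affecting training.
reg = paddle.regularizer.L2Decay(coeff=0.00000)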
@@ -29,39 +29,23 @@ class L1Decay(object):
     def __init__(self, factor=0.0):
         super(L1Decay, self).__init__()
-        self.regularization_coeff = factor
+        self.coeff = factor
 
     def __call__(self):
-        reg = paddle.regularizer.L1Decay(self.regularization_coeff)
+        reg = paddle.regularizer.L1Decay(self.coeff)
         return reg
 
 
 class L2Decay(object):
     """
-    L2 Weight Decay Regularization, which encourages the weights to be sparse.
+    L2 Weight Decay Regularization, which helps to prevent the model over-fitting.
     Args:
         factor(float): regularization coeff. Default:0.0.
     """
 
     def __init__(self, factor=0.0):
         super(L2Decay, self).__init__()
-        self.regularization_coeff = factor
+        self.coeff = factor
 
     def __call__(self):
-        reg = paddle.regularizer.L2Decay(self.regularization_coeff)
-        return reg
-
-
-class ConstDecay(object):
-    """
-    Const L2 Weight Decay Regularization, which encourages the weights to be sparse.
-    Args:
-        factor(float): regularization coeff. Default:0.0.
-    """
-
-    def __init__(self, factor=0.0):
-        super(ConstDecay, self).__init__()
-        self.regularization_coeff = factor
-
-    def __call__(self):
-        return self.regularization_coeff
+        return self.coeff
\ No newline at end of file
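The Python hunk renames regularization_coeff to coeff, fixes the copied L2Decay docstring (L2 decay combats over-fitting; it does not encourage sparsity), and deletes ConstDecay: L2Decay.__call__ now returns the bare coefficient rather than a paddle.regularizer object. A minimal usage sketch under that reading (the Linear model and the nonzero factor are illustrative assumptions, not from this commit): Paddle optimizers accept a plain float for weight_decay and apply it as L2 regularization, which is what makes a separate constant-coefficient class redundant:

import paddle

class L2Decay(object):
    # Mirrors the class as it stands after this commit.
    def __init__(self, factor=0.0):
        super(L2Decay, self).__init__()
        self.coeff = factor

    def __call__(self):
        return self.coeff

model = paddle.nn.Linear(10, 2)   # hypothetical stand-in model
reg = L2Decay(factor=3.0e-5)      # hypothetical nonzero factor
optimizer = paddle.optimizer.Adam(
    learning_rate=0.00005,
    parameters=model.parameters(),
    weight_decay=reg())           # plain float is treated as L2 decay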