diff --git a/configs/vqa/re/layoutxlm.yml b/configs/vqa/re/layoutxlm.yml index bb367f0e64db6b67a77a555c61049c6b580e23a2..8d0ffadf0607687c96b2e3ecdb9fae059af6bf51 100644 --- a/configs/vqa/re/layoutxlm.yml +++ b/configs/vqa/re/layoutxlm.yml @@ -36,7 +36,7 @@ Optimizer: lr: learning_rate: 0.00005 regularizer: - name: Const + name: L2 factor: 0.00000 PostProcess: diff --git a/configs/vqa/ser/layoutlm.yml b/configs/vqa/ser/layoutlm.yml index a635fc7dbec1e1364aca85fe660c6fe44433da5b..f29153a2e9bea44f758a38558aa4c45a25f53213 100644 --- a/configs/vqa/ser/layoutlm.yml +++ b/configs/vqa/ser/layoutlm.yml @@ -38,7 +38,7 @@ Optimizer: epochs: *epoch_num warmup_epoch: 2 regularizer: - name: Const + name: L2 factor: 0.00000 PostProcess: diff --git a/configs/vqa/ser/layoutxlm.yml b/configs/vqa/ser/layoutxlm.yml index 1c1eac2289384990fc914f85d9f2a9233cda7440..14041eb26246f396b15f3b11754bda36609a47b9 100644 --- a/configs/vqa/ser/layoutxlm.yml +++ b/configs/vqa/ser/layoutxlm.yml @@ -38,7 +38,7 @@ Optimizer: epochs: *epoch_num warmup_epoch: 2 regularizer: - name: Const + name: L2 factor: 0.00000 PostProcess: diff --git a/ppocr/optimizer/regularizer.py b/ppocr/optimizer/regularizer.py index 83010e6544fb2a09c3fccdaa38f9f9da0c468fad..d48831fc2ea435f3fcb50161f5c778d9a1638452 100644 --- a/ppocr/optimizer/regularizer.py +++ b/ppocr/optimizer/regularizer.py @@ -29,39 +29,23 @@ class L1Decay(object): def __init__(self, factor=0.0): super(L1Decay, self).__init__() - self.regularization_coeff = factor + self.coeff = factor def __call__(self): - reg = paddle.regularizer.L1Decay(self.regularization_coeff) + reg = paddle.regularizer.L1Decay(self.coeff) return reg class L2Decay(object): """ - L2 Weight Decay Regularization, which encourages the weights to be sparse. + L2 Weight Decay Regularization, which helps to prevent the model from over-fitting. Args: factor(float): regularization coeff. Default:0.0. 
""" def __init__(self, factor=0.0): super(L2Decay, self).__init__() - self.regularization_coeff = factor + self.coeff = factor def __call__(self): - reg = paddle.regularizer.L2Decay(self.regularization_coeff) - return reg - - -class ConstDecay(object): - """ - Const L2 Weight Decay Regularization, which encourages the weights to be sparse. - Args: - factor(float): regularization coeff. Default:0.0. - """ - - def __init__(self, factor=0.0): - super(ConstDecay, self).__init__() - self.regularization_coeff = factor - - def __call__(self): - return self.regularization_coeff + return self.coeff