From dfa50c3c5008d1a18151f71db828f25c26062c94 Mon Sep 17 00:00:00 2001
From: littletomatodonkey
Date: Mon, 28 Sep 2020 02:07:31 +0000
Subject: [PATCH] fix reg

---
 python/paddle/regularizer.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/python/paddle/regularizer.py b/python/paddle/regularizer.py
index b3f483fd891..26fcb944944 100644
--- a/python/paddle/regularizer.py
+++ b/python/paddle/regularizer.py
@@ -21,7 +21,7 @@ class L1Decay(fluid.regularizer.L1Decay):
     """
     Implement the L1 Weight Decay Regularization, which encourages the weights to be sparse.
 
-    It can be set in :ref:`api_fluid_ParamAttr` or ``optimizer`` (such as :ref:`api_paddle_optimizer_Momentum` ).
+    It can be set in :ref:`api_paddle_ParamAttr` or ``optimizer`` (such as :ref:`api_paddle_optimizer_Momentum` ).
     When set in ``ParamAttr`` , it only takes effect for trainable parameters in this layer. When set in
     ``optimizer`` , it takes effect for all trainable parameters. When set together, ``ParamAttr`` has
     higher priority than ``optimizer`` , which means that for a trainable parameter, if regularizer is defined
@@ -85,7 +85,7 @@ class L2Decay(fluid.regularizer.L2Decay):
     """
     Implement the L2 Weight Decay Regularization, which helps to prevent the model over-fitting.
 
-    It can be set in :ref:`api_fluid_ParamAttr` or ``optimizer`` (such as :ref:`api_paddle_optimizer_Momentum` ).
+    It can be set in :ref:`api_paddle_ParamAttr` or ``optimizer`` (such as :ref:`api_paddle_optimizer_Momentum` ).
     When set in ``ParamAttr`` , it only takes effect for trainable parameters in this layer. When set in
     ``optimizer`` , it takes effect for all trainable parameters. When set together, ``ParamAttr`` has
     higher priority than ``optimizer`` , which means that for a trainable parameter, if regularizer is defined
--
GitLab
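
For context, a minimal sketch of the two ways the patched docstrings describe to attach a regularizer, and of the priority rule between them. This assumes the paddle 2.x Python API (paddle.ParamAttr, paddle.nn.Linear, paddle.optimizer.Momentum); the layer sizes and coefficients are illustrative, not taken from the patch:

    import paddle

    # Per-parameter: regularize only this layer's weight via ParamAttr.
    weight_attr = paddle.ParamAttr(
        regularizer=paddle.regularizer.L1Decay(coeff=0.01))
    linear = paddle.nn.Linear(10, 10, weight_attr=weight_attr)

    # Per-optimizer: weight_decay applies to all trainable parameters.
    # Since ParamAttr has higher priority where both are set, linear's
    # weight keeps its L1Decay, while parameters without their own
    # regularizer (e.g. the bias) fall back to the optimizer's L2Decay.
    optimizer = paddle.optimizer.Momentum(
        learning_rate=0.1,
        momentum=0.9,
        parameters=linear.parameters(),
        weight_decay=paddle.regularizer.L2Decay(coeff=0.0001))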