From 168bbd6d3c310f284a1c8ad397846bd845495890 Mon Sep 17 00:00:00 2001
From: juncaipeng <52520497+juncaipeng@users.noreply.github.com>
Date: Wed, 25 Sep 2019 16:48:00 +0800
Subject: [PATCH] Update L1Decay and L2Decay (#1367)

* modify L1DecayRegularizer, L1Decay, L2DecayRegularizer and L2Decay, test=develop
---
 .../regularizer_cn/L1DecayRegularizer_cn.rst | 30 ++-----------------
 .../api_cn/regularizer_cn/L1Decay_cn.rst     |  2 --
 .../regularizer_cn/L2DecayRegularizer_cn.rst | 29 ++----------------
 .../api_cn/regularizer_cn/L2Decay_cn.rst     |  2 --
 4 files changed, 4 insertions(+), 59 deletions(-)

diff --git a/doc/fluid/api_cn/regularizer_cn/L1DecayRegularizer_cn.rst b/doc/fluid/api_cn/regularizer_cn/L1DecayRegularizer_cn.rst
index 50d65fa4b..720676825 100644
--- a/doc/fluid/api_cn/regularizer_cn/L1DecayRegularizer_cn.rst
+++ b/doc/fluid/api_cn/regularizer_cn/L1DecayRegularizer_cn.rst
@@ -5,33 +5,7 @@ L1DecayRegularizer
 
 .. py:class:: paddle.fluid.regularizer.L1DecayRegularizer(regularization_coeff=0.0)
 
-L1DecayRegularizer implements L1 weight decay regularization. It is used during model training and encourages a sparse weight matrix.
+**Note: paddle.fluid.regularizer.L1DecayRegularizer is an alias for paddle.fluid.regularizer.L1Decay; using paddle.fluid.regularizer.L1Decay is recommended.**
 
-Concretely, L1 weight decay regularization is computed as follows:
+See the documentation of the :ref:`cn_api_fluid_regularizer_L1Decay` API for details.
 
-.. math::
-    \\L1WeightDecay=reg\_coeff*sign(parameter)\\
-
-Parameters:
-  - **regularization_coeff** (float) – L1 regularization coefficient. Default value is 0.0.
-
-**Code example**
-
-.. code-block:: python
-
-    import paddle.fluid as fluid
-
-    main_prog = fluid.Program()
-    startup_prog = fluid.Program()
-    with fluid.program_guard(main_prog, startup_prog):
-        data = fluid.layers.data(name='image', shape=[3, 28, 28], dtype='float32')
-        label = fluid.layers.data(name='label', shape=[1], dtype='int64')
-        hidden = fluid.layers.fc(input=data, size=128, act='relu')
-        prediction = fluid.layers.fc(input=hidden, size=10, act='softmax')
-        loss = fluid.layers.cross_entropy(input=prediction, label=label)
-        avg_loss = fluid.layers.mean(loss)
-        optimizer = fluid.optimizer.Adagrad(
-            learning_rate=1e-4,
-            regularization=fluid.regularizer.L1DecayRegularizer(
-                regularization_coeff=0.1))
-        optimizer.minimize(avg_loss)
diff --git a/doc/fluid/api_cn/regularizer_cn/L1Decay_cn.rst b/doc/fluid/api_cn/regularizer_cn/L1Decay_cn.rst
index ad0bf8869..9cf0766e5 100644
--- a/doc/fluid/api_cn/regularizer_cn/L1Decay_cn.rst
+++ b/doc/fluid/api_cn/regularizer_cn/L1Decay_cn.rst
@@ -6,8 +6,6 @@ L1Decay
 
 .. py:attribute:: paddle.fluid.regularizer.L1Decay(regularization_coeff=0.0)
 
-``L1Decay`` is an alias for ``L1DecayRegularizer``.
-
 L1Decay implements L1 weight decay regularization. It is used during model training and encourages a sparse weight matrix.
 
 Concretely, L1 weight decay regularization is computed as follows:
diff --git a/doc/fluid/api_cn/regularizer_cn/L2DecayRegularizer_cn.rst b/doc/fluid/api_cn/regularizer_cn/L2DecayRegularizer_cn.rst
index f2961c26a..4c6d2a4d2 100644
--- a/doc/fluid/api_cn/regularizer_cn/L2DecayRegularizer_cn.rst
+++ b/doc/fluid/api_cn/regularizer_cn/L2DecayRegularizer_cn.rst
@@ -5,33 +5,8 @@ L2DecayRegularizer
 
 .. py:class:: paddle.fluid.regularizer.L2DecayRegularizer(regularization_coeff=0.0)
 
-L2DecayRegularizer implements L2 weight decay regularization. It is used during model training and helps prevent the model from overfitting the training data.
+**Note: paddle.fluid.regularizer.L2DecayRegularizer is an alias for paddle.fluid.regularizer.L2Decay; using paddle.fluid.regularizer.L2Decay is recommended.**
 
-Concretely, L2 weight decay regularization is computed as follows:
+See the documentation of the :ref:`cn_api_fluid_regularizer_L2Decay` API for details.
 
-.. math::
-    \\L2WeightDecay=reg\_coeff*parameter\\
-
-Parameters:
-  - **regularization_coeff** (float) – Regularization coefficient. Default value is 0.0.
-
-**Code example**
-
-.. code-block:: python
-
-    import paddle.fluid as fluid
-
-    main_prog = fluid.Program()
-    startup_prog = fluid.Program()
-    with fluid.program_guard(main_prog, startup_prog):
-        data = fluid.layers.data(name='image', shape=[3, 28, 28], dtype='float32')
-        label = fluid.layers.data(name='label', shape=[1], dtype='int64')
-        hidden = fluid.layers.fc(input=data, size=128, act='relu')
-        prediction = fluid.layers.fc(input=hidden, size=10, act='softmax')
-        loss = fluid.layers.cross_entropy(input=prediction, label=label)
-        avg_loss = fluid.layers.mean(loss)
-        optimizer = fluid.optimizer.Adagrad(
-            learning_rate=1e-4,
-            regularization=fluid.regularizer.L2DecayRegularizer(
-                regularization_coeff=0.1))
-        optimizer.minimize(avg_loss)
diff --git a/doc/fluid/api_cn/regularizer_cn/L2Decay_cn.rst b/doc/fluid/api_cn/regularizer_cn/L2Decay_cn.rst
index 7966e36c6..1de52411d 100644
--- a/doc/fluid/api_cn/regularizer_cn/L2Decay_cn.rst
+++ b/doc/fluid/api_cn/regularizer_cn/L2Decay_cn.rst
@@ -5,8 +5,6 @@ L2Decay
 
 .. py:attribute:: paddle.fluid.regularizer.L2Decay
 
-``L2Decay`` is an alias for ``L2DecayRegularizer``.
-
 L2Decay implements L2 weight decay regularization. It is used during model training and helps prevent the model from overfitting the training data.
 
 Concretely, L2 weight decay regularization is computed as follows:
-- 
GitLab
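
Since the updated pages now steer users toward ``L1Decay``/``L2Decay`` rather than the ``*Regularizer`` aliases, here is a minimal sketch of the recommended usage. It is not part of the patch itself: it simply takes the example this patch removes and swaps in ``fluid.regularizer.L2Decay``, assuming the same Fluid 1.x API (``fluid.layers.data``, ``fluid.layers.fc``, ``fluid.optimizer.Adagrad``) used in the deleted snippets.

.. code-block:: python

    import paddle.fluid as fluid

    main_prog = fluid.Program()
    startup_prog = fluid.Program()
    with fluid.program_guard(main_prog, startup_prog):
        # Small image classifier, identical to the example removed above.
        data = fluid.layers.data(name='image', shape=[3, 28, 28], dtype='float32')
        label = fluid.layers.data(name='label', shape=[1], dtype='int64')
        hidden = fluid.layers.fc(input=data, size=128, act='relu')
        prediction = fluid.layers.fc(input=hidden, size=10, act='softmax')
        loss = fluid.layers.cross_entropy(input=prediction, label=label)
        avg_loss = fluid.layers.mean(loss)

        # L2Decay is the recommended spelling of L2DecayRegularizer; it adds
        # regularization_coeff * parameter to each parameter's gradient.
        optimizer = fluid.optimizer.Adagrad(
            learning_rate=1e-4,
            regularization=fluid.regularizer.L2Decay(regularization_coeff=0.1))
        optimizer.minimize(avg_loss)

Passing ``fluid.regularizer.L1Decay(regularization_coeff=0.1)`` instead gives the L1 variant, which adds ``regularization_coeff * sign(parameter)`` and encourages sparse weights.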