From 98ec9927890c1d6648783ba7b1107f212f3afde0 Mon Sep 17 00:00:00 2001
From: huzhiqiang <912790387@qq.com>
Date: Thu, 10 Oct 2019 20:44:47 +0800
Subject: [PATCH] modify WeightNormParamAttr English doc test=develop (#20218)

* modify WeightNormParamAttr English doc test=develop
---
 paddle/fluid/API.spec             |  2 +-
 python/paddle/fluid/param_attr.py | 42 +++++++++++++++++++++----------
 2 files changed, 30 insertions(+), 14 deletions(-)

diff --git a/paddle/fluid/API.spec b/paddle/fluid/API.spec
index 5208ad17d8..cc6b03f982 100644
--- a/paddle/fluid/API.spec
+++ b/paddle/fluid/API.spec
@@ -1103,7 +1103,7 @@ paddle.fluid.CUDAPinnedPlace ('paddle.fluid.core_avx.CUDAPinnedPlace', ('documen
 paddle.fluid.CUDAPinnedPlace.__init__ __init__(self: paddle.fluid.core_avx.CUDAPinnedPlace) -> None
 paddle.fluid.ParamAttr ('paddle.fluid.param_attr.ParamAttr', ('document', '7b5bfe856689036b8fffb71af1558e5c'))
 paddle.fluid.ParamAttr.__init__ (ArgSpec(args=['self', 'name', 'initializer', 'learning_rate', 'regularizer', 'trainable', 'gradient_clip', 'do_model_average'], varargs=None, keywords=None, defaults=(None, None, 1.0, None, True, None, True)), ('document', '6adf97f83acf6453d4a6a4b1070f3754'))
-paddle.fluid.WeightNormParamAttr ('paddle.fluid.param_attr.WeightNormParamAttr', ('document', 'b5ae1698ea72d5a9428000b916a67379'))
+paddle.fluid.WeightNormParamAttr ('paddle.fluid.param_attr.WeightNormParamAttr', ('document', 'ea029ec9e0dea75f136211c433154f25'))
 paddle.fluid.WeightNormParamAttr.__init__ (ArgSpec(args=['self', 'dim', 'name', 'initializer', 'learning_rate', 'regularizer', 'trainable', 'gradient_clip', 'do_model_average'], varargs=None, keywords=None, defaults=(None, None, None, 1.0, None, True, None, False)), ('document', '6adf97f83acf6453d4a6a4b1070f3754'))
 paddle.fluid.DataFeeder ('paddle.fluid.data_feeder.DataFeeder', ('document', 'd9e64be617bd5f49dbb08ac2bc8665e6'))
 paddle.fluid.DataFeeder.__init__ (ArgSpec(args=['self', 'feed_list', 'place', 'program'], varargs=None, keywords=None, defaults=(None,)), ('document', '6adf97f83acf6453d4a6a4b1070f3754'))
diff --git a/python/paddle/fluid/param_attr.py b/python/paddle/fluid/param_attr.py
index 94c9af3d1e..0975540514 100644
--- a/python/paddle/fluid/param_attr.py
+++ b/python/paddle/fluid/param_attr.py
@@ -183,7 +183,7 @@ class ParamAttr(object):
 
 class WeightNormParamAttr(ParamAttr):
     """
-    Used for weight Norm. Weight Norm is a reparameterization of the weight vectors
+    Parameter attribute for weight Norm. Weight Norm is a reparameterization of the weight vectors
     in a neural network that decouples the magnitude of those weight vectors from
     their direction. Weight Norm has been implemented as discussed in this paper:
     `Weight Normalization: A Simple Reparameterization to Accelerate
@@ -191,17 +191,27 @@
     `_.
 
     Args:
-        dim(int): Dimension over which to compute the norm. Default None.
-        name(str): The parameter's name. Default None.
-        initializer(Initializer): The method to initial this parameter. Default None.
-        learning_rate(float): The parameter's learning rate. The learning rate when
-            optimize is :math:`global\_lr * parameter\_lr * scheduler\_factor`.
+        dim(int): Dimension over which to compute the norm. It must be a non-negative
+            integer less than the rank of the weight Tensor. For example, dim can be
+            chosen from 0, 1, 2, or 3 for a convolution whose weight shape is
+            [cout, cin, kh, kw] and whose rank is 4. Default None, meaning that all
+            elements will be normalized.
+        name(str, optional): The parameter's name. Default None, meaning that the name
+            will be generated automatically. Please refer to :ref:`api_guide_Name` for
+            more details.
+        initializer(Initializer): The method used to initialize this parameter, such as
+            ``initializer = fluid.initializer.ConstantInitializer(1.0)``. Default None,
+            meaning that the weight parameter is initialized by the Xavier initializer
+            and the bias parameter is initialized to 0.
+        learning_rate(float32): The parameter's learning rate. The effective learning
+            rate used by the optimizer is :math:`global\_lr * parameter\_lr * scheduler\_factor`.
             Default 1.0.
-        regularizer(WeightDecayRegularizer): Regularization factor. Default None.
-        trainable(bool): Whether this parameter is trainable. Default True.
-        gradient_clip(BaseGradientClipAttr): The method to clip this parameter's
-            gradient. Default None.
-        do_model_average(bool): Whether this parameter should do model average.
+        regularizer(WeightDecayRegularizer): The regularization strategy, such as
+            ``regularizer = fluid.regularizer.L2DecayRegularizer(regularization_coeff=0.1)``.
+            Default None, meaning that there is no regularization.
+        trainable(bool, optional): Whether this parameter is trainable. Default True.
+        gradient_clip(BaseGradientClipAttr): The method used to clip this parameter's
+            gradient, such as ``gradient_clip = fluid.clip.GradientClipByNorm(clip_norm=2.0)``.
+            Default None, meaning that there is no gradient clipping.
+        do_model_average(bool, optional): Whether this parameter should use model averaging.
             Default False.
 
     Examples:
         .. code-block:: python
 
@@ -212,8 +222,14 @@
             data = fluid.layers.data(name="data", shape=[3, 32, 32], dtype="float32")
             fc = fluid.layers.fc(input=data,
                                  size=1000,
                                  param_attr=fluid.WeightNormParamAttr(
-                                     dim=None,
-                                     name='weight_norm_param'))
+                                     dim=None,
+                                     name='weight_norm_param',
+                                     initializer=fluid.initializer.ConstantInitializer(1.0),
+                                     learning_rate=1.0,
+                                     regularizer=fluid.regularizer.L2DecayRegularizer(regularization_coeff=0.1),
+                                     trainable=True,
+                                     gradient_clip=fluid.clip.GradientClipByNorm(clip_norm=2.0),
+                                     do_model_average=False))
 
     """
    # List to record the parameters reparameterized by weight normalization.
-- 
GitLab
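
Reviewer note (not part of the patch): a minimal NumPy sketch of the reparameterization the docstring describes, w = g * v / ||v||, where g is the magnitude and v the direction. The shapes mirror the fc example above; Paddle's exact slicing for a non-None ``dim`` may differ from this simplified illustration.

    import numpy as np

    # Direction parameter v for a weight of shape [1000, 3*32*32],
    # like the fc layer in the docstring example.
    v = np.random.randn(1000, 3 * 32 * 32)

    # dim=None case: one norm over all elements, so a single scalar magnitude g.
    g = 2.0
    w_none = g * v / np.linalg.norm(v)

    # Per-row variant: one norm (and one magnitude) per output neuron,
    # the kind of slicing a non-None dim selects.
    g_rows = np.ones((1000, 1))
    w_rows = g_rows * v / np.linalg.norm(v, axis=1, keepdims=True)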
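And a self-contained script around the docstring's example, for anyone who wants to verify it runs (assumes the fluid 1.x API documented in this patch; the CPU place, batch size of 1, and random input are arbitrary choices):

    import numpy as np
    import paddle.fluid as fluid

    # Same network as the docstring example: an fc layer whose weight is
    # reparameterized by weight normalization.
    data = fluid.layers.data(name="data", shape=[3, 32, 32], dtype="float32")
    fc = fluid.layers.fc(input=data,
                         size=1000,
                         param_attr=fluid.WeightNormParamAttr(
                             dim=None,
                             name='weight_norm_param',
                             initializer=fluid.initializer.ConstantInitializer(1.0),
                             learning_rate=1.0,
                             regularizer=fluid.regularizer.L2DecayRegularizer(regularization_coeff=0.1),
                             trainable=True,
                             gradient_clip=fluid.clip.GradientClipByNorm(clip_norm=2.0),
                             do_model_average=False))

    exe = fluid.Executor(fluid.CPUPlace())
    exe.run(fluid.default_startup_program())

    # One forward pass with a random batch of one image.
    x = np.random.random((1, 3, 32, 32)).astype("float32")
    out, = exe.run(feed={"data": x}, fetch_list=[fc])
    print(out.shape)  # (1, 1000)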