#   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from .initializer import Initializer, Xavier, Constant
from .regularizer import WeightDecayRegularizer
from paddle.fluid.data_feeder import check_type

__all__ = [
    'ParamAttr',
    'WeightNormParamAttr',
]


class ParamAttr:
    """

    Note:
        ``gradient_clip`` of ``ParamAttr`` HAS BEEN DEPRECATED since 2.0.
        Please use ``need_clip`` in ``ParamAttr`` to specify the clip scope.
        There are three clipping strategies: :ref:`api_paddle_nn_ClipGradByGlobalNorm` ,
        :ref:`api_paddle_nn_ClipGradByNorm` , :ref:`api_paddle_nn_ClipGradByValue` .

    Create an object to represent the attributes of a parameter. The attributes are:
    name, initializer, learning rate, regularizer, trainable, gradient clip,
    and model average.

    Parameters:
        name (str, optional): The parameter's name. Default None, meaning that the name
                would be created automatically.
        initializer (Initializer, optional): The method to initialize this parameter. Default
                None, meaning that the weight parameter is initialized by Xavier initializer,
                and the bias parameter is initialized by 0.
        learning_rate (float, optional): The parameter's learning rate. The effective learning
                rate during optimization is the global learning rate times the parameter's
                learning rate times the factor of the learning rate scheduler. Default 1.0.
        regularizer (WeightDecayRegularizer, optional): Regularization strategy. There are two
                methods: :ref:`api_paddle_regularizer_L1Decay` , :ref:`api_paddle_regularizer_L2Decay` .
                If a regularizer is also set in ``optimizer`` (such as :ref:`api_paddle_optimizer_SGD` ),
                the regularizer setting in the optimizer will be ignored. Default None, meaning there is
                no regularization.
        trainable (bool, optional): Whether this parameter is trainable. Default True.
        do_model_average (bool, optional): Whether this parameter should do model average
                when model average is enabled. Only used in ExponentialMovingAverage. Default True.
        need_clip (bool, optional): Whether the parameter gradient needs to be clipped in the optimizer. Default is True.

    Returns:
       A ParamAttr object.

    Examples:

        .. code-block:: python

            import paddle

            weight_attr = paddle.ParamAttr(name="weight",
                                           learning_rate=0.5,
                                           regularizer=paddle.regularizer.L2Decay(1.0),
                                           trainable=True)
            print(weight_attr.name) # "weight"
            paddle.nn.Linear(3, 4, weight_attr=weight_attr)
    """

    def __init__(
        self,
        name=None,
        initializer=None,
        learning_rate=1.0,
        regularizer=None,
        trainable=True,
        do_model_average=True,
        need_clip=True,
    ):

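        # Validate argument types up front so that a misconfigured ParamAttr
        # fails fast with a clear TypeError.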
        check_type(name, "name", (str, type(None)), "ParamAttr")
        check_type(learning_rate, "learning_rate", (float, int), "ParamAttr")
        check_type(trainable, "trainable", (bool), "ParamAttr")
        check_type(do_model_average, "do_model_average", (bool), "ParamAttr")
        check_type(need_clip, "need_clip", (bool), "ParamAttr")
        check_type(
            initializer, "initializer", (Initializer, type(None)), "ParamAttr"
        )
        check_type(
            regularizer,
            "regularizer",
            (WeightDecayRegularizer, type(None)),
            "ParamAttr",
        )

        self.name = name
        if self.name == "":
            raise ValueError("name of ParamAttr can not be an empty string")

        self.initializer = initializer
        self.learning_rate = learning_rate
        self.regularizer = regularizer
        self.trainable = trainable
        self.do_model_average = do_model_average
        self.need_clip = need_clip

    def _set_default_initializer(self, initializer):
        """
        Set the default initializer; the initializer should be one of Constant,
        Uniform, Normal, Xavier, or MSRA.

        Args:
            initializer(Initializer): the initializer to set.

        Returns:
            None
        """
        if initializer is None:
            if self.initializer is None:
                raise ValueError("ParamAttr.initializer is not set")
            return

        if self.initializer is not None:
            return

        self.initializer = initializer

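    # Precedence sketch (hypothetical calls, not from the source): an
    # initializer already set on this ParamAttr wins over the default passed
    # in here.
    #
    #     attr = ParamAttr(initializer=Constant(2.0))
    #     attr._set_default_initializer(Xavier())      # keeps Constant(2.0)
    #     ParamAttr()._set_default_initializer(None)   # raises ValueError
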
    def _set_default_param_initializer(self):
        """
        Set the default initializer for the parameter with Xavier.

        Args:
            None.

        Returns:
            None.
        """
        self._set_default_initializer(Xavier())

    def _set_default_bias_initializer(self):
        """
        Set the default initializer for the bias with Constant(0.0).

        Args:
            None.

        Returns:
            None.
        """
        self._set_default_initializer(Constant(0.0))

    @staticmethod
    def _to_attr(arg):
        """
        Create ParamAttr[s].

        Args:
            arg: Arguments to initialize ParamAttr[s]. arg's type can be
                None, str, Initializer, WeightDecayRegularizer,
                bool, ParamAttr, or a list/tuple of the above types.

        Returns:
            ParamAttr[s]: ParamAttr[s] initialized with arg.

        Raises:
            TypeError: if arg cannot initialize a ParamAttr.
        """
        if arg is None:
            return ParamAttr()
        elif isinstance(arg, (list, tuple)):
            return [ParamAttr._to_attr(a) for a in arg]
        elif isinstance(arg, ParamAttr):
            return arg
        elif isinstance(arg, str):
            return ParamAttr(name=arg)
        elif isinstance(arg, Initializer):
            return ParamAttr(initializer=arg)
        elif isinstance(arg, WeightDecayRegularizer):
            return ParamAttr(regularizer=arg)
        elif isinstance(arg, bool):
            return ParamAttr._to_attr(None) if arg else False
        else:
            raise TypeError("{0} cannot be cast to ParamAttr".format(type(arg)))

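    # A minimal sketch of the ``_to_attr`` dispatch above (argument values
    # such as "fc.w_0" are illustrative, not taken from the source):
    #
    #     ParamAttr._to_attr(None)          # -> ParamAttr()
    #     ParamAttr._to_attr("fc.w_0")      # -> ParamAttr(name="fc.w_0")
    #     ParamAttr._to_attr(Xavier())      # -> ParamAttr(initializer=Xavier())
    #     ParamAttr._to_attr(["w", "b"])    # -> one ParamAttr per element
    #     ParamAttr._to_attr(False)         # -> False (no parameter attribute)
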
    def _to_kwargs(self, with_initializer=False):
        """
        Returns the attributes of this parameter.

        Args:
            with_initializer(bool): Whether to add initializer attr.

        Returns:
            Parameter attributes(dict): The attributes of this parameter.
        """
        kwargs = {
            'name': self.name,
            'optimize_attr': {'learning_rate': self.learning_rate},
            'regularizer': self.regularizer,
            'trainable': self.trainable,
            'do_model_average': self.do_model_average,
            'need_clip': self.need_clip,
        }
        if with_initializer:
            kwargs['initializer'] = self.initializer
        return kwargs
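
    # For example (a sketch based on the defaults above, not captured output),
    # ``ParamAttr(name="w", learning_rate=0.5)._to_kwargs()`` would return:
    #
    #     {'name': 'w',
    #      'optimize_attr': {'learning_rate': 0.5},
    #      'regularizer': None,
    #      'trainable': True,
    #      'do_model_average': True,
    #      'need_clip': True}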


class WeightNormParamAttr(ParamAttr):
    r"""

    Note:
        Please use ``paddle.nn.utils.weight_norm`` in dygraph mode.

    Note:
        ``gradient_clip`` of ``ParamAttr`` HAS BEEN DEPRECATED since 2.0.
        Please use ``need_clip`` in ``ParamAttr`` to specify the clip scope.
        There are three clipping strategies: :ref:`api_paddle_nn_ClipGradByGlobalNorm` ,
        :ref:`api_paddle_nn_ClipGradByNorm` , :ref:`api_paddle_nn_ClipGradByValue` .

    Parameter attribute for Weight Norm. Weight Norm is a reparameterization of the weight vectors
    in a neural network that decouples the magnitude of those weight vectors from
    their direction. Weight Norm has been implemented as discussed in this
    paper: `Weight Normalization: A Simple Reparameterization to Accelerate
    Training of Deep Neural Networks
    <https://arxiv.org/pdf/1602.07868.pdf>`_.

    Args:
        dim(int, optional): Dimension over which to compute the norm. Dim is a non-negative
            number which is less than the rank of the weight Tensor. For example, dim can
            be chosen from 0, 1, 2, 3 for convolution whose weight shape is [cout, cin, kh, kw]
            and rank is 4. Default None, meaning that all elements will be normalized.
        name(str, optional): The parameter's name. Default None, meaning that the name would
            be created automatically. Please refer to :ref:`api_guide_Name` for more details.
        initializer(Initializer, optional): The method to initialize this parameter, such as
            ``initializer = paddle.nn.initializer.Constant(1.0)``. Default None,
            meaning that the weight parameter is initialized by Xavier initializer, and
            the bias parameter is initialized by 0.
        learning_rate(float32, optional): The parameter's learning rate. The effective
            learning rate is :math:`global\_lr * parameter\_lr * scheduler\_factor`.
            Default 1.0.
        regularizer (WeightDecayRegularizer, optional): Regularization strategy. There are
            two methods: :ref:`api_paddle_regularizer_L1Decay` ,
            :ref:`api_paddle_regularizer_L2Decay`.
            If a regularizer is also set in ``optimizer``
            (such as :ref:`api_paddle_optimizer_SGD` ), the regularizer setting in the
            optimizer will be ignored. Default None, meaning there is no regularization.
        trainable(bool, optional): Whether this parameter is trainable. Default True.
        do_model_average(bool, optional): Whether this parameter should do model average.
            Default False.
        need_clip (bool, optional): Whether the parameter gradient needs to be clipped in the optimizer. Default is True.

    Examples:

        .. code-block:: python

            import paddle

            paddle.enable_static()

            data = paddle.static.data(name="data", shape=[3, 32, 32], dtype="float32")

            fc = paddle.static.nn.fc(x=data,
                                     size=1000,
                                     weight_attr=paddle.static.WeightNormParamAttr(
                                         dim=None,
                                         name='weight_norm_param',
                                         initializer=paddle.nn.initializer.Constant(1.0),
                                         learning_rate=1.0,
                                         regularizer=paddle.regularizer.L2Decay(0.1),
                                         trainable=True,
                                         do_model_average=False,
                                         need_clip=True))

    """
    # List to record the parameters reparameterized by weight normalization.
    # If these parameters are treated as Variable rather than Parameter,
    # this list can be used to distinguish them and helps to serialize
    # these parameters for inference.
    params_with_weight_norm = []

    def __init__(
        self,
        dim=None,
        name=None,
        initializer=None,
        learning_rate=1.0,
        regularizer=None,
        trainable=True,
        do_model_average=False,
        need_clip=True,
    ):
        super().__init__(
            name=name,
            initializer=initializer,
            learning_rate=learning_rate,
            regularizer=regularizer,
            trainable=trainable,
            do_model_average=do_model_average,
            need_clip=need_clip,
        )
        self.dim = dim