# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import collections
import contextlib
import sys
import numpy as np
import six
import re
import copy
import weakref
import warnings

from . import parallel_helper
from .. import unique_name
from paddle.fluid import core
from .layer_object_helper import LayerObjectHelper
from .base import program_desc_tracing_guard, param_guard
from paddle.fluid import framework
from ..param_attr import ParamAttr
from paddle.fluid.executor import Executor, global_scope
from paddle.fluid.framework import in_dygraph_mode
from paddle.fluid.framework import _current_expected_place as _get_device

__all__ = ['Layer']

_first_cap_re = re.compile('(.)([A-Z][a-z]+)')
_all_cap_re = re.compile('([a-z])([A-Z])')


def _convert_camel_to_snake(name):
    s1 = _first_cap_re.sub(r'\1_\2', name)
    return _all_cap_re.sub(r'\1_\2', s1).lower()


class HookRemoveHelper(object):
    """ A HookRemoveHelper that can be used to remove hook. """

    next_hook_id = 0

    def __init__(self, hooks):
        self._hooks_ref = weakref.ref(hooks)
        self._hook_id = HookRemoveHelper.next_hook_id
        HookRemoveHelper.next_hook_id += 1

    def remove(self):
        hooks = self._hooks_ref()
        if hooks is not None and self._hook_id in hooks:
            del hooks[self._hook_id]


class Layer(core.Layer):
    """
    Dynamic graph Layer based on object-oriented design (OOD); it includes the parameters of the layer, the structure of the forward graph, and so on.

    Parameters:
        name_scope (str, optional): prefix name used by the layer to name parameters.
            If the prefix is "my_layer", a parameter in MyLayer
            can be named "my_layer_0.w_n", where "w" is the parameter
            base name and "n" is a unique auto-generated suffix.
            If None, the prefix will be the snake-cased class name. Default: None.
        dtype(str or core.VarDesc.VarType, optional): data type of this parameter.
            If set to str, it can be "bool", "float16", "float32", "float64",
            "int8", "int16", "int32", "int64", "uint8" or "uint16".
            Default: ``core.VarDesc.VarType.FP32``

    Returns:
        None
    """

    def __init__(self, name_scope=None, dtype=core.VarDesc.VarType.FP32):
        self.training = True
        if name_scope is None:
            name_scope = _convert_camel_to_snake(self.__class__.__name__)
        self._full_name = unique_name.generate(name_scope)
        self._helper = LayerObjectHelper(self._full_name)
        self._built = False
        self._dtype = dtype
        self._init_in_dynamic_mode = framework.in_dygraph_mode()

        self._parameters = collections.OrderedDict()
        # Buffers the variables (not parameters) created in the layer
        self._buffers = collections.OrderedDict()
        self._non_persistable_buffer_names_set = set()
        self._sub_layers = collections.OrderedDict()
        self._loaddict_holder = collections.OrderedDict()

        self._forward_pre_hooks = collections.OrderedDict()
        self._forward_post_hooks = collections.OrderedDict()

    def train(self):
        """
        Sets this Layer and all its sublayers to training mode.
        This only affects certain modules like `Dropout` and `BatchNorm`.

        Returns:
            None
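
        Examples:
            A minimal usage sketch (``paddle.nn.Linear`` is the same API used by
            the other examples in this file):

            .. code-block:: python

                import paddle

                linear = paddle.nn.Linear(10, 3)
                linear.eval()   # switch to evaluation mode
                linear.train()  # switch back to training mode
                print(linear.training)  # True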
        """
        # global setting
        framework._dygraph_tracer().train_mode()
        # Layer-level setting
        self.training = True
        for layer in self.sublayers():
            layer.train()

    def eval(self):
        """
        Sets this Layer and all its sublayers to evaluation mode.
        This only affects certain modules like `Dropout` and `BatchNorm`.

        Returns:
            None
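
        Examples:
            A minimal usage sketch, mirroring the ``train()`` example above:

            .. code-block:: python

                import paddle

                linear = paddle.nn.Linear(10, 3)
                linear.eval()
                print(linear.training)  # False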
        """
        # global setting
        framework._dygraph_tracer().eval_mode()
        # Layer-level setting
        self.training = False
        for layer in self.sublayers():
            layer.eval()

    def apply(self, fn):
        """
        Applies ``fn`` recursively to every sublayer (as returned by ``.sublayers()``)
        as well as self. Typical use includes initializing the parameters of a model.

        Parameters:
            fn (function): a function to be applied to each sublayer

        Returns:
            Layer: self

        Examples:
            .. code-block:: python

              import paddle
              import paddle.nn as nn

              net = nn.Sequential(nn.Linear(2, 2), nn.Linear(2, 2))

              def init_weights(layer):
                  if type(layer) == nn.Linear:
                      print('before init weight:', layer.weight.numpy())
                      new_weight = paddle.fill_constant(layer.weight.shape, layer.weight.dtype, value=0.9)
                      layer.weight.set_value(new_weight)
                      print('after init weight:', layer.weight.numpy())

              net.apply(init_weights)

              print(net.state_dict())
        """
        for layer in self.children():
            layer.apply(fn)

        fn(self)

        return self

    def full_name(self):
        """Full name for this layer, composed by name_scope + "/" + MyLayer.__class__.__name__

        Returns:
            str: full name of this layer.
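
        Examples:
            A minimal sketch; the numeric suffix in the printed name is
            auto-generated and may differ:

            .. code-block:: python

                import paddle

                linear = paddle.nn.Linear(10, 3)
                print(linear.full_name())  # e.g. "linear_0"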
        """
        return self._full_name

    def register_forward_post_hook(self, hook):
        """Register a forward post-hook for Layer. The hook will be called after `forward` function has been computed.

        It should have the following form, `input` and `output` of the `hook` is `input` and `output` of the `Layer` respectively.
        User can use forward post-hook to change the output of the Layer or perform information statistics tasks on the Layer.
 
        hook(Layer, input, output) -> None or modified output

        Parameters:
            hook(function): a function registered as a forward post-hook

        Returns:
            HookRemoveHelper: a HookRemoveHelper object that can be used to remove the added hook by calling `hook_remove_helper.remove()`.

        Examples:
            .. code-block:: python

                import paddle
                import numpy as np

                # the forward_post_hook changes the output of the layer: output = output * 2
                def forward_post_hook(layer, input, output):
                    # users can use layer, input and output for information statistics tasks

                    # change the output
                    return output * 2

                linear = paddle.nn.Linear(13, 5)

                # register the hook
                forward_post_hook_handle = linear.register_forward_post_hook(forward_post_hook)

                value1 = np.arange(26).reshape(2, 13).astype("float32")
                in1 = paddle.to_tensor(value1)

                out0 = linear(in1)

                # remove the hook
                forward_post_hook_handle.remove()

                out1 = linear(in1)

                # the hook changed the linear's output to output * 2, so out0 equals out1 * 2
                assert (out0.numpy() == (out1.numpy()) * 2).all()
        """
        hook_remove_helper = HookRemoveHelper(self._forward_post_hooks)
        self._forward_post_hooks[hook_remove_helper._hook_id] = hook
        return hook_remove_helper

    def register_forward_pre_hook(self, hook):
        """Register a forward pre-hook for Layer. The hook will be called before `forward` function has been computed.
        
        It should have the following form, `input` of the `hook` is `input` of the `Layer`,
        hook can either return a tuple or a single modified value in the hook. We will wrap the value into a tuple if 
        a single value is returned(unless that value is already a tuple).
        User can use forward pre-hook to change the input of the Layer or perform information statistics tasks on the Layer.

        hook(Layer, input) -> None or modified input

        Parameters:
            hook(function): a function registered as a forward pre-hook

        Returns:
            HookRemoveHelper: a HookRemoveHelper object that can be used to remove the added hook by calling `hook_remove_helper.remove()`.

        Examples:
            .. code-block:: python

                import paddle
                import numpy as np

                # the forward_pre_hook changes the input of the layer: input = input * 2
                def forward_pre_hook(layer, input):
                    # users can use layer and input for information statistics tasks

                    # change the input
                    input_return = (input[0] * 2)
                    return input_return

                linear = paddle.nn.Linear(13, 5)

                # register the hook
                forward_pre_hook_handle = linear.register_forward_pre_hook(forward_pre_hook)

                value0 = np.arange(26).reshape(2, 13).astype("float32")
                in0 = paddle.to_tensor(value0)
                out0 = linear(in0)

                # remove the hook
                forward_pre_hook_handle.remove()

                value1 = value0 * 2
                in1 = paddle.to_tensor(value1)
                out1 = linear(in1)

                # the hook changed the linear's input to input * 2, so out0 equals out1
                assert (out0.numpy() == out1.numpy()).all()
        """
        hook_remove_helper = HookRemoveHelper(self._forward_pre_hooks)
        self._forward_pre_hooks[hook_remove_helper._hook_id] = hook
        return hook_remove_helper

    def create_parameter(self,
                         shape,
                         attr=None,
                         dtype=None,
                         is_bias=False,
                         default_initializer=None):
        """Create parameters for this layer.
        
        Parameters:
            shape(list): Shape of the parameter.
            attr(ParamAttr, optional): Parameter attribute of weight. Please refer to :ref:`api_fluid_ParamAttr`. Default: None.
            dtype(str or core.VarDesc.VarType, optional): Data type of this parameter.
                If set str, it can be "bool",  "float16", "float32", "float64",
                "int8", "int16", "int32", "int64", "uint8" or "uint16". Default: "float32".
            is_bias(bool, optional): whether this is a bias parameter. Default: False.
            default_initializer(Initializer, optional): the default initializer for this parameter.
                If set None, default initializer will be set to :ref:`api_fluid_initializer_XavierInitializer` and :ref:`api_fluid_initializer_ConstantInitializer`
                for non-bias and bias parameter, respectively. Default: None.

        Returns:
            :ref:`api_guide_Variable_en` : created parameter.
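
        Examples:
            A minimal sketch of creating a weight inside a custom layer's
            ``__init__`` (the ``MyLayer`` subclass and its shape are illustrative
            assumptions, not part of this API):

            .. code-block:: python

                import paddle

                class MyLayer(paddle.nn.Layer):
                    def __init__(self):
                        super(MyLayer, self).__init__()
                        # a 2x3 float32 weight with default initialization
                        self.w = self.create_parameter(shape=[2, 3], dtype="float32")

                layer = MyLayer()
                print(layer.w.shape)  # [2, 3]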
        """
        temp_attr = copy.deepcopy(attr)
        if isinstance(temp_attr, six.string_types) and temp_attr == "":
            temp_attr = None
        return self._helper.create_parameter(temp_attr, shape, dtype, is_bias,
                                             default_initializer)

    # TODO: Add more parameter list when we need them
    def create_variable(self,
                        name=None,
                        persistable=None,
                        dtype=None,
                        type=core.VarDesc.VarType.LOD_TENSOR):
        """Create Variable for this layer.

        Parameters:
            name(str, optional): name of the variable. Please refer to :ref:`api_guide_Name` . Default: None
            persistable(bool, optional): whether this variable is persistable. Default: False
            dtype(str or core.VarDesc.VarType, optional): data type of this parameter.
                If set str, it can be "bool",  "float16", "float32", "float64",
                "int8", "int16", "int32", "int64", "uint8" or "uint16".
                If set None, it will be ``core.VarDesc.VarType.FP32``. Default: None
            type(core.VarDesc.VarType, optional): type of the variable. There is usually no need to set this parameter. Default: ``core.VarDesc.VarType.LOD_TENSOR``

        Returns:
            :ref:`api_guide_Variable_en` : created Variable.
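
        Examples:
            A minimal sketch (the ``MyLayer`` subclass and variable name are
            illustrative assumptions) of creating a non-parameter Variable
            inside a custom layer:

            .. code-block:: python

                import paddle

                class MyLayer(paddle.nn.Layer):
                    def __init__(self):
                        super(MyLayer, self).__init__()
                        # an intermediate, non-parameter variable owned by this layer
                        self.var = self.create_variable(name="my_var", dtype="float32")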
        """
        if name is not None:
            var_name = ".".join([self._full_name, name])
        else:
            var_name = unique_name.generate(".".join(
                [self._full_name, "_generated_var"]))

        return self._helper.main_program.current_block().create_var(
            name=var_name, persistable=persistable, dtype=dtype, type=type)

    def parameters(self, include_sublayers=True):
        """Returns a list of all Parameters from current layer and its sub-layers.

        Parameters:
            include_sublayers(bool, optional): Whether to include the parameters of sublayers. If True, the parameters from sublayers are also included. Default: True

        Returns:
            list of :ref:`api_guide_Variable_en` : a list of Parameters.
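
        Examples:
            A minimal sketch:

            .. code-block:: python

                import paddle

                linear = paddle.nn.Linear(10, 3)
                # a Linear layer holds a weight and a bias Parameter
                print(len(linear.parameters()))  # 2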
        """
        ret = [
            param
            for _, param in self.named_parameters(
                include_sublayers=include_sublayers)
        ]
        return ret

    def children(self):
        """Returns an iterator over immediate children layers.

        Yields:
            Layer: a child layer

        Examples:
            .. code-block:: python

                import paddle

                fc1 = paddle.nn.Linear(10, 3)
                fc2 = paddle.nn.Linear(3, 10, bias_attr=False)
                model = paddle.nn.Sequential(fc1, fc2)

                layer_list = list(model.children())

                print(layer_list)

        """
        for _, layer in self.named_children():
            yield layer

    def named_children(self):
        """Returns an iterator over immediate children layers, yielding both
        the name of the layer as well as the layer itself.

        Yields:
            (string, Layer): Tuple containing a name and child layer

        Examples:
            .. code-block:: python

                import paddle

                fc1 = paddle.nn.Linear(10, 3)
                fc2 = paddle.nn.Linear(3, 10, bias_attr=False)
                model = paddle.nn.Sequential(fc1, fc2)
                for prefix, layer in model.named_children():
                    print(prefix, layer)

        """
        memo = set()
        for name, layer in self._sub_layers.items():
            if layer is not None and layer not in memo:
                memo.add(layer)
                yield name, layer

    def sublayers(self, include_sublayers=True):
        """Returns a list of sub layers.

        Parameters:
            include_sublayers(bool, optional): Whether to recursively include the sublayers of sublayers. If True, sublayers of sublayers are also returned. Default: True

        Returns:
            list of Layer : a list of sub layers.
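
        Examples:
            A minimal sketch, following the ``children()`` example above:

            .. code-block:: python

                import paddle

                fc1 = paddle.nn.Linear(10, 3)
                fc2 = paddle.nn.Linear(3, 10, bias_attr=False)
                model = paddle.nn.Sequential(fc1, fc2)
                print(model.sublayers())  # [fc1, fc2]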
        """
        ret = [
            layer
            for _, layer in self.named_sublayers(
                include_sublayers=include_sublayers)
        ]
        return ret

    def named_parameters(self, prefix='', include_sublayers=True):
        """
        Returns an iterator over all parameters in the Layer, yielding tuple of name and parameter.

        Parameters:
            prefix(str, optional): Prefix to prepend to all parameter names. Default: ''.
            include_sublayers(bool, optional): Whether to include the parameters of sublayers.
                If True, also include the named parameters from sublayers. Default: True.

        Yields:
            (string, Parameter): Tuple of name and Parameter

        Examples:
            .. code-block:: python

                import paddle

                fc1 = paddle.nn.Linear(10, 3)
                fc2 = paddle.nn.Linear(3, 10, bias_attr=False)
                model = paddle.nn.Sequential(fc1, fc2)
                for name, param in model.named_parameters():
                    print(name, param)

        """
        params_set = set()
        named_sublayers = self.named_sublayers(
            prefix=prefix,
            include_sublayers=include_sublayers,
            include_self=True)
        for layer_prefix, sublayer in named_sublayers:
            params = sublayer._parameters.items()
            for key, param in params:
                if param is None or param in params_set:
                    continue
                params_set.add(param)
                name = layer_prefix + ('.' if layer_prefix else '') + key
                yield name, param

    def named_sublayers(self,
                        prefix='',
                        include_sublayers=True,
                        include_self=False,
                        layers_set=None):
        """
        Returns an iterator over all sublayers in the Layer, yielding tuple of name and sublayer.
        Duplicate sublayers will only be yielded once.

        Parameters:
            prefix(str, optional): Prefix to prepend to all parameter names. Default: ''.
            include_sublayers(bool, optional): Whether to include the sublayers. Default: True.
            include_self(bool, optional): Whether to include the Layer itself. Default: False.
            layers_set(set, optional): The set used to record visited sublayers. Default: None.

        Yields:
            (string, Layer): Tuple of name and Layer

        Examples:
            .. code-block:: python

                import paddle

                fc1 = paddle.nn.Linear(10, 3)
                fc2 = paddle.nn.Linear(3, 10, bias_attr=False)
                model = paddle.nn.Sequential(fc1, fc2)
                for prefix, layer in model.named_sublayers():
                    print(prefix, layer)

        """
        if layers_set is None:
            layers_set = set()
        if include_self and self not in layers_set:
            layers_set.add(self)
            yield prefix, self
        if include_sublayers:
            for key, layer in self._sub_layers.items():
                if layer is None:
                    continue
                layer_prefix = prefix + ('.' if prefix else '') + key
                for p, l in layer.named_sublayers(
                        prefix=layer_prefix,
                        include_sublayers=include_sublayers,
                        include_self=True,
                        layers_set=layers_set):
                    yield p, l

    def register_buffer(self, name, variable, persistable=True):
        """
        Registers a variable as a buffer of the layer.

        `buffer` is a non-parametric variable and will not be updated by the optimizer,
        but it is necessary for evaluation and inference, for example, the mean and variance in BatchNorm layers.
        The registered buffer is persistable by default, and will be saved into
        `state_dict` alongside parameters. If persistable=False, it registers
        a non-persistable buffer, so that it will not be a part of `state_dict`.

        Buffers can be accessed as attributes using given names.

        Parameters:
            name (string): name of the buffer. The buffer can be accessed
                from this layer using the given name
            variable (Variable): the variable to be registered as buffer.
            persistable (bool): whether the buffer is part of this layer's
                state_dict.

        Returns:
            None
        
        Examples:
            .. code-block:: python

                import numpy as np
                import paddle

                linear = paddle.nn.Linear(10, 3)
                value = np.array([0]).astype("float32")
                buffer = paddle.to_tensor(value)
                linear.register_buffer("buf_name", buffer, persistable=True)

                # get the buffer by attribute.
                print(linear.buf_name)

        """

        if '_buffers' not in self.__dict__:
            raise ValueError(
                "super(YourLayer, self).__init__() should be called first")
        elif not isinstance(name, six.string_types):
            raise TypeError(
                "The name of buffer should be a string, but received {}.".
                format(type(name).__name__))
        elif '.' in name:
            raise KeyError(
                "The name of buffer can not contain `.`, "
                "because when you access the newly added buffer in the "
                "form of `self.**.**`, it will cause AttributeError.")
        elif name == '':
            raise KeyError("The name of buffer can not be empty.")
        elif hasattr(self, name) and name not in self._buffers:
            raise KeyError("attribute '{}' already exists.".format(name))
        elif variable is not None and not type(variable) == core.VarBase:
            raise TypeError(
                "The registered buffer should be a core.VarBase, but received {}.".
                format(type(variable).__name__))
        else:
            self._buffers[name] = variable
            if persistable:
                self._non_persistable_buffer_names_set.discard(name)
            else:
                self._non_persistable_buffer_names_set.add(name)

    def buffers(self, include_sublayers=True):
        """
        Returns a list of all buffers from the current layer and its sub-layers.

        Parameters:
            include_sublayers(bool, optional): Whether to include the buffers of sublayers. If True, the buffers from sublayers are also included. Default: True

        Returns:
            list of :ref:`api_guide_Variable_en` : a list of buffers.
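
        Examples:
            A minimal sketch, using ``register_buffer`` as shown above:

            .. code-block:: python

                import numpy as np
                import paddle

                linear = paddle.nn.Linear(10, 3)
                buf = paddle.to_tensor(np.array([0]).astype("float32"))
                linear.register_buffer("buf_name", buf)
                print(len(linear.buffers()))  # 1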
        """
        ret = [
            buffer
            for _, buffer in self.named_buffers(
                include_sublayers=include_sublayers)
        ]
        return ret

    def named_buffers(self, prefix='', include_sublayers=True):
        """
        Returns an iterator over all buffers in the Layer, yielding tuple of name and Variable.

        Parameters:
            prefix(str, optional): Prefix to prepend to all buffer names. Default: ''.
            include_sublayers(bool, optional): Whether to include the buffers of sublayers.
                If True, also include the named buffers from sublayers. Default: True.

        Yields:
            (string, Variable): Tuple of name and Variable

        Examples:
            .. code-block:: python

                import numpy as np
                import paddle

                fc1 = paddle.nn.Linear(10, 3)
                buffer1 = paddle.to_tensor(np.array([0]).astype("float32"))
                # register a variable as a buffer with an explicit `persistable` value
                fc1.register_buffer("buf_name_1", buffer1, persistable=True)

                fc2 = paddle.nn.Linear(3, 10)
                buffer2 = paddle.to_tensor(np.array([1]).astype("float32"))
                # register a buffer by assigning a Variable to an attribute.
                # `persistable` can only be False this way.
                fc2.buf_name_2 = buffer2

                model = paddle.nn.Sequential(fc1, fc2)

                # get all named buffers
                for name, buffer in model.named_buffers():
                    print(name, buffer)

        """
        buffers_set = set()
        named_sublayers = self.named_sublayers(
            prefix=prefix,
            include_sublayers=include_sublayers,
            include_self=True)
        for layer_prefix, sublayer in named_sublayers:
            buffers = sublayer._buffers.items()
            for key, buffer in buffers:
                if buffer is None or buffer in buffers_set:
                    continue
                buffers_set.add(buffer)
                name = layer_prefix + ('.' if layer_prefix else '') + key
                yield name, buffer

    def clear_gradients(self):
        """
        Clear the gradients of all parameters for this layer.
        
        Returns:
            None
        
        Examples:
            .. code-block:: python

                import paddle
                import numpy as np

                value = np.arange(26).reshape(2, 13).astype("float32")
                a = paddle.to_tensor(value)
                linear = paddle.nn.Linear(13, 5)
                adam = paddle.optimizer.Adam(learning_rate=0.01,
                                            parameters=linear.parameters())
                out = linear(a)
                out.backward()
                adam.minimize(out)
                linear.clear_gradients()

        """
        for p in self.parameters():
            if p.trainable:
                p.clear_gradient()

    def _build_once(self, *args, **kwargs):
        pass

    def __call__(self, *inputs, **kwargs):
        for forward_pre_hook in self._forward_pre_hooks.values():
            hook_result = forward_pre_hook(self, inputs)
            if hook_result is not None:
                if not isinstance(hook_result, tuple):
                    hook_result = (hook_result, )
                inputs = hook_result

        if not self._built:
            with program_desc_tracing_guard(False):
                self._build_once(*inputs, **kwargs)
                if parallel_helper._is_data_parallel_mode():
                    parallel_helper._broadcast_parameters(
                        self._parameters.values())
            self._built = True

        with param_guard(self._parameters), param_guard(self._buffers):
            outputs = self.forward(*inputs, **kwargs)

        for forward_post_hook in self._forward_post_hooks.values():
            hook_result = forward_post_hook(self, inputs, outputs)
            if hook_result is not None:
                outputs = hook_result

        return outputs

    def forward(self, *inputs, **kwargs):
        """
        Defines the computation performed at every call.
        Should be overridden by all subclasses.

        Parameters:
            *inputs(tuple): unpacked tuple arguments
            **kwargs(dict): unpacked dict arguments
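
        Examples:
            A minimal sketch (the ``MyLayer`` subclass is an illustrative
            assumption) of overriding ``forward``:

            .. code-block:: python

                import paddle

                class MyLayer(paddle.nn.Layer):
                    def __init__(self):
                        super(MyLayer, self).__init__()
                        self.linear = paddle.nn.Linear(10, 3)

                    def forward(self, x):
                        # the computation performed when the layer is called
                        return self.linear(x)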
        """
        raise NotImplementedError

    def backward(self, *inputs):
        raise ValueError("Layer shouldn't implement backward")

    def add_sublayer(self, name, sublayer):
        """Adds a sub Layer instance.

        The added sublayer can be accessed by `self.name`.

        Parameters:
            name(str): name of this sublayer.
            sublayer(Layer): an instance of Layer.
        Returns:
            Layer: the sublayer passed in.
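
        Examples:
            A minimal sketch (the ``MySequential`` container is an illustrative
            assumption) of registering sublayers under generated names:

            .. code-block:: python

                import paddle

                class MySequential(paddle.nn.Layer):
                    def __init__(self, *layers):
                        super(MySequential, self).__init__()
                        for i, layer in enumerate(layers):
                            # each sublayer becomes accessible as an attribute
                            self.add_sublayer(str(i), layer)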
        """
        assert isinstance(sublayer, core.Layer)

        self._sub_layers[name] = sublayer
        return sublayer

    def add_parameter(self, name, parameter):
        """Adds a Parameter instance.

        The added parameter can be accessed by `self.name`.

        Parameters:
            name(str): name of this parameter.
            parameter(Parameter): an instance of Parameter.
        Returns:
            Parameter: the parameter passed in.
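
        Examples:
            A minimal sketch (the ``MyLayer`` subclass is an illustrative
            assumption) combining ``create_parameter`` and ``add_parameter``:

            .. code-block:: python

                import paddle

                class MyLayer(paddle.nn.Layer):
                    def __init__(self):
                        super(MyLayer, self).__init__()
                        w = self.create_parameter(shape=[2, 3], dtype="float32")
                        # the registered parameter becomes accessible as self.w
                        self.add_parameter("w", w)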
        """
        if '_parameters' not in self.__dict__:
            raise RuntimeError(
                "super(YourLayer, self).__init__() should be called firstly.")
        elif not isinstance(name, six.string_types):
            raise TypeError(
                "The name of parameter should be a string, but received {}.".
                format(type(name).__name__))
        elif '.' in name:
            raise KeyError(
                "The name of parameter can not contain `.`, "
                "because when you access the newly added parameter in the "
                "form of `self.**.**`, it will cause AttributeError.")
        elif name == '':
            raise KeyError("The name of parameter can not be empty.")
        elif hasattr(self, name) and name not in self._parameters:
            raise KeyError("The parameter '{}' already exists.".format(name))
        elif parameter is not None and not isinstance(parameter,
                                                      framework.Parameter):
            raise TypeError(
                "The parameter to be added should be a Parameter, but received {}.".
                format(type(parameter).__name__))
        else:
            if parameter is None:
                self._parameters[name] = None
                # return early: the state-dict check below would otherwise
                # dereference `parameter.name` on None
                return parameter

            if len(self._loaddict_holder) > 0:
                assert parameter.name in self._loaddict_holder, "Parameter not found: can't find [ {} ] in state_dict".format(
                    parameter.name)

                parameter.set_value(self._loaddict_holder[parameter.name])

            self._parameters[name] = parameter
        return parameter

    def __getattr__(self, name):
        if name in self._parameters:
            return self._parameters[name]
        elif name in self._sub_layers:
            return self._sub_layers[name]
        elif name in self._buffers:
            return self._buffers[name]
        else:
            return object.__getattribute__(self, name)

    def __setattr__(self, name, value):
        def _remove_if_exist(*dicts):
            for d in dicts:
                if name in d:
                    del d[name]

        if isinstance(getattr(type(self), name, None), property):
            object.__setattr__(self, name, value)
        params = self.__dict__.get('_parameters', None)
        if isinstance(value, framework.Parameter):
            if params is None:
                raise ValueError(
                    "super(YourLayer, self).__init__() should be called first")
            if len(self._loaddict_holder) > 0:
                assert value.name in self._loaddict_holder, "Parameter not found: can't find [ {} ] in state_dict".format(
                    value.name)

                value.set_value(self._loaddict_holder[value.name])

            _remove_if_exist(self.__dict__, self._buffers, self._sub_layers)
            params[name] = value
        elif params is not None and name in params:
            if value is not None:
                raise TypeError(
                    "assignment to parameter '{}' should be of type Parameter or None, but got '{}'"
                    .format(name, type(value).__name__))
            params[name] = None
        else:
            layers = self.__dict__.get('_sub_layers', None)
            if isinstance(value, core.Layer):
                if layers is None:
                    raise ValueError(
                        "super(YourLayer, self).__init__() should be called first"
                    )

                _remove_if_exist(self.__dict__, self._parameters, self._buffers)
                layers[name] = value
            elif layers is not None and name in layers:
                if value is not None:
                    raise TypeError(
                        "assignment to sublayer '{}' should be of type Layer or None, but got '{}'"
                        .format(name, type(value).__name__))
                layers[name] = None
            else:
                _buffers = self.__dict__.get('_buffers', None)
                if type(value) == core.VarBase:
                    if _buffers is None:
                        raise ValueError(
                            "super(YourLayer, self).__init__() should be called first"
                        )
                    _remove_if_exist(self.__dict__, self._parameters,
                                     self._sub_layers)
                    # Set persistable=False by default. Only `register_buffer` can
                    # add a persistable buffer.
                    if name not in self._buffers:
                        self._non_persistable_buffer_names_set.add(name)
                    _buffers[name] = value
                elif _buffers is not None and name in _buffers:
                    if value is not None:
                        raise TypeError(
                            "assignment to buffers '{}' should be of type core.VarBase or None, but got '{}'"
                            .format(name, type(value).__name__))
                    # Assigning None will remove the buffer, but if a new VarBase is re-assigned
                    # to it, it will again be marked as a buffer with the same `persistable` attribute.
                    _buffers[name] = None
                else:
                    object.__setattr__(self, name, value)

    def __delattr__(self, name):
        if name in self._parameters:
            del self._parameters[name]
        elif name in self._sub_layers:
            del self._sub_layers[name]
        elif name in self._buffers:
            del self._buffers[name]
            self._non_persistable_buffer_names_set.discard(name)
        else:
            object.__delattr__(self, name)

    def __dir__(self):
        """
        Return a list containing all parameters, buffers (non-parameter variables), sublayers, methods and attributes of the Layer.

        Examples:
            .. code-block:: python

                import paddle.fluid as fluid
                import numpy as np

                fluid.dygraph.enable_dygraph()

                class Mylayer(fluid.dygraph.Layer):
                    def __init__(self):
                        super(Mylayer, self).__init__()
                        self.linear1 = fluid.dygraph.Linear(10, 10)
                        self.linear2 = fluid.dygraph.Linear(5, 5)
                        self.conv2d = fluid.dygraph.Conv2D(3, 2, 3)
                        self.embedding = fluid.dygraph.Embedding(size=[128, 16])
                        self.h_0 = fluid.dygraph.to_variable(np.zeros([10, 10]).astype('float32'))

                mylayer = Mylayer()
                print(dir(mylayer))
                # only part of the output is shown, because the list is too long
                # ['__call__', '__class__',  ... , 'conv2d', 'embedding', 'h_0', 'linear1', 'linear2', ... , 'sublayers', 'train']

        """
        method = dir(self.__class__)
        attrs = list(self.__dict__.keys())
        parameters = list(self._parameters.keys())
        sublayers = list(self._sub_layers.keys())
        buffers = list(self._buffers.keys())

        keys = method + attrs + parameters + sublayers + buffers

        return keys

    def state_dict(self,
                   destination=None,
                   include_sublayers=True,
                   structured_name_prefix=""):
        '''
        Get all parameters and persistable buffers of the current layer and its sub-layers, and set them into a dict.

        Parameters:
            destination(dict, optional) : If provided, all the parameters and persistable buffers will be set into this dict. Default: None
            include_sublayers(bool, optional) : If True, also include the parameters and persistable buffers from sublayers. Default: True

        Returns:
            dict: a dict containing all the parameters and persistable buffers.

        Examples:
            .. code-block:: python

                import paddle

                emb = paddle.nn.Embedding(10, 10)

                state_dict = emb.state_dict()
                paddle.save(state_dict, "paddle_dy.pdparams")

        '''

        if destination is None:
            destination = collections.OrderedDict()
        for name, data in self._parameters.items():
            if data is not None:
                destination[structured_name_prefix + name] = data
        for name, buffer in self._buffers.items():
            if buffer is not None and name not in self._non_persistable_buffer_names_set:
                destination[structured_name_prefix + name] = buffer

        if include_sublayers:
            for layer_name, layer_item in self._sub_layers.items():
                if layer_item is not None:
                    destination_temp = destination.copy()
                    destination_temp.update(
                        layer_item.state_dict(
                            destination_temp, include_sublayers,
                            structured_name_prefix + layer_name + "."))
                    destination = destination_temp
        return destination

    @framework.deprecate_stat_dict
    def set_state_dict(self,
                       state_dict,
                       include_sublayers=True,
                       use_structured_name=True):
        '''
        Set parameters and persistable buffers from state_dict. All the parameters and buffers will be reset with the tensors in the state_dict.

        Parameters:
            state_dict(dict) : Dict containing all the parameters and persistable buffers.
            include_sublayers(bool, optional) : If True, also include the parameters and persistable buffers from sublayers. Default: True
            use_structured_name(bool, optional) : If True, use structured name as key, otherwise, use parameter or buffer name as key.
                                                  Default: True
        Returns:
            None

        Examples:
            .. code-block:: python

                import paddle

                emb = paddle.nn.Embedding(10, 10)

                state_dict = emb.state_dict()
                paddle.save(state_dict, "paddle_dy.pdparams")
                para_state_dict = paddle.load("paddle_dy.pdparams")
                emb.set_state_dict(para_state_dict)

        '''

        def _check_match(key, param):
            state = state_dict.get(key, None)
            if state is None:
                raise ValueError("{} is not found in the provided dict.".format(
                    key))
            if list(state.shape) != list(param.shape):
                raise ValueError(
                    "{} receives a shape {}, but the expected shape is {}.".
                    format(key, list(state.shape), list(param.shape)))
            return param, state

        matched_param_state = []
        for key, param in self.state_dict().items():
            key_name = key if use_structured_name else param.name
            try:
                match_res = _check_match(key_name, param)
                matched_param_state.append(match_res)
            except ValueError as err:
                warnings.warn(("Skip loading for {}. ".format(key) + str(err)))

        if in_dygraph_mode():
            for param, state in matched_param_state:
                param.set_value(state)
        else:

            def _set_var(var, ndarray):
                t = global_scope().find_var(var.name).get_tensor()
                p = t._place()
                if p.is_cpu_place():
                    place = core.CPUPlace()
                elif p.is_cuda_pinned_place():
                    place = core.CUDAPinnedPlace()
                else:
                    p = core.Place()
                    p.set_place(t._place())
                    place = core.CUDAPlace(p.gpu_device_id())
                t.set(ndarray, place)

            executor = Executor(_get_device())._default_executor
            # restore parameter states
            core._create_loaded_parameter(
                [param for param, state in matched_param_state],
                global_scope(), executor)
            for param, state in matched_param_state:
                _set_var(param, state)

    # [aliases] Compatible with old method names
    set_dict = set_state_dict
    load_dict = set_state_dict