layers.py 15.0 KB
Newer Older
1 2 3 4 5 6 7 8 9 10 11 12 13 14
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

X
Xin Pan 已提交
15
import collections
16 17 18
import contextlib
import sys
import numpy as np
M
minqiyang 已提交
19
import collections
20
import six
C
chengduo 已提交
21
from . import parallel_helper
X
Xin Pan 已提交
22
from .. import unique_name
23
from paddle.fluid import core
24
from .layer_object_helper import LayerObjectHelper
25
from .base import program_desc_tracing_guard
26
from paddle.fluid import framework
27
from ..param_attr import ParamAttr
28

29
__all__ = ['Layer']
30 31


X
Xin Pan 已提交
32
class Layer(core.Layer):
    """Dynamic graph Layer based on OOD (object-oriented design); it holds the
    parameters of the layer, the structure of the forward graph and so on.

    Parameters:
        name_scope (str, optional): prefix name used by the layer to name parameters.
            If prefix is "my_layer", parameter name in MyLayer
            can be "mylayer_0.w_n", where w is the parameter
            base name and n is an unique suffix auto-generated.
            If None, prefix name will be lower cased class name. Default: None.
        dtype(str or core.VarDesc.VarType, optional): data type of this parameter.
                If set str, it can be "bool",  "float16", "float32", "float64",
                "int8", "int16", "int32", "int64", "uint8" or "uint16".
                Default: ``core.VarDesc.VarType.FP32``

    Returns:
        None
    """
X
Xin Pan 已提交
49

50 51 52 53 54 55 56 57 58
    def __init__(self, name_scope=None, dtype=core.VarDesc.VarType.FP32):
        """Set up internal state: a unique full name, the op helper, and
        ordered containers for parameters, sub-layers and loaded state."""
        if name_scope is None:
            # No explicit scope: fall back to the lower-cased class name.
            prefix = self.__class__.__name__.lower()
            self._full_name = unique_name.generate(prefix)
        else:
            # TODO: remove name_scope parameter and all hard-coded usages
            self._full_name = unique_name.generate(
                "/".join([name_scope, self.__class__.__name__]))
        self._helper = LayerObjectHelper(self._full_name)
        self._built = False
        self._dtype = dtype
        self._parameters = collections.OrderedDict()
        self._sub_layers = collections.OrderedDict()
        self._loaddict_holder = collections.OrderedDict()
65

M
minqiyang 已提交
66
    def train(self):
        """Switch the global dygraph tracer into training mode."""
        tracer = framework._dygraph_tracer()
        tracer.train_mode()
M
minqiyang 已提交
68 69

    def eval(self):
        """Switch the global dygraph tracer into evaluation mode."""
        tracer = framework._dygraph_tracer()
        tracer.eval_mode()
M
minqiyang 已提交
71

X
Xin Pan 已提交
72
    def full_name(self):
        """Full name of this layer, composed as name_scope + "/" + class name.

        Returns:
            str: full name of this layer.
        """
        return self._full_name

80 81
    def create_parameter(self,
                         shape,
                         attr=None,
                         dtype='float32',
                         is_bias=False,
                         default_initializer=None):
        """Create a parameter owned by this layer.

        Parameters:
            shape(list): Shape of the parameter.
            attr(ParamAttr, optional): Parameter attribute of weight. Please refer to :ref:`api_fluid_ParamAttr`. Default: None.
            dtype(str or core.VarDesc.VarType, optional): Data type of this parameter.
                If set str, it can be "bool",  "float16", "float32", "float64",
                "int8", "int16", "int32", "int64", "uint8" or "uint16". Default: "float32".
            is_bias(bool, optional): if this is a bias parameter. Default: False.
            default_initializer(Initializer, optional): the default initializer for this parameter.
                If set None, default initializer will be set to :ref:`api_fluid_initializer_XavierInitializer` and :ref:`api_fluid_initializer_ConstantInitializer`
                for non-bias and bias parameter, respectively. Default: None.

        Returns:
            :ref:`api_guide_Variable_en` : created parameter.
        """
        # Prefix the attribute's name with this layer's full name so the
        # parameter name stays unique across layers.
        if isinstance(attr, ParamAttr):
            if attr.name is not None:
                attr.name = ".".join([self._full_name, attr.name])
        elif isinstance(attr, six.string_types):
            attr = ".".join([self._full_name, attr])
        return self._helper.create_parameter(attr, shape, dtype, is_bias,
                                             default_initializer)

    # TODO: Add more parameter list when we need them
    def create_variable(self,
                        name=None,
                        persistable=None,
                        dtype=None,
                        type=core.VarDesc.VarType.LOD_TENSOR):
        """Create a Variable in the main program, scoped under this layer.

        Parameters:
            name(str, optional): name of the variable. Please refer to :ref:`api_guide_Name` . Default: None
            persistable(bool, optional): if set this variable persistable. Default: False
            dtype(str or core.VarDesc.VarType, optional): data type of this parameter.
                If set str, it can be "bool",  "float16", "float32", "float64",
                "int8", "int16", "int32", "int64", "uint8" or "uint16".
                If set None, it will be ``core.VarDesc.VarType.FP32``. Default: None
            type(core.VarDesc.VarType, optional): type of the variable. No need to set this parameter. Default: ``core.VarDesc.VarType.LOD_TENSOR``

        Returns:
            :ref:`api_guide_Variable_en` : created Variable.
        """
        if name is None:
            # Auto-generate a unique, layer-scoped name.
            var_name = unique_name.generate(".".join(
                [self._full_name, "_generated_var"]))
        else:
            var_name = ".".join([self._full_name, name])

        current_block = self._helper.main_program.current_block()
        return current_block.create_var(
            name=var_name, persistable=persistable, dtype=dtype, type=type)

X
polish  
Xin Pan 已提交
138
    def parameters(self, include_sublayers=True):
        """Returns a list of all Parameters from current layer and its sub-layers.

        Parameters:
            include_sublayers(bool, optional): Whether include the parameters of sublayers. If True, also include the parameters from sublayers. Default: True

        Returns:
            list of :ref:`api_guide_Variable_en` : a list of Parameters.
        """
        # Own parameters first, then (optionally) each sub-layer's, in
        # registration order.
        params = list(self._parameters.values())
        if include_sublayers:
            for sub_layer in self._sub_layers.values():
                params.extend(sub_layer.parameters(include_sublayers))
        return params
X
Xin Pan 已提交
153

X
Xin Pan 已提交
154 155 156
    def sublayers(self, include_sublayers=True):
        """Returns a list of sub layers.

        Parameters:
            include_sublayers(bool, optional): Whether return the sublayers of sublayers. If True, also include the sublayers of sublayers. Default: True

        Returns:
            list of Layer : a list of sub layers.
        """
        # Direct children first, then (optionally) their descendants.
        found = list(self._sub_layers.values())
        if include_sublayers:
            for child in self._sub_layers.values():
                found.extend(child.sublayers(include_sublayers))
        return found

X
Xin Pan 已提交
170 171
    def clear_gradients(self):
        """Clear the gradient of every trainable parameter of this layer
        and of all its sub-layers."""
        for param in self.parameters():
            if param.trainable:
                param.clear_gradient()
X
Xin Pan 已提交
174

175
    def _build_once(self, *args, **kwargs):
        """One-time build hook, invoked by ``__call__`` before the first
        ``forward`` (see ``__call__``: it runs only while ``self._built`` is
        False).  Subclasses may override it; the default is a no-op.
        """
        pass

178
    def __call__(self, *inputs, **kwargs):
        # Lazily build on the first call: _build_once may create parameters
        # whose shapes depend on the actual inputs.  The guard is entered
        # with False — presumably to disable program-desc tracing while
        # building (see base.program_desc_tracing_guard); confirm.
        if not self._built:
            with program_desc_tracing_guard(False):
                self._build_once(*inputs, **kwargs)
                if parallel_helper._is_data_parallel_mode():
                    # Freshly created parameters are broadcast so every
                    # data-parallel worker starts from identical values.
                    parallel_helper._broadcast_parameters(
                        self._parameters.values())

        outputs = self.forward(*inputs, **kwargs)
        # Mark built only after forward succeeds.
        self._built = True
        return outputs
M
minqiyang 已提交
189

190
    def forward(self, *inputs, **kwargs):
        """Computation performed at every call.

        Every subclass must override this method; the base implementation
        always raises.

        Parameters:
            *inputs(tuple): unpacked tuple arguments
            **kwargs(dict): unpacked dict arguments
        """
        raise NotImplementedError
X
Xin Pan 已提交
200 201 202 203

    def backward(self, *inputs):
        # A Layer is not allowed to define its own backward pass; calling
        # this always raises.
        raise ValueError("Layer shouldn't implement backward")

X
Xin Pan 已提交
204 205 206
    def add_sublayer(self, name, sublayer):
        """Register a sub Layer instance.

        Once added, the sublayer is reachable as ``self.<name>`` (resolved
        through ``__getattr__``).

        Parameters:
            name(str): name of this sublayer.
            sublayer(Layer): an instance of Layer.

        Returns:
            Layer: the sublayer passed in.
        """
        assert isinstance(sublayer, core.Layer)
        self._sub_layers[name] = sublayer
        return sublayer

    def add_parameter(self, name, parameter):
        """Adds a Parameter instance.

        Added parameter can be accessed by self.name

        Parameters:
            name(str): name of this parameter.
            parameter(Parameter): an instance of Parameter.

        Returns:
            Parameter: the parameter passed in.
        """
        assert isinstance(parameter, framework.Parameter)

        # If a state dict was loaded earlier (load_dict/set_dict), a newly
        # registered parameter must exist there and is immediately
        # overwritten with the stored value.
        if len(self._loaddict_holder) > 0:
            assert parameter.name in self._loaddict_holder, "Parameter not found, Can't not find [ {} ] in stat_dict".format(
                parameter.name)

            parameter.set_value(self._loaddict_holder[parameter.name])

        self._parameters[name] = parameter
        return parameter

X
Xin Pan 已提交
242 243 244 245 246
    def __getattr__(self, name):
        """Resolve names registered via add_parameter / add_sublayer.

        Only invoked when normal attribute lookup fails; parameters are
        checked before sub-layers.
        """
        params = self._parameters
        if name in params:
            return params[name]
        layers = self._sub_layers
        if name in layers:
            return layers[name]
        return object.__getattribute__(self, name)
X
Xin Pan 已提交
249 250

    def __setattr__(self, name, value):
        # A property defined on the class is assigned through the normal
        # mechanism so its setter runs.  NOTE(review): there is no early
        # return here, so a Parameter/Layer value also falls through to the
        # bookkeeping below — confirm this double handling is intended.
        if isinstance(getattr(type(self), name, None), property):
            object.__setattr__(self, name, value)
        if isinstance(value, framework.Parameter):
            params = self.__dict__.get('_parameters', None)
            if params is None:
                raise ValueError(
                    "super(YourLayer, self).__init__() should be called first")
            # If a state dict was loaded earlier, the new parameter must be
            # present in it and is immediately overwritten with its value.
            if len(self._loaddict_holder) > 0:
                assert value.name in self._loaddict_holder, "Parameter not found, Can't not find [ {} ] in stat_dict".format(
                    value.name)

                value.set_value(self._loaddict_holder[value.name])

            if name in params:
                # remove unused param in tracer
                if framework._dygraph_tracer_ is not None:
                    framework._dygraph_tracer_._vars.pop(params[name].name,
                                                         None)
            params[name] = value
        elif isinstance(value, core.Layer):
            layers = self.__dict__.get('_sub_layers', None)
            if layers is None:
                raise ValueError(
                    "super(YourLayer, self).__init__() should be called first")
            layers[name] = value
        else:
            # Plain attribute: default behavior.
            object.__setattr__(self, name, value)

    def __delattr__(self, name):
        """Remove a registered parameter or sub-layer by name, falling back
        to ordinary attribute deletion."""
        if name in self._parameters:
            self._parameters.pop(name)
        elif name in self._sub_layers:
            self._sub_layers.pop(name)
        else:
            object.__delattr__(self, name)

287
    def state_dict(self, destination=None, include_sublayers=True):
        '''
        Get all parameters of current layer and its sub-layers. And set all the parameters into a dict

        Parameters:
            destination(dict, optional) : If provide, all the parameters will set to this dict . Default: None
            include_sublayers(bool, optional) : If true, also include the parameters from sublayers. Default: True

        Returns:
            dict: a dict contains all the parameters

        Examples:
            .. code-block:: python

                import paddle.fluid as fluid
                with fluid.dygraph.guard():
                    emb = fluid.dygraph.Embedding([10, 10])

                    state_dict = emb.state_dict()
                    fluid.save_dygraph( state_dict, "paddle_dy")

        '''
        if destination is None:
            destination = collections.OrderedDict()
        # Entries are keyed by the parameter's own (globally unique) name,
        # not by the attribute name it was registered under.
        for _, param in self._parameters.items():
            if param is not None:
                destination[param.name] = param

        if include_sublayers:
            for _, sub_layer in self._sub_layers.items():
                if sub_layer is None:
                    continue
                # Recurse on a copy and adopt the merged result, so the dict
                # object originally handed in is never mutated by sub-layers.
                merged = destination.copy()
                merged.update(
                    sub_layer.state_dict(merged, include_sublayers))
                destination = merged
        return destination

H
hong 已提交
326 327
    def set_dict(self, stat_dict, include_sublayers=True):
        '''
        Set parameters from stat_dict. All the parameters will be reset by the tensor in the stat_dict

        Parameters:
            stat_dict(dict) : Dict contains all the parameters
            include_sublayers(bool, optional) : If true, also include the parameters from sublayers. Default: True
        Returns:
            None

        Examples:
            .. code-block:: python

                import paddle.fluid as fluid
                with fluid.dygraph.guard():
                    emb = fluid.dygraph.Embedding([10, 10])

                    state_dict = emb.state_dict()
                    fluid.save_dygraph( state_dict, "paddle_dy")

                    para_state_dict, _ = fluid.load_dygraph( "paddle_dy")

                    emb.set_dict( para_state_dict )

        '''
        # Thin wrapper: load_dict carries the actual implementation.
        self.load_dict(stat_dict, include_sublayers=include_sublayers)

353
    def load_dict(self, stat_dict, include_sublayers=True):
        '''
        Set parameters from stat_dict. All the parameters will be reset by the tensor in the stat_dict

        This api will be Deprecated. Please use set_dict

        Parameters:
            stat_dict(dict) : Dict contains all the parameters
            include_sublayers(bool, optional) : If true, also include the parameters from sublayers. Default: True
        Returns:
            None

        Examples:
            .. code-block:: python

                import paddle.fluid as fluid
                with fluid.dygraph.guard():
                    emb = fluid.dygraph.Embedding([10, 10])

                    state_dict = emb.state_dict()
                    fluid.save_dygraph( state_dict, "paddle_dy")

                    para_state_dict, _ = fluid.load_dygraph( "paddle_dy")

                    emb.load_dict( para_state_dict )

        '''
        # Remember the dict so parameters created *after* this call
        # (via add_parameter / __setattr__) are also initialized from it.
        self._loaddict_holder = stat_dict

        params = self.__dict__.get('_parameters', None)
        if params is None:
            # Explicit error instead of an opaque AttributeError on None,
            # consistent with the check in __setattr__.
            raise ValueError(
                "super(YourLayer, self).__init__() should be called first")

        for name, item in params.items():
            if item.name in stat_dict:
                item.set_value(stat_dict[item.name])
            else:
                raise RuntimeError(
                    "Parameter not found, Can't not find [ {} ] in stat_dict".
                    format(item.name))

        if include_sublayers:
            for layer_name, layer_item in self._sub_layers.items():
                if layer_item is not None:
                    # Propagate the flag so nested layers behave uniformly
                    # (the original recursion silently relied on the default).
                    layer_item.load_dict(
                        stat_dict, include_sublayers=include_sublayers)