# Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
`paddle.v2.layer` is part of the model configuration packages in paddle.v2.
In API v2, we want Paddle to be a plain Python package. The model config
package defines how to configure a neural network topology in Paddle Python
code.

The primary usage is shown below.

..  code-block:: python

    import paddle.v2 as paddle

    img = paddle.layer.data(name='img', type=paddle.data_type.dense_vector(784))
    hidden = paddle.layer.fc(input=img, size=200)
    prediction = paddle.layer.fc(input=hidden, size=10,
                                 act=paddle.activation.Softmax())

    # use prediction instance where needed.
    parameters = paddle.parameters.create(prediction)
"""

import collections
import inspect
from config_base import Layer, __convert_to_v2__
import paddle.trainer_config_helpers as conf_helps
from paddle.trainer_config_helpers.config_parser_utils import \
    parse_network_config as __parse__
from paddle.trainer_config_helpers.default_decorators import wrap_act_default
from paddle.trainer_config_helpers.default_decorators import \
    wrap_bias_attr_default
from paddle.trainer_config_helpers.default_decorators import wrap_name_default
from paddle.trainer_config_helpers.layers import layer_support
from paddle.trainer.config_parser import \
    RecurrentLayerGroupWithoutOutLinksBegin, RecurrentLayerGroupSetOutLink, \
    RecurrentLayerGroupEnd, model_type

import activation
import re
import data_type

__all__ = ['parse_network', 'data']


def parse_network(*outputs):
    """
    Parse all output layers and then generate a ModelConfig object.

    ..  note::

        This function is used internally by the paddle.v2 module. Users
        should never need to invoke it directly.

    :param outputs: Output layers.
    :type outputs: Layer
    :return: A ModelConfig object instance.
    :rtype: ModelConfig
    """

    def __real_func__():
        """
        __real_func__ is the function that config_parser.parse invokes. It
        is a plain old Paddle configuration function.
        """
        context = dict()
        real_output = [each.to_proto(context=context) for each in outputs]
        conf_helps.outputs(real_output)

    return __parse__(__real_func__)
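

# A minimal sketch of the (internal) call, reusing the `prediction` layer
# from the module docstring above:
#
#   model_config = parse_network(prediction)  # -> a ModelConfig protobuf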


"""
Some layers may need special configuration and cannot be converted by
__convert_to_v2__, so we also implement some dedicated LayerV2 classes.
"""


class DataLayerV2(Layer):
    METHOD_NAME = 'data_layer'

    def __init__(self, name, type, **kwargs):
        assert isinstance(type, data_type.InputType)

        self.type = type
        self.__method_name__ = 'data_layer'
        self.__kwargs__ = kwargs

        super(DataLayerV2, self).__init__(name=name, parent_layers=dict())

    def to_proto_impl(self, **kwargs):
        args = dict()
        args['size'] = self.type.dim
        for each in kwargs:
            args[each] = kwargs[each]
        for each in self.__kwargs__:
            args[each] = self.__kwargs__[each]
        return getattr(conf_helps, self.__method_name__)(name=self.name, **args)

    def __map_docstr__(doc):
        doc = re.sub(r'(data = [^\)]+)\).*',
                     "data = paddle.layer.data(name=\"input\", "
                     "type=paddle.data_type.dense_vector(1000))", doc)

        doc = re.sub(r':param size:.*',
                     ':param type: Data type of this data layer', doc)
        doc = re.sub(r':type size:.*',
                     ":type type: paddle.v2.data_type.InputType", doc)
        return doc
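

# The v2 data layer is created through this class rather than through
# __convert_to_v2__ (a sketch mirroring the module docstring; `data` is the
# alias bound near the bottom of this file):
#
#   img = data(name='img', type=data_type.dense_vector(784))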


class WithExtraParent(Layer):
    def extra_parent(self):
        return self.__extra_parent__

    def __init__(self, name=None, parent_layers=None):
        self.__extra_parent__ = []
        super(WithExtraParent, self).__init__(
            name=name, parent_layers=parent_layers)

    def append_extra_parent(self, parent):
        self.__extra_parent__.append(parent)

    def to_proto(self, context):
        """
        Generate the v1 layer configs for this layer, its parents and its
        children, caching each parsed layer in ``context``.
        """
        # Shortcut: return early if this layer was parsed before.
        if self.context_name() in context:
            if self.use_context_name():
                return context[self.context_name()]
            else:
                return context[self.name]

        # parse parents
        kwargs = dict()
        for p in self.__extra_parent__:
            p.to_proto(context=context)

        for layer_name in self.__parent_layers__:
            if not isinstance(self.__parent_layers__[layer_name],
                              collections.Sequence):
                v1_layer = self.__parent_layers__[layer_name].to_proto(
                    context=context)
            else:
                v1_layer = map(lambda x: x.to_proto(context=context),
                               self.__parent_layers__[layer_name])
            kwargs[layer_name] = v1_layer

        # parse self
        if self.context_name() is None:
            return self.to_proto_impl(context=context, **kwargs)
        elif self.context_name() not in context:
            context[self.context_name()] = self.to_proto_impl(
                context=context, **kwargs)

        # parse children.
        for layer, pnames in self.__children_layers__:
            drop = False

            # child will only be parsed if all parents are in context.
            for pname in pnames:
                if pname not in context:
                    drop = True
                    break
            if drop:
                continue
            layer.to_proto(context=context)

        if self.use_context_name():
            return context[self.context_name()]
        else:
            return context[self.name]


class MemoryV2(WithExtraParent):
    def __init__(self, name, extra_input=None, **kwargs):
        self.name = name
        super(MemoryV2, self).__init__(name=name, parent_layers=dict())
        self.__kwargs__ = kwargs
        self.__boot_layer_name__ = None

        if 'boot_layer' in kwargs:
            begin_of_current_rnn = []
            # TODO(yuyang18): Fix inspect, it could be wrong when user invoke a
            # function inside step.
            st = inspect.stack()
            for i in xrange(len(st)):
                locs = st[i][0].f_locals
                keys = locs.keys()
                for key in keys:
                    val = locs[key]
                    if isinstance(val, RecurrentLayerInput):
                        begin_of_current_rnn.append(val)
                    elif isinstance(val, collections.Sequence):
                        for v in val:
                            if isinstance(v, RecurrentLayerInput):
                                begin_of_current_rnn.append(v)

                if begin_of_current_rnn:
                    break
            assert begin_of_current_rnn
            for extra in begin_of_current_rnn:
                self.append_extra_parent(extra)
                assert isinstance(extra, WithExtraParent)
                extra.append_extra_parent(kwargs['boot_layer'])
                self.__boot_layer_name__ = kwargs['boot_layer'].name

    def to_proto_impl(self, context, **kwargs):
        args = dict()
        for each in kwargs:
            args[each] = kwargs[each]
        for each in self.__kwargs__:
            args[each] = self.__kwargs__[each]

        if self.__boot_layer_name__ is not None:
            args['boot_layer'] = context[self.__boot_layer_name__]

        size = args.get('size', None)
        if size is not None:
            if callable(size):
                real_size = size()
            else:
                real_size = size
            args['size'] = real_size
        return conf_helps.memory(name=self.name, **args)

    def context_name(self):
        return self.name + "#memory"

    def use_context_name(self):
        """
        A memory layer shares its name with the layer it reads from, so it
        is stored in the parsing context under context_name() instead.
        """
        return True


class StaticInputV2(object):
    def __init__(self, input, is_seq=False, size=None):
        assert isinstance(input, LayerV2)
        self.name = input.name
        self.input = input
        self.is_seq = is_seq
        self.size = size
        # TODO(add size check)
        # assert input.size is not None or size is not None
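

# A sketch of how a StaticInputV2 is consumed (hypothetical `encoded`, `emb`
# and `step` names; static inputs are turned into read-only memories by
# recurrent_group below):
#
#   static_in = StaticInputV2(input=encoded, is_seq=False)
#   rnn = recurrent_group(step=step, input=[emb, static_in])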


class MixedLayerV2(Layer):
    """
    This class exists to support the `with` statement. Without it,
    mixed_layer could simply be converted with:

        mixed = __convert_to_v2__(
            'mixed_layer', name_prefix='mixed', parent_names=['input'])
    """

    class AddToSealedMixedLayerExceptionV2(Exception):
        pass

    def __init__(self,
                 size=0,
                 input=None,
                 name=None,
                 act=None,
                 bias_attr=None,
                 layer_attr=None):
        self.__method_name__ = 'mixed_layer'
        self.finalized = False
        self.__inputs__ = []
        if input is not None:
            self.__inputs__ = input

        other_kwargs = dict()
        other_kwargs['name'] = name
        other_kwargs['size'] = size
        other_kwargs['act'] = act
        other_kwargs['bias_attr'] = bias_attr
        other_kwargs['layer_attr'] = layer_attr
        parent_layers = {"input": self.__inputs__}
        super(MixedLayerV2, self).__init__(name, parent_layers)
        self.__other_kwargs__ = other_kwargs

    def __iadd__(self, other):
        if not self.finalized:
            self.__inputs__.append(other)
            return self
        else:
            raise MixedLayerV2.AddToSealedMixedLayerExceptionV2()

    def __enter__(self):
        assert len(self.__inputs__) == 0
        return self

    def __exit__(self, *args, **kwargs):
        self.finalized = True

    def to_proto_impl(self, **kwargs):
        args = dict()
        for each in kwargs:
            args[each] = kwargs[each]
        for each in self.__other_kwargs__:
            args[each] = self.__other_kwargs__[each]
        size = args.get('size', None)
        if size is not None:
            if callable(size):
                real_size = size()
            else:
                real_size = size
            args['size'] = real_size
        return getattr(conf_helps, self.__method_name__)(**args)


@wrap_name_default("mixed")
@wrap_act_default(act=activation.Linear())
@wrap_bias_attr_default(has_bias=False)
@layer_support(conf_helps.layers.ERROR_CLIPPING, conf_helps.layers.DROPOUT)
def mixed(size=0,
          name=None,
          input=None,
          act=None,
          bias_attr=False,
          layer_attr=None):
    return MixedLayerV2(size, input, name, act, bias_attr, layer_attr)
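

# A usage sketch of the two mixed-layer styles (hypothetical `prev` layer;
# full_matrix_projection is one of the v1 projections converted at the
# bottom of this module):
#
#   m = mixed(size=256, input=[full_matrix_projection(input=prev)])
#
#   with mixed(size=256) as m:
#       m += full_matrix_projection(input=prev)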


class RecurrentLayerInput(WithExtraParent):
    def __init__(self, recurrent_name, index, parent_layers):
        parents_len = len(parent_layers)
        assert parents_len <= 1
        if parents_len == 0:
            self.__parents__ = []
        else:
            self.__parents__ = parent_layers.values()[0]
        self.__recurrent_name__ = recurrent_name
        name = self.__parents__[index].name if index >= 0 else self.context_name()
        super(RecurrentLayerInput, self).__init__(
            name=name, parent_layers=parent_layers)

    def context_name(self):
        return self.__recurrent_name__ + ".begin"

    def to_proto_impl(self, context, **kwargs):
        model_type('recurrent_nn')
        RecurrentLayerGroupWithoutOutLinksBegin(
            name=self.__recurrent_name__,
            in_links=map(lambda x: x.name, self.__parents__))
        return self


class RecurrentLayerOutput(Layer):
    def __init__(self, recurrent_name, index, parent_layers):
        assert len(parent_layers) == 1
        self.__parents__ = parent_layers.values()[0]
        super(RecurrentLayerOutput, self).__init__(
            name=self.__parents__[index].name, parent_layers=parent_layers)
        self.__recurrent_name__ = recurrent_name

    def context_name(self):
        return self.__recurrent_name__ + ".end"

    def to_proto_impl(self, **kwargs):
        for l in self.__parents__:
            RecurrentLayerGroupSetOutLink(l.name)
        RecurrentLayerGroupEnd(name=self.__recurrent_name__)


LayerV2 = Layer
data = DataLayerV2
data.__name__ = 'data'
AggregateLevel = conf_helps.layers.AggregateLevel
ExpandLevel = conf_helps.layers.ExpandLevel
memory = MemoryV2


def __layer_name_mapping__(inname):
    if inname in ['data_layer', 'memory', 'mixed_layer', 'recurrent_group']:
        # Do Not handle these layers
        return
    elif inname == 'maxid_layer':
        return 'max_id'
    elif inname.endswith('memory') or inname.endswith(
            '_seq') or inname.endswith('_sim') or inname == 'hsigmoid':
        return inname
    elif inname in [
            'cross_entropy', 'multi_binary_label_cross_entropy',
            'cross_entropy_with_selfnorm'
    ]:
        return inname + "_cost"
    elif inname.endswith('_cost'):
        return inname
    elif inname.endswith("_layer"):
        return inname[:-len("_layer")]


def __layer_name_mapping_parent_names__(inname):
    all_args = getattr(conf_helps, inname).argspec.args
    return filter(
        lambda x: x in ['input1', 'input2', 'label', 'input', 'a', 'b',
                        'expand_as',
                        'weights', 'vectors', 'weight', 'score', 'left',
                        'right', 'output_mem'],
        all_args)


def __convert_layer__(_new_name_, _old_name_, _parent_names_):
    global __all__
    __all__.append(_new_name_)
    globals()[_new_name_] = __convert_to_v2__(_old_name_, _parent_names_)
    globals()[_new_name_].__name__ = _new_name_


for each_layer_name in dir(conf_helps):
    new_name = __layer_name_mapping__(each_layer_name)
    if new_name is not None:
        parent_names = __layer_name_mapping_parent_names__(each_layer_name)
        assert len(parent_names) != 0, each_layer_name
        __convert_layer__(new_name, each_layer_name, parent_names)

del parent_names
del new_name
del each_layer_name
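
# Under the mapping above, e.g. the v1 "fc_layer" is exposed here as `fc`,
# "maxid_layer" as `max_id` and "cross_entropy" as `cross_entropy_cost`
# (a sketch of the resulting v2 surface, not an exhaustive list):
#
#   hidden = fc(input=img, size=200)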


@wrap_name_default()
def recurrent_group(step, input, name=None):
    if not isinstance(input, collections.Sequence):
        input = [input]

    non_static_inputs = filter(lambda x: not isinstance(x, StaticInputV2),
                               input)
    static_inputs = filter(lambda x: isinstance(x, StaticInputV2), input)
    static_inputs = [static_input.input for static_input in static_inputs]

    actual_input = [
        RecurrentLayerInput(
            recurrent_name=name,
            index=i,
            parent_layers={'recurrent_inputs': non_static_inputs})
        for i in xrange(len(non_static_inputs))
    ]

    extra_input = None
    if len(non_static_inputs) == 0:
        extra_input = RecurrentLayerInput(
            recurrent_name=name,
            index=-1,
            parent_layers={})

    def __real_step__(*args):
        rnn_input = list(args)
        static_inputs = filter(lambda x: isinstance(x, StaticInputV2), input)
        for static_input in static_inputs:
            mem_name = "__%s_memory__" % static_input.input.name
            mem = memory(
                name=mem_name,
                extra_input=extra_input,
                is_seq=static_input.is_seq,
                size=static_input.input.calculate_size,
                boot_layer=static_input.input)
            with mixed(
                    name=mem_name,
                    size=static_input.input.calculate_size,
                    act=activation.Identity()) as mix:
                mix += identity_projection(input=mem)
            mem.append_child(layer=mix, parent_names=[mem.context_name()])
            rnn_input.insert(input.index(static_input), mem)
        return step(*rnn_input)

    actual_output = __real_step__(*actual_input)

    if not isinstance(actual_output, collections.Sequence):
        actual_output = [actual_output]

    retv = [
        RecurrentLayerOutput(
            recurrent_name=name,
            index=i,
            parent_layers={'recurrent_outputs': actual_output})
        for i in xrange(len(actual_output))
    ]
    if len(retv) == 1:
        return retv[0]
    else:
        return retv
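

# A minimal step-function sketch (hypothetical names; `emb` would be a
# sequence layer such as an embedding; memory and fc share the name 'state'
# so the memory reads fc's output from the previous time step):
#
#   def step(y):
#       mem = memory(name='state', size=128)
#       out = fc(input=[y, mem], size=128, act=activation.Tanh(),
#                name='state')
#       return out
#
#   rnn = recurrent_group(step=step, input=emb)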


__projection_names__ = filter(lambda x: x.endswith('_projection'),
                              dir(conf_helps))

__all__ += __projection_names__

__operator_names__ = filter(lambda x: x.endswith('_operator'), dir(conf_helps))
__all__ += __operator_names__

# convert projection
for prj in __projection_names__:
    globals()[prj] = __convert_to_v2__(
        prj, parent_names=['input'], is_default_name=False)
    globals()[prj].__name__ = prj

# convert operator
operator_list = [
    # [V1_method_name, parent_names],
    ['dotmul_operator', ['a', 'b']],
    ['conv_operator', ['img', 'filter']]
]
for op in operator_list:
    globals()[op[0]] = __convert_to_v2__(
        op[0], parent_names=op[1], is_default_name=False)
    globals()[op[0]].__name__ = op[0]
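
# E.g., after this conversion the operators can feed a mixed layer (a
# sketch with hypothetical `layer_a`/`layer_b` of matching sizes):
#
#   with mixed(size=100) as m:
#       m += dotmul_operator(a=layer_a, b=layer_b)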