config_base.py
# Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import collections
import re
from paddle.trainer_config_helpers.default_decorators import wrap_name_default
import paddle.trainer_config_helpers as conf_helps


class LayerType(type):
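    # Metaclass for the generated v2 layer classes: when a class declares
    # METHOD_NAME, the docstring of the matching v1 helper from
    # paddle.trainer_config_helpers is copied over and rewritten by
    # __map_docstr__.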
    def __new__(cls, name, bases, attrs):
        method_name = attrs.get('METHOD_NAME', None)
        if method_name is not None:
            method = getattr(conf_helps, method_name)
            if method.__doc__ is not None:
                mapper = attrs.get("__map_docstr__", None)
                if mapper is not None:
                    attrs['__doc__'] = LayerType.__map_docstr__(
                        mapper(method.__doc__),
                        method_name=method_name,
                        name=name)
                else:
                    attrs['__doc__'] = LayerType.__map_docstr__(
                        method.__doc__, method_name=method_name, name=name)
        return super(LayerType, cls).__new__(cls, name, bases, attrs)

    @staticmethod
    def __map_docstr__(doc, name, method_name):
        assert isinstance(doc, basestring)

        # replace LayerOutput with paddle.v2.config_base.Layer
        doc = doc.replace("LayerOutput", "paddle.v2.config_base.Layer")

        # xxx_layer to xxx
        doc = re.sub(r"(?P<name>[a-z]+)_layer", r"\g<name>", doc)

        # XxxxActivation to paddle.v2.Activation.Xxxx
        doc = re.sub(r"(?P<name>[A-Z][a-zA-Z]+)Activation",
                     r"paddle.v2.Activation.\g<name>", doc)

        # TODO(yuyang18): Add more rules if needed.
        return doc


class Layer(object):
    __metaclass__ = LayerType

    def __init__(self, name=None, parent_layers=None):
        assert isinstance(parent_layers, dict)
        self.name = name
        self.__contex__ = {}
        self.__parent_layers__ = parent_layers

    def to_proto(self, context):
        """
        Convert this layer and, recursively, its parent layers into the
        underlying v1 layer configuration. The result of `to_proto_impl` is
        cached in `context` under `context_name()`, so a layer shared by
        multiple outputs is only converted once.
        """
        kwargs = dict()
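        # Convert every parent layer (or list of parent layers) to its v1
        # representation first; the converted parents are passed to
        # to_proto_impl as keyword arguments.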
        for layer_name in self.__parent_layers__:
            if not isinstance(self.__parent_layers__[layer_name],
                              collections.Sequence):
                v1_layer = self.__parent_layers__[layer_name].to_proto(
                    context=context)
            else:
                v1_layer = map(lambda x: x.to_proto(context=context),
                               self.__parent_layers__[layer_name])
            kwargs[layer_name] = v1_layer

        if self.context_name() is None:
            return self.to_proto_impl(**kwargs)
        elif self.context_name() not in context:
            context[self.context_name()] = self.to_proto_impl(**kwargs)
        self.__contex__ = context
        if self.use_context_name():
            return context[self.context_name()]
        else:
            return context[self.name]

    def to_proto_impl(self, **kwargs):
        raise NotImplementedError()

    def context_name(self):
        """
        The context name is the key under which the result of `to_proto_impl`
        is stored in the context. If multiple layers share the same
        context_name, their `to_proto_impl` will be invoked only once.
        """
        return self.name
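
    # When use_context_name() returns True, to_proto() returns the cached
    # result stored under context_name() rather than under self.name;
    # subclasses that alias another layer's cached output can override it.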

    def use_context_name(self):
        return False

    def calculate_size(self):
        """
        Lazily calculate the size of this layer; it should only be called
        after `to_proto_impl` of this layer has been invoked.
        :return: the size of this layer.
        """
        return self.__contex__[self.context_name()].size


def __convert_to_v2__(method_name, parent_names, is_default_name=True):
    if is_default_name:
        wrapper = wrap_name_default(name_prefix=method_name)
    else:
        wrapper = None

    class V2LayerImpl(Layer):
        METHOD_NAME = method_name

        def __init__(self, **kwargs):
            parent_layers = dict()
            other_kwargs = dict()
            for pname in parent_names:
                if pname in kwargs:
                    parent_layers[pname] = kwargs[pname]

            for key in kwargs.keys():
                if key not in parent_names:
                    other_kwargs[key] = kwargs[key]

            name = kwargs.get('name', None)
            super(V2LayerImpl, self).__init__(name, parent_layers)
            self.__other_kwargs__ = other_kwargs
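
        # If a default-name wrapper was requested, apply it so a generated
        # name (prefixed with method_name) is filled in when the caller
        # omits `name`.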

        if wrapper is not None:
            __init__ = wrapper(__init__)

        def to_proto_impl(self, **kwargs):
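            # Merge the already-converted parent layers (kwargs) with the
            # remaining construction-time keyword arguments, then call the
            # original v1 helper from paddle.trainer_config_helpers.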
            args = dict()
            for each in kwargs:
                args[each] = kwargs[each]
            for each in self.__other_kwargs__:
                args[each] = self.__other_kwargs__[each]
            return getattr(conf_helps, method_name)(**args)

    return V2LayerImpl
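

# A minimal usage sketch (assumed names, for illustration only; the real v2
# wrappers are generated elsewhere, e.g. in paddle.v2.layer):
#
#   fc = __convert_to_v2__('fc_layer', parent_names=['input'])
#   hidden = fc(input=some_parent_layer, size=128)
#   hidden.to_proto(context={})  # builds the underlying v1 fc_layer config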