From 6dd21658dc96ac46c22c2287a21cd78c1f616667 Mon Sep 17 00:00:00 2001
From: Yu Yang
Date: Wed, 1 Mar 2017 14:07:04 +0800
Subject: [PATCH] Add config_base.py for Layer

---
 python/paddle/v2/config_base.py | 72 ++++++++++++++++++++++++++++++++
 python/paddle/v2/layer.py       | 73 +--------------------------------
 2 files changed, 73 insertions(+), 72 deletions(-)
 create mode 100644 python/paddle/v2/config_base.py

diff --git a/python/paddle/v2/config_base.py b/python/paddle/v2/config_base.py
new file mode 100644
index 00000000000..c733f4866dc
--- /dev/null
+++ b/python/paddle/v2/config_base.py
@@ -0,0 +1,72 @@
+import collections
+
+from paddle.trainer_config_helpers.default_decorators import wrap_name_default
+import paddle.trainer_config_helpers as conf_helps
+
+
+class Layer(object):
+    def __init__(self, name=None, parent_layers=None):
+        assert isinstance(parent_layers, dict)
+        self.name = name
+        self.__parent_layers__ = parent_layers
+
+    def to_proto(self, context):
+        """
+        function to set proto attribute
+        """
+        kwargs = dict()
+        for layer_name in self.__parent_layers__:
+            if not isinstance(self.__parent_layers__[layer_name],
+                              collections.Sequence):
+                v1_layer = self.__parent_layers__[layer_name].to_proto(
+                    context=context)
+            else:
+                v1_layer = map(lambda x: x.to_proto(context=context),
+                               self.__parent_layers__[layer_name])
+            kwargs[layer_name] = v1_layer
+
+        if self.name is None:
+            return self.to_proto_impl(**kwargs)
+        elif self.name not in context:
+            context[self.name] = self.to_proto_impl(**kwargs)
+
+        return context[self.name]
+
+    def to_proto_impl(self, **kwargs):
+        raise NotImplementedError()
+
+
+def __convert_to_v2__(method_name, parent_names, is_default_name=True):
+    if is_default_name:
+        wrapper = wrap_name_default(name_prefix=method_name)
+    else:
+        wrapper = None
+
+    class V2LayerImpl(Layer):
+        def __init__(self, **kwargs):
+            parent_layers = dict()
+            other_kwargs = dict()
+            for pname in parent_names:
+                if kwargs.has_key(pname):
+                    parent_layers[pname] = kwargs[pname]
+
+            for key in kwargs.keys():
+                if key not in parent_names:
+                    other_kwargs[key] = kwargs[key]
+
+            name = kwargs.get('name', None)
+            super(V2LayerImpl, self).__init__(name, parent_layers)
+            self.__other_kwargs__ = other_kwargs
+
+        if wrapper is not None:
+            __init__ = wrapper(__init__)
+
+        def to_proto_impl(self, **kwargs):
+            args = dict()
+            for each in kwargs:
+                args[each] = kwargs[each]
+            for each in self.__other_kwargs__:
+                args[each] = self.__other_kwargs__[each]
+            return getattr(conf_helps, method_name)(**args)
+
+    return V2LayerImpl
diff --git a/python/paddle/v2/layer.py b/python/paddle/v2/layer.py
index 19f3c2f998d..67111f1315f 100644
--- a/python/paddle/v2/layer.py
+++ b/python/paddle/v2/layer.py
@@ -65,10 +65,7 @@ to be in a Python function but could be anywhere. Also, the creation of a
 protobuf message is hidden in the invocation of
 paddle.v2.parameters.create, no longer exposed to users.
 """
-
-import collections
-import inspect
-
+from config_base import Layer, __convert_to_v2__
 import paddle.trainer_config_helpers as conf_helps
 from paddle.trainer_config_helpers.config_parser_utils import \
     parse_network_config as __parse__
@@ -107,74 +104,6 @@ def parse_network(*outputs):
     return __parse__(__real_func__)
 
 
-class Layer(object):
-    def __init__(self, name=None, parent_layers=None):
-        assert isinstance(parent_layers, dict)
-        self.name = name
-        self.__parent_layers__ = parent_layers
-
-    def to_proto(self, context):
-        """
-        function to set proto attribute
-        """
-        kwargs = dict()
-        for layer_name in self.__parent_layers__:
-            if not isinstance(self.__parent_layers__[layer_name],
-                              collections.Sequence):
-                v1_layer = self.__parent_layers__[layer_name].to_proto(
-                    context=context)
-            else:
-                v1_layer = map(lambda x: x.to_proto(context=context),
-                               self.__parent_layers__[layer_name])
-            kwargs[layer_name] = v1_layer
-
-        if self.name is None:
-            return self.to_proto_impl(**kwargs)
-        elif self.name not in context:
-            context[self.name] = self.to_proto_impl(**kwargs)
-
-        return context[self.name]
-
-    def to_proto_impl(self, **kwargs):
-        raise NotImplementedError()
-
-
-def __convert_to_v2__(method_name, parent_names, is_default_name=True):
-    if is_default_name:
-        wrapper = wrap_name_default(name_prefix=method_name)
-    else:
-        wrapper = None
-
-    class V2LayerImpl(Layer):
-        def __init__(self, **kwargs):
-            parent_layers = dict()
-            other_kwargs = dict()
-            for pname in parent_names:
-                if kwargs.has_key(pname):
-                    parent_layers[pname] = kwargs[pname]
-
-            for key in kwargs.keys():
-                if key not in parent_names:
-                    other_kwargs[key] = kwargs[key]
-
-            name = kwargs.get('name', None)
-            super(V2LayerImpl, self).__init__(name, parent_layers)
-            self.__other_kwargs__ = other_kwargs
-
-        if wrapper is not None:
-            __init__ = wrapper(__init__)
-
-        def to_proto_impl(self, **kwargs):
-            args = dict()
-            for each in kwargs:
-                args[each] = kwargs[each]
-            for each in self.__other_kwargs__:
-                args[each] = self.__other_kwargs__[each]
-            return getattr(conf_helps, method_name)(**args)
-
-    return V2LayerImpl
-
-
 """
 Some layer may need some special config, and can not use __convert_to_v2__ to
 convert. So we also need to implement some special LayerV2.
--
GitLab