diff --git a/python/paddle/trainer_config_helpers/layers.py b/python/paddle/trainer_config_helpers/layers.py
index b68460b6a3ab621904f4dc4e48352044ab265a38..b94f8f9a783552519ca73e7cfc0937b302d3445b 100755
--- a/python/paddle/trainer_config_helpers/layers.py
+++ b/python/paddle/trainer_config_helpers/layers.py
@@ -795,17 +795,16 @@ def data_layer(name, size, height=None, width=None, layer_attr=None):

     .. code-block:: python

-        data = data_layer(name="input",
-                          size=1000)
+        data = data_layer(name="input", size=1000)

     :param name: Name of this data layer.
     :type name: basestring
     :param size: Size of this data layer.
     :type size: int
     :param height: Height of this data layer, used for image
-    :type size: int|None
+    :type height: int|None
     :param width: Width of this data layer, used for image
-    :type size: int|None
+    :type width: int|None
     :param layer_attr: Extra Layer Attribute.
     :type layer_attr: ExtraLayerAttribute.
     :return: LayerOutput object.
diff --git a/python/paddle/v2/config_base.py b/python/paddle/v2/config_base.py
index fa2ccec6c3270541dd6b13fdfd2323d10ceac642..64de568fcc74cb6b1f8c45b5d039d231f3dda8b0 100644
--- a/python/paddle/v2/config_base.py
+++ b/python/paddle/v2/config_base.py
@@ -13,12 +13,49 @@
 # limitations under the License.

 import collections
-
+import re
 from paddle.trainer_config_helpers.default_decorators import wrap_name_default
 import paddle.trainer_config_helpers as conf_helps


+class LayerType(type):
+    def __new__(cls, name, bases, attrs):
+        method_name = attrs.get('METHOD_NAME', None)
+        if method_name is not None:
+            method = getattr(conf_helps, method_name)
+            if method.__doc__ is not None:
+                mapper = attrs.get("__map_docstr__", None)
+                if mapper is not None:
+                    attrs['__doc__'] = LayerType.__map_docstr__(
+                        mapper(method.__doc__),
+                        method_name=method_name,
+                        name=name)
+                else:
+                    attrs['__doc__'] = LayerType.__map_docstr__(
+                        method.__doc__, method_name=method_name, name=name)
+        return super(LayerType, cls).__new__(cls, name, bases, attrs)
+
+    @staticmethod
+    def __map_docstr__(doc, name, method_name):
+        assert isinstance(doc, basestring)
+
+        # Replace LayerOutput with paddle.v2.config_base.Layer.
+        doc = doc.replace("LayerOutput", "paddle.v2.config_base.Layer")
+
+        # Rename xxx_layer to xxx.
+        doc = re.sub(r"(?P<name>[a-z]+)_layer", r"\g<name>", doc)
+
+        # Rename XxxxActivation to paddle.v2.Activation.Xxxx.
+        doc = re.sub(r"(?P<name>[A-Z][a-zA-Z]+)Activation",
+                     r"paddle.v2.Activation.\g<name>", doc)
+
+        # TODO(yuyang18): Add more rules if needed.
+        return doc
+
+
 class Layer(object):
+    __metaclass__ = LayerType
+
     def __init__(self, name=None, parent_layers=None):
         assert isinstance(parent_layers, dict)
         self.name = name
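A minimal, self-contained sketch (not part of the patch; the sample docstring is made up) of how the three rewrite rules in LayerType.__map_docstr__ transform a v1 docstring:

    import re

    v1_doc = (":param act: Activation type, e.g. TanhActivation.\n"
              ":return: LayerOutput object produced by fc_layer.")

    # Same three rules, applied in the same order as the patch.
    doc = v1_doc.replace("LayerOutput", "paddle.v2.config_base.Layer")
    doc = re.sub(r"(?P<name>[a-z]+)_layer", r"\g<name>", doc)
    doc = re.sub(r"(?P<name>[A-Z][a-zA-Z]+)Activation",
                 r"paddle.v2.Activation.\g<name>", doc)
    print(doc)
    # :param act: Activation type, e.g. paddle.v2.Activation.Tanh.
    # :return: paddle.v2.config_base.Layer object produced by fc.

Note that a bare "Activation" is left alone: the pattern requires at least two characters ([A-Z][a-zA-Z]+) before the "Activation" suffix.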
@@ -80,6 +117,8 @@ def __convert_to_v2__(method_name, parent_names, is_default_name=True):
     wrapper = None

     class V2LayerImpl(Layer):
+        METHOD_NAME = method_name
+
         def __init__(self, **kwargs):
             parent_layers = dict()
             other_kwargs = dict()
diff --git a/python/paddle/v2/layer.py b/python/paddle/v2/layer.py
index 711226d659d49fc2646c34c011c7773ae2517ec9..1608aa3f7147d7d6dd9620caca213829b55133d3 100644
--- a/python/paddle/v2/layer.py
+++ b/python/paddle/v2/layer.py
@@ -47,26 +47,32 @@ from paddle.trainer.config_parser import \
     RecurrentLayerGroupEnd, model_type

 import activation
+import re
 import data_type

 __all__ = ['parse_network', 'data']

-__projection_names__ = filter(lambda x: x.endswith('_projection'),
-                              dir(conf_helps))
-__all__ += __projection_names__
-
-__operator_names__ = filter(lambda x: x.endswith('_operator'), dir(conf_helps))
-__all__ += __operator_names__
-

 def parse_network(*outputs):
     """
-    parse all output layers and then generate a model config proto.
-    :param outputs:
-    :return:
+    Parse all output layers and then generate a ModelConfig object.
+
+    .. note::
+
+        This function is used internally in the paddle.v2 module. Users
+        should never invoke it directly.
+
+    :param outputs: Output layers.
+    :type outputs: Layer
+    :return: A ModelConfig object instance.
+    :rtype: ModelConfig
     """

     def __real_func__():
+        """
+        __real_func__ is the function that config_parser.parse invokes. It is
+        the plain, old-style PaddlePaddle configuration function.
+        """
         context = dict()
         real_output = [each.to_proto(context=context) for each in outputs]
         conf_helps.outputs(real_output)
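With METHOD_NAME set on every generated V2LayerImpl subclass, the LayerType metaclass fires at class-creation time for each converted layer and installs the rewritten v1 docstring as the class docstring. A hypothetical interactive check (assuming a built paddle.v2 package; fc is the class converted from fc_layer, and the exact output wording depends on the v1 docstring):

    import paddle.v2 as paddle

    # The v2 class carries the mapped docstring, so interactive help speaks
    # the v2 vocabulary, e.g. ":rtype: paddle.v2.config_base.Layer" rather
    # than the v1 ":rtype: LayerOutput".
    print(paddle.layer.fc.__doc__)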
@@ -81,6 +87,8 @@ So we also need to implement some special LayerV2.
 """


 class DataLayerV2(Layer):
+    METHOD_NAME = 'data_layer'
+
     def __init__(self, name, type, **kwargs):
         assert isinstance(type, data_type.InputType)
@@ -99,6 +107,17 @@ class DataLayerV2(Layer):
             args[each] = self.__kwargs__[each]
         return getattr(conf_helps, self.__method_name__)(name=self.name, **args)

+    def __map_docstr__(doc):
+        doc = re.sub(r'(data = [^\)]+)\).*',
+                     "data = paddle.layer.data(name=\"input\", "
+                     "type=paddle.data_type.dense_vector(1000))", doc)
+
+        doc = re.sub(r':param size:.*',
+                     ':param type: Data type of this data layer.', doc)
+        doc = re.sub(r':type size:.*',
+                     ":type type: paddle.v2.data_type.InputType", doc)
+        return doc
+

 class WithExtraParent(Layer):
     def extra_parent(self):
@@ -347,6 +366,7 @@ class RecurrentLayerOutput(Layer):

 LayerV2 = Layer
 data = DataLayerV2
+data.__name__ = 'data'
 AggregateLevel = conf_helps.layers.AggregateLevel
 ExpandLevel = conf_helps.layers.ExpandLevel
 memory = MemoryV2
@@ -386,6 +406,7 @@ def __convert_layer__(_new_name_, _old_name_, _parent_names_):
         global __all__
         __all__.append(_new_name_)
         globals()[new_name] = __convert_to_v2__(_old_name_, _parent_names_)
+        globals()[new_name].__name__ = new_name


 for each_layer_name in dir(conf_helps):
@@ -399,21 +420,6 @@ for each_layer_name in dir(conf_helps):
 del parent_names
 del new_name
 del each_layer_name

-# convert projection
-for prj in __projection_names__:
-    globals()[prj] = __convert_to_v2__(
-        prj, parent_names=['input'], is_default_name=False)
-
-# convert operator
-operator_list = [
-    # [V1_method_name, parent_names],
-    ['dotmul_operator', ['a', 'b']],
-    ['conv_operator', ['img', 'filter']]
-]
-for op in operator_list:
-    globals()[op[0]] = __convert_to_v2__(
-        op[0], parent_names=op[1], is_default_name=False)
-

 @wrap_name_default()
 def recurrent_group(step, input, name=None):
@@ -464,3 +470,29 @@ def recurrent_group(step, input, name=None):
         return retv[0]
     else:
         return retv
+
+
+__projection_names__ = filter(lambda x: x.endswith('_projection'),
+                              dir(conf_helps))
+
+__all__ += __projection_names__
+
+__operator_names__ = filter(lambda x: x.endswith('_operator'), dir(conf_helps))
+__all__ += __operator_names__
+
+# convert projection
+for prj in __projection_names__:
+    globals()[prj] = __convert_to_v2__(
+        prj, parent_names=['input'], is_default_name=False)
+    globals()[prj].__name__ = prj
+
+# convert operator
+operator_list = [
+    # [V1_method_name, parent_names],
+    ['dotmul_operator', ['a', 'b']],
+    ['conv_operator', ['img', 'filter']]
+]
+for op in operator_list:
+    globals()[op[0]] = __convert_to_v2__(
+        op[0], parent_names=op[1], is_default_name=False)
+    globals()[op[0]].__name__ = op[0]
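An illustrative check of the new __name__ assignments (assuming a built paddle.v2; full_matrix_projection is one of the `_projection` functions the filter picks up from conf_helps). Without these assignments, every class produced by __convert_to_v2__ would report the generic factory name 'V2LayerImpl':

    import paddle.v2 as paddle

    # Converted classes now introspect under their v2 names, which also
    # keeps generated API docs readable.
    print(paddle.layer.data.__name__)                    # 'data'
    print(paddle.layer.full_matrix_projection.__name__)  # 'full_matrix_projection'
    print(paddle.layer.dotmul_operator.__name__)         # 'dotmul_operator'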