Commit 8d5a18a2 authored by Yu Yang

Complete Layers documentation

Parent af607df2
@@ -795,17 +795,16 @@ def data_layer(name, size, height=None, width=None, layer_attr=None):
     .. code-block:: python

-        data = data_layer(name="input",
-                          size=1000)
+        data = data_layer(name="input", size=1000)

     :param name: Name of this data layer.
     :type name: basestring
     :param size: Size of this data layer.
     :type size: int
     :param height: Height of this data layer, used for image
-    :type size: int|None
+    :type height: int|None
     :param width: Width of this data layer, used for image
-    :type size: int|None
+    :type width: int|None
     :param layer_attr: Extra Layer Attribute.
     :type layer_attr: ExtraLayerAttribute.
     :return: LayerOutput object.
...
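For reference, a minimal usage sketch of the fields this hunk corrects, using the v1 signature shown in the hunk header above (the image dimensions are made up for illustration):

from paddle.trainer_config_helpers import data_layer

# Plain dense input, as in the fixed code-block example.
data = data_layer(name="input", size=1000)

# Image input: height/width are the parameters whose :type: entries this
# commit fixes; size must match channels * height * width.
img = data_layer(name="image", size=3 * 32 * 32, height=32, width=32)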
@@ -13,12 +13,49 @@
 # limitations under the License.
 import collections
+import re
 from paddle.trainer_config_helpers.default_decorators import wrap_name_default
 import paddle.trainer_config_helpers as conf_helps


+class LayerType(type):
+    def __new__(cls, name, bases, attrs):
+        method_name = attrs.get('METHOD_NAME', None)
+        if method_name is not None:
+            method = getattr(conf_helps, method_name)
+            if method.__doc__ is not None:
+                mapper = attrs.get("__map_docstr__", None)
+                if mapper is not None:
+                    attrs['__doc__'] = LayerType.__map_docstr__(
+                        mapper(method.__doc__),
+                        method_name=method_name,
+                        name=name)
+                else:
+                    attrs['__doc__'] = LayerType.__map_docstr__(
+                        method.__doc__, method_name=method_name, name=name)
+        return super(LayerType, cls).__new__(cls, name, bases, attrs)
+
+    @staticmethod
+    def __map_docstr__(doc, name, method_name):
+        assert isinstance(doc, basestring)
+
+        # replace LayerOutput to paddle.v2.config_base.Layer
+        doc = doc.replace("LayerOutput", "paddle.v2.config_base.Layer")
+
+        # xxx_layer to xxx
+        doc = re.sub(r"(?P<name>[a-z]+)_layer", r"\g<name>", doc)
+
+        # XxxxActivation to paddle.v2.Activation.Xxxx
+        doc = re.sub(r"(?P<name>[A-Z][a-zA-Z]+)Activation",
+                     r"paddle.v2.Activation.\g<name>", doc)
+
+        # TODO(yuyang18): Add more rules if needed.
+        return doc
+
+
 class Layer(object):
+    __metaclass__ = LayerType
+
     def __init__(self, name=None, parent_layers=None):
         assert isinstance(parent_layers, dict)
         self.name = name
@@ -80,6 +117,8 @@ def __convert_to_v2__(method_name, parent_names, is_default_name=True):
     wrapper = None

     class V2LayerImpl(Layer):
+        METHOD_NAME = method_name
+
         def __init__(self, **kwargs):
             parent_layers = dict()
             other_kwargs = dict()
...
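To make the new metaclass concrete, here is an illustrative rerun of the three __map_docstr__ rewrite rules on a hypothetical v1 docstring fragment; the sample sentence is invented, but the substitutions are copied verbatim from the diff:

import re

doc = "fc_layer returns a LayerOutput; act defaults to TanhActivation."

# Rule 1: LayerOutput -> paddle.v2.config_base.Layer
doc = doc.replace("LayerOutput", "paddle.v2.config_base.Layer")
# Rule 2: xxx_layer -> xxx
doc = re.sub(r"(?P<name>[a-z]+)_layer", r"\g<name>", doc)
# Rule 3: XxxxActivation -> paddle.v2.Activation.Xxxx
doc = re.sub(r"(?P<name>[A-Z][a-zA-Z]+)Activation",
             r"paddle.v2.Activation.\g<name>", doc)

print(doc)
# fc returns a paddle.v2.config_base.Layer; act defaults to
# paddle.v2.Activation.Tanh.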
@@ -47,26 +47,32 @@ from paddle.trainer.config_parser import \
     RecurrentLayerGroupEnd, model_type
 import activation
+import re
 import data_type

 __all__ = ['parse_network', 'data']

-__projection_names__ = filter(lambda x: x.endswith('_projection'),
-                              dir(conf_helps))
-__all__ += __projection_names__
-
-__operator_names__ = filter(lambda x: x.endswith('_operator'), dir(conf_helps))
-__all__ += __operator_names__
-

 def parse_network(*outputs):
     """
-    parse all output layers and then generate a model config proto.
-    :param outputs:
-    :return:
+    Parse all output layers and then generate a ModelConfig object.
+
+    .. note::
+
+        This function is used internally in paddle.v2 module. User should never
+        invoke this method.
+
+    :param outputs: Output layers.
+    :type outputs: Layer
+    :return: A ModelConfig object instance.
+    :rtype: ModelConfig
    """

     def __real_func__():
+        """
+        __real_func__ is the function that config_parser.parse invoked. It is
+        the plain old paddle configuration function.
+        """
         context = dict()
         real_output = [each.to_proto(context=context) for each in outputs]
         conf_helps.outputs(real_output)
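As the new note says, parse_network is internal. A hedged sketch of how the v2 module itself might drive it, with paddle.layer.data and dense_vector taken from the DataLayerV2 doc mapping later in this diff:

import paddle.v2 as paddle
from paddle.v2.layer import parse_network

data = paddle.layer.data(name="input",
                         type=paddle.data_type.dense_vector(1000))
# Walks each output layer's to_proto() and returns a ModelConfig proto
# describing the whole network.
model_config = parse_network(data)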
@@ -81,6 +87,8 @@ So we also need to implement some special LayerV2.

 class DataLayerV2(Layer):
+    METHOD_NAME = 'data_layer'
+
     def __init__(self, name, type, **kwargs):
         assert isinstance(type, data_type.InputType)
@@ -99,6 +107,17 @@ class DataLayerV2(Layer):
             args[each] = self.__kwargs__[each]
         return getattr(conf_helps, self.__method_name__)(name=self.name, **args)

+    def __map_docstr__(doc):
+        doc = re.sub(r'(data = [^\)]+)\).*',
+                     "data = paddle.layer.data(name=\"input\", "
+                     "type=paddle.data_type.dense_vector(1000))", doc)
+
+        doc = re.sub(r':param size:.*',
+                     ':param type: Data type of this data layer', doc)
+        doc = re.sub(r':type size:.*',
+                     ":type size: paddle.v2.data_type.InputType", doc)
+        return doc
+

 class WithExtraParent(Layer):
     def extra_parent(self):
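For concreteness, the first substitution rewrites the v1 code-block example into its v2 form. A standalone rerun, with the input string taken from the data_layer docstring fixed earlier in this commit:

import re

old = 'data = data_layer(name="input", size=1000)'
# Same pattern and replacement as in DataLayerV2.__map_docstr__ above.
new = re.sub(r'(data = [^\)]+)\).*',
             'data = paddle.layer.data(name="input", '
             'type=paddle.data_type.dense_vector(1000))', old)
print(new)
# data = paddle.layer.data(name="input", type=paddle.data_type.dense_vector(1000))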
@@ -347,6 +366,7 @@ class RecurrentLayerOutput(Layer):

 LayerV2 = Layer
 data = DataLayerV2
+data.__name__ = 'data'
 AggregateLevel = conf_helps.layers.AggregateLevel
 ExpandLevel = conf_helps.layers.ExpandLevel
 memory = MemoryV2
@@ -386,6 +406,7 @@ def __convert_layer__(_new_name_, _old_name_, _parent_names_):
         global __all__
         __all__.append(_new_name_)
         globals()[new_name] = __convert_to_v2__(_old_name_, _parent_names_)
+        globals()[new_name].__name__ = new_name


 for each_layer_name in dir(conf_helps):
@@ -399,21 +420,6 @@ del parent_names
 del new_name
 del each_layer_name

-# convert projection
-for prj in __projection_names__:
-    globals()[prj] = __convert_to_v2__(
-        prj, parent_names=['input'], is_default_name=False)
-
-# convert operator
-operator_list = [
-    # [V1_method_name, parent_names],
-    ['dotmul_operator', ['a', 'b']],
-    ['conv_operator', ['img', 'filter']]
-]
-for op in operator_list:
-    globals()[op[0]] = __convert_to_v2__(
-        op[0], parent_names=op[1], is_default_name=False)
-

 @wrap_name_default()
 def recurrent_group(step, input, name=None):
@@ -464,3 +470,29 @@ def recurrent_group(step, input, name=None):
         return retv[0]
     else:
         return retv
+
+
+__projection_names__ = filter(lambda x: x.endswith('_projection'),
+                              dir(conf_helps))
+__all__ += __projection_names__
+
+__operator_names__ = filter(lambda x: x.endswith('_operator'), dir(conf_helps))
+__all__ += __operator_names__
+
+# convert projection
+for prj in __projection_names__:
+    globals()[prj] = __convert_to_v2__(
+        prj, parent_names=['input'], is_default_name=False)
+    globals()[prj].__name__ = prj
+
+# convert operator
+operator_list = [
+    # [V1_method_name, parent_names],
+    ['dotmul_operator', ['a', 'b']],
+    ['conv_operator', ['img', 'filter']]
+]
+for op in operator_list:
+    globals()[op[0]] = __convert_to_v2__(
+        op[0], parent_names=op[1], is_default_name=False)
+    globals()[op[0]].__name__ = op[0]
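A hedged usage sketch of what these __name__ assignments buy; the module path follows this file (python/paddle/v2/layer.py), and fc is assumed to exist as one of the names produced by stripping the _layer suffix from conf_helps:

import paddle.v2 as paddle

# Without the assignments above, the classes generated by __convert_to_v2__
# would all introspect as 'V2LayerImpl'; with them, names match the public
# v2 API, which is what the mapped docstrings reference.
print(paddle.layer.data.__name__)             # 'data'
print(paddle.layer.fc.__name__)               # 'fc'
print(paddle.layer.dotmul_operator.__name__)  # 'dotmul_operator'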