From e853ece0a29978960a2735526c6ac7f272c885d3 Mon Sep 17 00:00:00 2001
From: Feiyu Chan
Date: Sat, 8 Aug 2020 19:46:56 +0800
Subject: [PATCH] update document template for unary elementwise layers
 (#25896)

1. update the document template for unary elementwise layers (a.k.a.
   activation layers);
2. remove generate_op_noattr and use generate_activation_fn instead;
   remove redundant function copies;
3. minor docstring updates to fix rst format errors;
4. fix the doc for the Rsqrt OP;
5. add sample code for each activation separately;
6. remove the unused deprecated decorator.
---
 paddle/fluid/operators/activation_op.cc       |   4 +-
 .../fluid/layers/layer_function_generator.py  |  60 +--
 python/paddle/fluid/layers/ops.py             | 359 +++++++++++++++++-
 python/paddle/tensor/math.py                  |  76 +---
 4 files changed, 381 insertions(+), 118 deletions(-)

diff --git a/paddle/fluid/operators/activation_op.cc b/paddle/fluid/operators/activation_op.cc
index b9a92c2207d..1ecb9dd26da 100644
--- a/paddle/fluid/operators/activation_op.cc
+++ b/paddle/fluid/operators/activation_op.cc
@@ -199,7 +199,7 @@ $$out = x - \\frac{e^{x} - e^{-x}}{e^{x} + e^{-x}}$$
 UNUSED constexpr char SqrtDoc[] = R"DOC(
 Sqrt Activation Operator.
 
-.. math:: out=\sqrt x=x^{1/2}
+.. math:: out=\\sqrt{x}=x^{1/2}
 
 **Note**: input value must be greater than or equal to zero.
 
@@ -211,7 +211,7 @@
 Rsqrt Activation Operator.
 
 Please make sure input is legal in case of numeric errors.
 
-$$out = \frac{1}{\sqrt{x}}$$
+$$out = \\frac{1}{\\sqrt{x}}$$
 
 )DOC";
 
diff --git a/python/paddle/fluid/layers/layer_function_generator.py b/python/paddle/fluid/layers/layer_function_generator.py
index 5c14d26f3fe..7aedb2ca256 100755
--- a/python/paddle/fluid/layers/layer_function_generator.py
+++ b/python/paddle/fluid/layers/layer_function_generator.py
@@ -25,8 +25,7 @@
 from ..layer_helper import LayerHelper
 from ..data_feeder import check_variable_and_dtype
 
 __all__ = [
-    'deprecated', 'generate_layer_fn', 'generate_activation_fn', 'autodoc',
-    'templatedoc'
+    'generate_layer_fn', 'generate_activation_fn', 'autodoc', 'templatedoc'
 ]
 
@@ -82,8 +81,9 @@ def _generate_doc_string_(op_proto,
     buf.write(escape_math(op_proto.comment))
     buf.write('\nArgs:\n')
     for each_input in op_proto.inputs:
-        line_begin = '    {0}: '.format(_convert_(each_input.name))
+        line_begin = '    {0}'.format(_convert_(each_input.name))
         buf.write(line_begin)
+        buf.write(" (Tensor): ")
         buf.write(escape_math(each_input.comment))
         if each_input.duplicable:
             buf.write(" Duplicatable.")
@@ -125,6 +125,8 @@ def _generate_doc_string_(op_proto,
     for each_opt in op_proto.outputs:
         if not each_opt.intermediate:
             break
+    buf.write(_convert_(each_opt.name))
+    buf.write(' (Tensor): ')
     buf.write(escape_math(each_opt.comment))
 
     return buf.getvalue()
@@ -275,50 +277,11 @@ def generate_activation_fn(op_type):
     func.__doc__ = _generate_doc_string_(
         op_proto,
         additional_args_lines=[
-            "name(str, optional): The default value is None. Normally there is no need for user to set this property. For more information, please refer to :ref:`api_guide_Name` ."
+            "name (str, optional): Name for the operation (optional, default is None). For more information, please refer to :ref:`api_guide_Name`."
         ])
-    func.__doc__ = func.__doc__ + """
-
-Return type
-  Variable
-
-Examples:
-    .. code-block:: python
-
-        import paddle
-        import numpy as np
-
-        paddle.enable_imperative()
-        x_data = np.array([1, 2, 3, 4]).astype(np.float32)
-        x = paddle.imperative.to_variable(x_data)
-        res = paddle.%s(x)
-        print(res.numpy())
-""" % op_type
     return func
 
 
-def deprecated(func_or_class):
-    """
-    Deprecated warning decorator. It will result a warning message.
-    Should be used before class or function, member function
-    """
-
-    @functools.wraps(func)
-    def func_wrapper(*args, **kwargs):
-        """
-        Wrap func with deprecated warning
-        """
-        warnings.simplefilter('always', DeprecationWarning)  # turn off filter
-        warnings.warn(
-            "Call to deprecated function {}.".format(func.__name__),
-            category=DeprecationWarning,
-            stacklevel=2)
-        warnings.simplefilter('default', DeprecationWarning)  # reset filter
-        return func(*args, **kwargs)
-
-    return func_wrapper
-
-
 def autodoc(comment=""):
     def __impl__(func):
         func.__doc__ = _generate_doc_string_(OpProtoHolder.instance(
@@ -384,3 +347,14 @@ def templatedoc(op_type=None):
         return func
 
     return __impl__
+
+
+def add_sample_code(func, sample_code):
+    """
+    Append sample code for dynamically generated functions.
+
+    Args:
+        func: the function to append the sample code to.
+        sample_code: a sample code section in rst format.
+    """
+    func.__doc__ = func.__doc__ + sample_code
diff --git a/python/paddle/fluid/layers/ops.py b/python/paddle/fluid/layers/ops.py
index 3adb243c8f8..c8f74c809a7 100644
--- a/python/paddle/fluid/layers/ops.py
+++ b/python/paddle/fluid/layers/ops.py
@@ -14,7 +14,7 @@
 from __future__ import print_function
 
 import os
-from .layer_function_generator import generate_layer_fn, generate_activation_fn
+from .layer_function_generator import generate_layer_fn, generate_activation_fn, add_sample_code
 from .. import core
 from ..framework import convert_np_dtype_to_dtype_, Variable
 from ..data_feeder import convert_dtype, check_variable_and_dtype, check_type, check_dtype
@@ -61,6 +61,363 @@ __all__ += __activations_noattr__
 for _OP in set(__activations_noattr__):
     globals()[_OP] = generate_activation_fn(_OP)
 
+add_sample_code(globals()["sigmoid"], r"""
+Examples:
+    .. code-block:: python
+
+        import numpy as np
+        import paddle
+        import paddle.nn.functional as F
+        paddle.enable_imperative()
+
+        x_data = np.array([-0.4, -0.2, 0.1, 0.3])
+        x = paddle.imperative.to_variable(x_data)
+        out = F.sigmoid(x)
+        print(out.numpy())
+        # [0.40131234 0.450166 0.52497919 0.57444252]
+
+""")
+
+add_sample_code(globals()["logsigmoid"], r"""
+Examples:
+    .. code-block:: python
+
+        import numpy as np
+        import paddle
+        import paddle.nn.functional as F
+        paddle.enable_imperative()
+
+        x_data = np.array([-0.4, -0.2, 0.1, 0.3])
+        x = paddle.imperative.to_variable(x_data)
+        out = F.logsigmoid(x)
+        print(out.numpy())
+        # [-0.91301525 -0.79813887 -0.64439666 -0.55435524]
+
+""")
+
+add_sample_code(globals()["exp"], r"""
+Examples:
+    .. code-block:: python
+
+        import numpy as np
+        import paddle
+        paddle.enable_imperative()
+
+        x_data = np.array([-0.4, -0.2, 0.1, 0.3])
+        x = paddle.imperative.to_variable(x_data)
+        out = paddle.exp(x)
+        print(out.numpy())
+        # [0.67032005 0.81873075 1.10517092 1.34985881]
+
+""")
+
+add_sample_code(globals()["tanh"], r"""
+Examples:
+    .. code-block:: python
+
+        import numpy as np
+        import paddle
+        paddle.enable_imperative()
+
+        x_data = np.array([-0.4, -0.2, 0.1, 0.3])
+        x = paddle.imperative.to_variable(x_data)
+        out = paddle.tanh(x)
+        print(out.numpy())
+        # [-0.37994896 -0.19737532 0.09966799 0.29131261]
+
+""")
+
+add_sample_code(globals()["atan"], r"""
+Examples:
+    .. code-block:: python
+
+        import numpy as np
+        import paddle
+        paddle.enable_imperative()
+
+        x_data = np.array([-0.4, -0.2, 0.1, 0.3])
+        x = paddle.imperative.to_variable(x_data)
+        out = paddle.atan(x)
+        print(out.numpy())
+        # [-0.38050638 -0.19739556 0.09966865 0.29145679]
+
+""")
+
+add_sample_code(globals()["tanh_shrink"], r"""
+Examples:
+    .. code-block:: python
+
+        import numpy as np
+        import paddle
+        import paddle.nn.functional as F
+        paddle.enable_imperative()
+
+        x_data = np.array([-0.4, -0.2, 0.1, 0.3])
+        x = paddle.imperative.to_variable(x_data)
+        out = F.tanh_shrink(x)
+        print(out.numpy())
+        # [-0.02005104 -0.00262468 0.00033201 0.00868739]
+
+""")
+
+add_sample_code(globals()["sqrt"], r"""
+Examples:
+    .. code-block:: python
+
+        import numpy as np
+        import paddle
+        paddle.enable_imperative()
+
+        x_data = np.array([0.1, 0.2, 0.3, 0.4])
+        x = paddle.imperative.to_variable(x_data)
+        out = paddle.sqrt(x)
+        print(out.numpy())
+        # [0.31622777 0.4472136 0.54772256 0.63245553]
+
+""")
+
+add_sample_code(globals()["rsqrt"], r"""
+Examples:
+    .. code-block:: python
+
+        import numpy as np
+        import paddle
+        paddle.enable_imperative()
+
+        x_data = np.array([0.1, 0.2, 0.3, 0.4])
+        x = paddle.imperative.to_variable(x_data)
+        out = paddle.rsqrt(x)
+        print(out.numpy())
+        # [3.16227766 2.23606798 1.82574186 1.58113883]
+
+""")
+
+add_sample_code(globals()["abs"], r"""
+Examples:
+    .. code-block:: python
+
+        import numpy as np
+        import paddle
+        paddle.enable_imperative()
+
+        x_data = np.array([-0.4, -0.2, 0.1, 0.3])
+        x = paddle.imperative.to_variable(x_data)
+        out = paddle.abs(x)
+        print(out.numpy())
+        # [0.4 0.2 0.1 0.3]
+
+""")
+
+add_sample_code(globals()["ceil"], r"""
+Examples:
+    .. code-block:: python
+
+        import numpy as np
+        import paddle
+        paddle.enable_imperative()
+
+        x_data = np.array([-0.4, -0.2, 0.1, 0.3])
+        x = paddle.imperative.to_variable(x_data)
+        out = paddle.ceil(x)
+        print(out.numpy())
+        # [-0. -0. 1. 1.]
+
+""")
+
+add_sample_code(globals()["floor"], r"""
+Examples:
+    .. code-block:: python
+
+        import numpy as np
+        import paddle
+        paddle.enable_imperative()
+
+        x_data = np.array([-0.4, -0.2, 0.1, 0.3])
+        x = paddle.imperative.to_variable(x_data)
+        out = paddle.floor(x)
+        print(out.numpy())
+        # [-1. -1. 0. 0.]
+
+""")
+
+add_sample_code(globals()["cos"], r"""
+Examples:
+    .. code-block:: python
+
+        import numpy as np
+        import paddle
+        paddle.enable_imperative()
+
+        x_data = np.array([-0.4, -0.2, 0.1, 0.3])
+        x = paddle.imperative.to_variable(x_data)
+        out = paddle.cos(x)
+        print(out.numpy())
+        # [0.92106099 0.98006658 0.99500417 0.95533649]
+
+""")
+
+add_sample_code(globals()["acos"], r"""
+Examples:
+    .. code-block:: python
+
+        import numpy as np
+        import paddle
+        paddle.enable_imperative()
+
+        x_data = np.array([-0.4, -0.2, 0.1, 0.3])
+        x = paddle.imperative.to_variable(x_data)
+        out = paddle.acos(x)
+        print(out.numpy())
+        # [1.98231317 1.77215425 1.47062891 1.26610367]
+
+""")
+
+add_sample_code(globals()["sin"], r"""
+Examples:
+    .. code-block:: python
+
+        import numpy as np
+        import paddle
+        paddle.enable_imperative()
+
+        x_data = np.array([-0.4, -0.2, 0.1, 0.3])
+        x = paddle.imperative.to_variable(x_data)
+        out = paddle.sin(x)
+        print(out.numpy())
+        # [-0.38941834 -0.19866933 0.09983342 0.29552021]
+
+""")
+
+add_sample_code(globals()["asin"], r"""
+Examples:
+    .. code-block:: python
+
+        import numpy as np
+        import paddle
+        paddle.enable_imperative()
+
+        x_data = np.array([-0.4, -0.2, 0.1, 0.3])
+        x = paddle.imperative.to_variable(x_data)
+        out = paddle.asin(x)
+        print(out.numpy())
+        # [-0.41151685 -0.20135792 0.10016742 0.30469265]
+
+""")
+
+add_sample_code(globals()["cosh"], r"""
+Examples:
+    .. code-block:: python
+
+        import numpy as np
+        import paddle
+        paddle.enable_imperative()
+
+        x_data = np.array([-0.4, -0.2, 0.1, 0.3])
+        x = paddle.imperative.to_variable(x_data)
+        out = paddle.cosh(x)
+        print(out.numpy())
+        # [1.08107237 1.02006676 1.00500417 1.04533851]
+
+""")
+
+add_sample_code(globals()["sinh"], r"""
+Examples:
+    .. code-block:: python
+
+        import numpy as np
+        import paddle
+        paddle.enable_imperative()
+
+        x_data = np.array([-0.4, -0.2, 0.1, 0.3])
+        x = paddle.imperative.to_variable(x_data)
+        out = paddle.sinh(x)
+        print(out.numpy())
+        # [-0.41075233 -0.201336 0.10016675 0.30452029]
+
+""")
+
+add_sample_code(globals()["round"], r"""
+Examples:
+    .. code-block:: python
+
+        import numpy as np
+        import paddle
+        paddle.enable_imperative()
+
+        x_data = np.array([-0.5, -0.2, 0.6, 1.5])
+        x = paddle.imperative.to_variable(x_data)
+        out = paddle.round(x)
+        print(out.numpy())
+        # [-1. -0. 1. 2.]
+
+""")
+
+add_sample_code(globals()["reciprocal"], r"""
+Examples:
+    .. code-block:: python
+
+        import numpy as np
+        import paddle
+        paddle.enable_imperative()
+
+        x_data = np.array([-0.4, -0.2, 0.1, 0.3])
+        x = paddle.imperative.to_variable(x_data)
+        out = paddle.reciprocal(x)
+        print(out.numpy())
+        # [-2.5 -5. 10. 3.33333333]
+
+""")
+
+add_sample_code(globals()["square"], r"""
+Examples:
+    .. code-block:: python
+
+        import numpy as np
+        import paddle
+        paddle.enable_imperative()
+
+        x_data = np.array([-0.4, -0.2, 0.1, 0.3])
+        x = paddle.imperative.to_variable(x_data)
+        out = paddle.square(x)
+        print(out.numpy())
+        # [0.16 0.04 0.01 0.09]
+
+""")
+
+add_sample_code(globals()["softplus"], r"""
+Examples:
+    .. code-block:: python
+
+        import numpy as np
+        import paddle
+        import paddle.nn.functional as F
+        paddle.enable_imperative()
+
+        x_data = np.array([-0.4, -0.2, 0.1, 0.3])
+        x = paddle.imperative.to_variable(x_data)
+        out = F.softplus(x)
+        print(out.numpy())
+        # [0.51301525 0.59813887 0.74439666 0.85435524]
+
+""")
+
+add_sample_code(globals()["softsign"], r"""
+Examples:
+    .. code-block:: python
+
+        import numpy as np
+        import paddle
+        import paddle.nn.functional as F
+        paddle.enable_imperative()
+
+        x_data = np.array([-0.4, -0.2, 0.1, 0.3])
+        x = paddle.imperative.to_variable(x_data)
+        out = F.softsign(x)
+        print(out.numpy())
+        # [-0.28571429 -0.16666667 0.09090909 0.23076923]
+
+""")
+
 __all__ += ['softshrink']
 
 _softshrink_ = generate_layer_fn('softshrink')
diff --git a/python/paddle/tensor/math.py b/python/paddle/tensor/math.py
index c531eb7f5a4..f8fa29757d8 100644
--- a/python/paddle/tensor/math.py
+++ b/python/paddle/tensor/math.py
@@ -21,7 +21,7 @@ from ..fluid import layers
 from ..fluid.framework import core, _varbase_creator, in_dygraph_mode, Variable
 from ..fluid.layer_helper import LayerHelper
 from ..fluid.data_feeder import check_variable_and_dtype, check_type, check_dtype, convert_dtype
-from ..fluid.layers.layer_function_generator import _generate_doc_string_
+from ..fluid.layers.layer_function_generator import _generate_doc_string_, generate_activation_fn
 import sys
 
 # TODO: define math functions
@@ -59,6 +59,9 @@ from ..fluid.layers import square  #DEFINE_ALIAS
 from ..fluid.layers import stanh  #DEFINE_ALIAS
 from ..fluid.layers import atan  #DEFINE_ALIAS
 from ..fluid.layers import erf  #DEFINE_ALIAS
+from ..fluid.layers import sqrt  #DEFINE_ALIAS
+from ..fluid.layers import sin  #DEFINE_ALIAS
+from ..fluid.layers import tanh  #DEFINE_ALIAS
 
 from ..fluid.layers import increment  #DEFINE_ALIAS
 from ..fluid.layers import multiplex  #DEFINE_ALIAS
@@ -123,68 +126,8 @@ __all__ = [
         'trace',
         'kron'
 ]
-
-
 # yapf: enable.
-
-
-def generate_op_noattr(op_type):
-    """Register the Python layer for an Operator without Attribute..
-
-    Args:
-        op_type: The name of the operator to be created.
-
-    This function takes in the operator type (sin, tanh etc) and
-    creates the operator functionality.
-
-    """
-    op_proto = OpProtoHolder.instance().get_op_proto(op_type)
-
-    def func(x, name=None):
-        if in_dygraph_mode():
-            op = getattr(core.ops, op_type)
-            return op(x)
-
-        check_variable_and_dtype(x, 'x', ['float16', 'float32', 'float64'],
-                                 op_type)
-        helper = LayerHelper(op_type, **locals())
-
-        out = helper.create_variable_for_type_inference(dtype=x.dtype)
-        helper.append_op(type=op_type, inputs={"X": x}, outputs={"Out": out})
-        return out
-
-    func.__name__ = op_type
-    func.__doc__ = _generate_doc_string_(
-        op_proto,
-        additional_args_lines=[
-            "name(str, optional): The default value is None. Normally there is no need for user to set this property. For more information, please refer to :ref:`api_guide_Name`.\n    "
-            "out(Variable, optional): The default value is None. Optional output can be any created Variable that meets the requirements to store the result of operation. if out is None, a new Varibale will be create to store the result."
-        ])
-    func.__doc__ = func.__doc__ + """
-
-Return type
-  Variable
-Examples:
-    .. code-block:: python
-
-        import numpy as np
-
-        import paddle
-        import paddle.fluid as fluid
-
-        inputs = fluid.data(name="x", shape = [None, 4], dtype='float32')
-        output = paddle.%s(inputs)
-
-        exe = fluid.Executor(fluid.CPUPlace())
-        exe.run(fluid.default_startup_program())
-
-        #input.shape=1X4, batch_size=1
-        img = np.array([[1.0, 2.0, 3.0, 4.0]]).astype(np.float32)
-        res = exe.run(fluid.default_main_program(), feed={'x':img}, fetch_list=[output])
-        print(res)
-""" % op_type
-    return func
-
-
 @templatedoc()
 def pow(input, exponent, name=None):
     """
@@ -245,17 +188,6 @@ def pow(input, exponent, name=None):
     return out
 
 
-__ops__noattr__ = [
-    'atan',
-    'sin',
-    'sqrt',
-    'tanh',
-]
-
-for _OP in set(__ops__noattr__):
-    globals()[_OP] = generate_op_noattr(_OP)
-
-
 @dygraph_only
 def _elementwise_op_in_dygraph(x, y,
-- 
GitLab
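
For reviewers who want to try the new doc-template mechanism in isolation, the
following is a minimal, self-contained sketch (not part of the patch) of the
pattern it introduces. `add_sample_code` mirrors the helper added above, while
`my_activation` is a hypothetical stand-in for a dynamically generated
activation layer.

.. code-block:: python

    # Minimal sketch: append an rst "Examples" section to a generated
    # function's docstring, as the patch does for each activation op.
    def add_sample_code(func, sample_code):
        """Append sample code to a dynamically generated function."""
        func.__doc__ = func.__doc__ + sample_code

    def my_activation(x, name=None):
        """My Activation Operator.

        Args:
            x (Tensor): input tensor.
            name (str, optional): Name for the operation (optional,
                default is None).
        """
        return x  # hypothetical placeholder body, for illustration only

    add_sample_code(my_activation, r"""
    Examples:
        .. code-block:: python

            # out = paddle.my_activation(x)
    """)

    # The docstring now ends with the appended Examples section.
    print(my_activation.__doc__)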