From 5b1dd387f74bd65de6fd8cf5f4d48a85e986689b Mon Sep 17 00:00:00 2001 From: zyfncg Date: Mon, 24 Oct 2022 18:55:54 +0800 Subject: [PATCH] [code-gen] Generate static graph code for exp op (#47120) * gene static graph code for exp * refactor the doc of exp * fix bug * fix bug * update doc of exp * fix sparse op --- paddle/fluid/operators/activation_op.cc | 25 ------- paddle/fluid/operators/activation_op.h | 2 - paddle/fluid/pybind/op_function_generator.cc | 5 ++ paddle/phi/api/yaml/backward.yaml | 11 +++ paddle/phi/api/yaml/generator/generate_op.py | 10 ++- .../generator/templates/operator_utils.c.j2 | 2 +- paddle/phi/api/yaml/legacy_backward.yaml | 11 --- paddle/phi/api/yaml/legacy_ops.yaml | 10 --- paddle/phi/api/yaml/op_compat.yaml | 6 ++ paddle/phi/api/yaml/ops.yaml | 10 +++ paddle/phi/ops/compat/activation_sig.cc | 2 - python/paddle/tensor/ops.py | 75 ++++++++++++++----- 12 files changed, 98 insertions(+), 71 deletions(-) diff --git a/paddle/fluid/operators/activation_op.cc b/paddle/fluid/operators/activation_op.cc index dfc5be8de08..d75ae20da3a 100644 --- a/paddle/fluid/operators/activation_op.cc +++ b/paddle/fluid/operators/activation_op.cc @@ -159,13 +159,6 @@ $$out = \\log \\frac{1}{1 + e^{-x}}$$ )DOC"; -UNUSED constexpr char ExpDoc[] = R"DOC( -Exp Operator. Computes exp of x element-wise with a natural number :math:`e` as the base. - -$$out = e^x$$ - -)DOC"; - UNUSED constexpr char Expm1Doc[] = R"DOC( Expm1 Operator. Computes expm1 of x element-wise with a natural number :math:`e` as the base. @@ -806,7 +799,6 @@ It is recommended to use the defaults for this activation. REGISTER_ACTIVATION_OP_MAKER(Sigmoid, SigmoidDoc); REGISTER_ACTIVATION_OP_MAKER(Silu, SiluDoc); REGISTER_ACTIVATION_OP_MAKER(LogSigmoid, LogSigmoidDoc); -REGISTER_ACTIVATION_OP_MAKER(Exp, ExpDoc); REGISTER_ACTIVATION_OP_MAKER(Expm1, Expm1Doc); REGISTER_ACTIVATION_OP_MAKER(Relu, ReluDoc); REGISTER_ACTIVATION_OP_MAKER(Tanh, TanhDoc); @@ -1721,23 +1713,6 @@ REGISTER_OPERATOR(pow_grad, ops::ActivationGradOpInplaceInferer); /* ========================================================================== */ -/* ========================== exp register ============================ */ -REGISTER_OPERATOR( - exp, - ops::ActivationOp, - ops::ExpOpMaker, - ops::ActivationOpInferVarType, - ops::ActivationGradOpMaker::FwdDeps(), - paddle::framework::OpDesc>, - ops::ActivationGradOpMaker::FwdDeps(), - paddle::imperative::OpBase>, - std::conditional>(), - ops::ActFwdInplaceInferer, - void>::type); -REGISTER_OPERATOR(exp_grad, - ops::ActivationOpGrad, - ops::ActivationGradOpInplaceInferer); - /* ========================== Log register ==================================*/ REGISTER_OPERATOR( log, diff --git a/paddle/fluid/operators/activation_op.h b/paddle/fluid/operators/activation_op.h index e5af6258dc4..833015b803d 100644 --- a/paddle/fluid/operators/activation_op.h +++ b/paddle/fluid/operators/activation_op.h @@ -273,7 +273,6 @@ USE_PHI_FUNCTOR(Asinh) USE_PHI_FUNCTOR(Acosh) USE_PHI_FUNCTOR(Atanh) USE_PHI_FUNCTOR(Tanh) -USE_PHI_FUNCTOR(Exp) USE_PHI_DOUBLE_GRAD_FUNCTOR(Tanh) USE_PHI_TRIPLE_GRAD_FUNCTOR(Tanh) USE_PHI_FUNCTOR(BRelu) @@ -301,7 +300,6 @@ USE_PHI_FUNCTOR(Log1p) USE_PHI_FUNCTOR(Swish) USE_PHI_FUNCTOR(HardSwish) USE_PHI_FUNCTOR(Pow) -USE_PHI_FUNCTOR(Exp) USE_PHI_FUNCTOR(Expm1) USE_PHI_FUNCTOR(Mish) USE_PHI_FUNCTOR(STanh) diff --git a/paddle/fluid/pybind/op_function_generator.cc b/paddle/fluid/pybind/op_function_generator.cc index f659a671c39..4caa2c207b8 100644 --- a/paddle/fluid/pybind/op_function_generator.cc +++ 
b/paddle/fluid/pybind/op_function_generator.cc @@ -447,6 +447,11 @@ GenerateOpFunctions(int split_count) { !phi::KernelFactory::Instance().HasCompatiblePhiKernel(op_type)) { continue; } + // Skip the sparse op + if (op_type.compare(0, 7, "sparse_") == 0 && op_type != "sparse_momentum" && + op_type != "sparse_attention") { + continue; + } op_info_map_need_gen.emplace(pair); } diff --git a/paddle/phi/api/yaml/backward.yaml b/paddle/phi/api/yaml/backward.yaml index 113f50397cd..6a14d8e0290 100644 --- a/paddle/phi/api/yaml/backward.yaml +++ b/paddle/phi/api/yaml/backward.yaml @@ -115,6 +115,17 @@ kernel : func : erfinv_grad +- backward_op : exp_grad + forward : exp (Tensor x) -> Tensor(out) + args : (Tensor out, Tensor out_grad) + output : Tensor(x_grad) + infer_meta : + func : UnchangedInferMeta + param : [out] + kernel : + func : exp_grad + inplace : (out_grad -> x_grad) + - backward_op : fft_c2c_grad forward: fft_c2c(Tensor x, int64_t[] axes, str normalization, bool forward) -> Tensor(out) args : (Tensor out_grad, int64_t[] axes, str normalization, bool forward) diff --git a/paddle/phi/api/yaml/generator/generate_op.py b/paddle/phi/api/yaml/generator/generate_op.py index a7efe853980..df2281ee3d8 100644 --- a/paddle/phi/api/yaml/generator/generate_op.py +++ b/paddle/phi/api/yaml/generator/generate_op.py @@ -136,7 +136,6 @@ def replace_compat_name(api_op_map, forward_api_dict, backward_api_dict): key = args_map[key] if val in args_map: val = args_map[val] - key, val = val, key inplace_map[key] = val forward_api_item['inplace'] = inplace_map @@ -208,6 +207,15 @@ def replace_compat_name(api_op_map, forward_api_dict, backward_api_dict): args_map[param] if param in args_map else param for param in backward_api_item['no_need_buffer'] ] + if backward_api_item['inplace']: + inplace_map = {} + for key, val in backward_api_item['inplace'].items(): + if key in args_map: + key = args_map[key] + if val in args_map: + val = args_map[val] + inplace_map[key] = val + backward_api_item['inplace'] = inplace_map def process_invoke_op(forward_api_dict, backward_api_dict): diff --git a/paddle/phi/api/yaml/generator/templates/operator_utils.c.j2 b/paddle/phi/api/yaml/generator/templates/operator_utils.c.j2 index da497e2b3bd..d2b0cf3290b 100644 --- a/paddle/phi/api/yaml/generator/templates/operator_utils.c.j2 +++ b/paddle/phi/api/yaml/generator/templates/operator_utils.c.j2 @@ -274,7 +274,7 @@ DECLARE_INFER_SHAPE_FUNCTOR({{api["op_name"]}}, {{api["op_name"] | to_pascal_cas {% if api["inplace"] is not none %} {% set inplace_map %} {% for source, target in api["inplace"].items() %} -{{"{"}}{{source | to_opmaker_name}}, {{target | to_opmaker_name}}{{"}"}}{{", " if not loop.last}} +{{"{"}}{{target | to_opmaker_name}}, {{source | to_opmaker_name}}{{"}"}}{{", " if not loop.last}} {%- endfor %} {%- endset %} DECLARE_INPLACE_OP_INFERER({{api["op_name"] | to_pascal_case}}InplaceInferer, diff --git a/paddle/phi/api/yaml/legacy_backward.yaml b/paddle/phi/api/yaml/legacy_backward.yaml index 0d00f91998d..499597897fd 100755 --- a/paddle/phi/api/yaml/legacy_backward.yaml +++ b/paddle/phi/api/yaml/legacy_backward.yaml @@ -734,17 +734,6 @@ output : Tensor(weight_grad) invoke : embedding_grad_impl(x, weight, out_grad, padding_idx, sparse, weight_grad) -- backward_op : exp_grad - forward : exp (Tensor x) -> Tensor(out) - args : (Tensor out, Tensor out_grad) - output : Tensor(x_grad) - infer_meta : - func : UnchangedInferMeta - param : [out] - kernel : - func : exp_grad - inplace : (out_grad -> x_grad) - - backward_op : expand_as_grad 
forward : expand_as (Tensor x, Tensor y, int[] target_shape) -> Tensor(out) args : (Tensor x, Tensor out_grad, int[] target_shape) diff --git a/paddle/phi/api/yaml/legacy_ops.yaml b/paddle/phi/api/yaml/legacy_ops.yaml index 0d37d3e76f6..4a0d3170249 100755 --- a/paddle/phi/api/yaml/legacy_ops.yaml +++ b/paddle/phi/api/yaml/legacy_ops.yaml @@ -856,16 +856,6 @@ kernel : func : equal_all -- op : exp - args : (Tensor x) - output : Tensor(out) - infer_meta : - func : UnchangedInferMeta - kernel : - func : exp - inplace : (x -> out) - backward : exp_grad - - op : expand args : (Tensor x, IntArray shape) output : Tensor diff --git a/paddle/phi/api/yaml/op_compat.yaml b/paddle/phi/api/yaml/op_compat.yaml index 29e88420262..6d54ade201f 100644 --- a/paddle/phi/api/yaml/op_compat.yaml +++ b/paddle/phi/api/yaml/op_compat.yaml @@ -274,6 +274,12 @@ extra : attrs : [bool use_mkldnn = false, bool use_cudnn = false] +- op : exp + inputs : + x : X + outputs : + out : Out + - op : expand (expand_v2) backward : expand_grad (expand_v2_grad) extra : diff --git a/paddle/phi/api/yaml/ops.yaml b/paddle/phi/api/yaml/ops.yaml index ec16844fc49..4d0de760ace 100644 --- a/paddle/phi/api/yaml/ops.yaml +++ b/paddle/phi/api/yaml/ops.yaml @@ -33,6 +33,16 @@ func : cholesky_solve backward : cholesky_solve_grad +- op : exp + args : (Tensor x) + output : Tensor(out) + infer_meta : + func : UnchangedInferMeta + kernel : + func : exp + inplace : (x -> out) + backward : exp_grad + - op : cross args : (Tensor x, Tensor y, int axis = 9) output : Tensor diff --git a/paddle/phi/ops/compat/activation_sig.cc b/paddle/phi/ops/compat/activation_sig.cc index fff02697f5f..d555e2a9339 100644 --- a/paddle/phi/ops/compat/activation_sig.cc +++ b/paddle/phi/ops/compat/activation_sig.cc @@ -86,7 +86,6 @@ DEFINE_ACT_GRAD_DEPX_OP_ARGMAP(Softplus, DEFINE_ACT_GRAD_DEPOUT_OP_ARGMAP(Relu, "relu", ); // NOLINT DEFINE_ACT_GRAD_DEPOUT_OP_ARGMAP(Tanh, "tanh", ); // NOLINT DEFINE_ACT_GRAD_DEPOUT_OP_ARGMAP(Sigmoid, "sigmoid", ); // NOLINT -DEFINE_ACT_GRAD_DEPOUT_OP_ARGMAP(Exp, "exp", ); // NOLINT DEFINE_ACT_GRAD_DEPOUT_OP_ARGMAP(Expm1, "expm1", ); // NOLINT DEFINE_ACT_GRAD_DEPOUT_OP_ARGMAP(Reciprocal, "reciprocal", ); // NOLINT DEFINE_ACT_GRAD_DEPOUT_OP_ARGMAP(Sqrt, "sqrt", ); // NOLINT @@ -254,7 +253,6 @@ PD_REGISTER_ARG_MAPPING_FN(acosh_grad, phi::AcoshGradOpArgumentMapping); PD_REGISTER_ARG_MAPPING_FN(atanh_grad, phi::AtanhGradOpArgumentMapping); PD_REGISTER_ARG_MAPPING_FN(relu_grad, phi::ReluGradOpArgumentMapping); -PD_REGISTER_ARG_MAPPING_FN(exp_grad, phi::ExpGradOpArgumentMapping); PD_REGISTER_ARG_MAPPING_FN(expm1_grad, phi::Expm1GradOpArgumentMapping); PD_REGISTER_ARG_MAPPING_FN(square_grad, phi::SquareGradOpArgumentMapping); PD_REGISTER_ARG_MAPPING_FN(reciprocal_grad, diff --git a/python/paddle/tensor/ops.py b/python/paddle/tensor/ops.py index b3f88e9fcdf..a0a0f01486b 100644 --- a/python/paddle/tensor/ops.py +++ b/python/paddle/tensor/ops.py @@ -18,8 +18,10 @@ from .layer_function_generator import ( generate_inplace_fn, add_sample_code, ) -from ..fluid.framework import in_dygraph_mode -from .. import _C_ops +from ..fluid.data_feeder import check_variable_and_dtype +from ..fluid.framework import in_dygraph_mode, _in_legacy_dygraph +from ..framework import LayerHelper +from .. 
import _C_ops, _legacy_C_ops __deprecated_func_name__ = { 'tanh_shrink': 'tanhshrink', @@ -37,7 +39,6 @@ __activations_noattr__ = [ ] __unary_func__ = [ - 'exp', 'expm1', 'atan', 'sqrt', @@ -158,22 +159,6 @@ Examples: """, ) -add_sample_code( - globals()["exp"], - r""" -Examples: - .. code-block:: python - - import paddle - - x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3]) - out = paddle.exp(x) - print(out) - # [0.67032005 0.81873075 1.10517092 1.34985881] - -""", -) - add_sample_code( globals()["expm1"], r""" @@ -561,6 +546,58 @@ Examples: """, ) + +def exp(x, name=None): + """ + + Computes exp of x element-wise with a natural number `e` as the base. + + .. math:: + out = e^x + + Args: + x (Tensor): Input of Exp operator, an N-D Tensor, with data type float32, float64 or float16. + name (str, optional): Name for the operation (optional, default is None). For more information, please refer to :ref:`api_guide_Name`. + + Returns: + Tensor. Output of Exp operator, a Tensor with shape same as input. + + Examples: + .. code-block:: python + + import paddle + + x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3]) + out = paddle.exp(x) + print(out) + # [0.67032005 0.81873075 1.10517092 1.34985881] + + """ + if in_dygraph_mode(): + return _C_ops.exp(x) + if _in_legacy_dygraph(): + return _legacy_C_ops.exp(x) + + check_variable_and_dtype( + x, + 'x', + [ + 'int32', + 'int64', + 'float16', + 'float32', + 'float64', + 'complex64', + 'complex128', + ], + 'exp', + ) + helper = LayerHelper('exp', **locals()) + out = helper.create_variable_for_type_inference(dtype=x.dtype) + helper.append_op(type='exp', inputs={"X": x}, outputs={"Out": out}) + return out + + __all__ += ['erf'] _erf_ = generate_layer_fn('erf') -- GitLab
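
A brief usage sketch of the migrated op (illustrative only; it assumes a Paddle build that already contains this change and reuses the sample values from the docstring added in python/paddle/tensor/ops.py). The eager branch of the new hand-written wrapper dispatches to _C_ops.exp, while static mode goes through the exp operator that is now registered from paddle/phi/api/yaml/ops.yaml instead of the removed activation_op.cc block:

    import numpy as np
    import paddle

    # Dynamic-graph (eager) path: dispatches to _C_ops.exp.
    x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3])
    print(paddle.exp(x).numpy())
    # expected roughly [0.67032005 0.81873075 1.10517092 1.34985881]

    # Static-graph path: exercises the registration generated from ops.yaml.
    paddle.enable_static()
    with paddle.static.program_guard(paddle.static.Program()):
        xs = paddle.static.data(name='x', shape=[4], dtype='float32')
        out = paddle.exp(xs)
        exe = paddle.static.Executor(paddle.CPUPlace())
        res, = exe.run(
            feed={'x': np.array([-0.4, -0.2, 0.1, 0.3], dtype='float32')},
            fetch_list=[out])
        print(res)
    paddle.disable_static()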
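
On the exp_grad entry in backward.yaml: it takes (out, out_grad) rather than the forward input because d(e^x)/dx = e^x = out, so x_grad = out_grad * out and x itself is never needed (which is also why the DEPOUT argument mapping for Exp could be dropped from activation_sig.cc). A hedged numeric check of that relation, again assuming a build that includes this patch:

    import paddle

    x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3], stop_gradient=False)
    out = paddle.exp(x)
    out.backward(paddle.ones_like(out))
    # For exp, the gradient w.r.t. x equals out when the upstream gradient is all ones.
    print(paddle.allclose(x.grad, out))  # expected: True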