diff --git a/paddle/fluid/operators/activation_op.cc b/paddle/fluid/operators/activation_op.cc
index dfc5be8de08bc46c334187b60f2b1cf74fccb66f..d75ae20da3ad92ed4340aa6d90bfb2af2c43ccd5 100644
--- a/paddle/fluid/operators/activation_op.cc
+++ b/paddle/fluid/operators/activation_op.cc
@@ -159,13 +159,6 @@ $$out = \\log \\frac{1}{1 + e^{-x}}$$
 
 )DOC";
 
-UNUSED constexpr char ExpDoc[] = R"DOC(
-Exp Operator. Computes exp of x element-wise with a natural number :math:`e` as the base.
-
-$$out = e^x$$
-
-)DOC";
-
 UNUSED constexpr char Expm1Doc[] = R"DOC(
 Expm1 Operator. Computes expm1 of x element-wise with a natural number :math:`e` as the base.
 
@@ -806,7 +799,6 @@ It is recommended to use the defaults for this activation.
 REGISTER_ACTIVATION_OP_MAKER(Sigmoid, SigmoidDoc);
 REGISTER_ACTIVATION_OP_MAKER(Silu, SiluDoc);
 REGISTER_ACTIVATION_OP_MAKER(LogSigmoid, LogSigmoidDoc);
-REGISTER_ACTIVATION_OP_MAKER(Exp, ExpDoc);
 REGISTER_ACTIVATION_OP_MAKER(Expm1, Expm1Doc);
 REGISTER_ACTIVATION_OP_MAKER(Relu, ReluDoc);
 REGISTER_ACTIVATION_OP_MAKER(Tanh, TanhDoc);
@@ -1721,23 +1713,6 @@ REGISTER_OPERATOR(pow_grad,
                   ops::ActivationGradOpInplaceInferer);
 /* ========================================================================== */
 
-/* ========================== exp register ============================ */
-REGISTER_OPERATOR(
-    exp,
-    ops::ActivationOp,
-    ops::ExpOpMaker,
-    ops::ActivationOpInferVarType,
-    ops::ActivationGradOpMaker<ops::ExpGradFunctor<float>::FwdDeps(),
-                               paddle::framework::OpDesc>,
-    ops::ActivationGradOpMaker<ops::ExpGradFunctor<float>::FwdDeps(),
-                               paddle::imperative::OpBase>,
-    std::conditional<ops::CanInplaceAct<ops::ExpGradFunctor<float>>(),
-                     ops::ActFwdInplaceInferer,
-                     void>::type);
-REGISTER_OPERATOR(exp_grad,
-                  ops::ActivationOpGrad,
-                  ops::ActivationGradOpInplaceInferer);
-
 /* ========================== Log register ==================================*/
 REGISTER_OPERATOR(
     log,
diff --git a/paddle/fluid/operators/activation_op.h b/paddle/fluid/operators/activation_op.h
index e5af6258dc477c693d47b64577b1c7b79aef0336..833015b803dd1a05d8be80d1d0407716a9247bed 100644
--- a/paddle/fluid/operators/activation_op.h
+++ b/paddle/fluid/operators/activation_op.h
@@ -273,7 +273,6 @@ USE_PHI_FUNCTOR(Asinh)
 USE_PHI_FUNCTOR(Acosh)
 USE_PHI_FUNCTOR(Atanh)
 USE_PHI_FUNCTOR(Tanh)
-USE_PHI_FUNCTOR(Exp)
 USE_PHI_DOUBLE_GRAD_FUNCTOR(Tanh)
 USE_PHI_TRIPLE_GRAD_FUNCTOR(Tanh)
 USE_PHI_FUNCTOR(BRelu)
@@ -301,7 +300,6 @@ USE_PHI_FUNCTOR(Log1p)
 USE_PHI_FUNCTOR(Swish)
 USE_PHI_FUNCTOR(HardSwish)
 USE_PHI_FUNCTOR(Pow)
-USE_PHI_FUNCTOR(Exp)
 USE_PHI_FUNCTOR(Expm1)
 USE_PHI_FUNCTOR(Mish)
 USE_PHI_FUNCTOR(STanh)
diff --git a/paddle/fluid/pybind/op_function_generator.cc b/paddle/fluid/pybind/op_function_generator.cc
index f659a671c3947a6e9b619aa264b10284130ff59a..4caa2c207b80cc15e53a62593b3d8ad1317f93ff 100644
--- a/paddle/fluid/pybind/op_function_generator.cc
+++ b/paddle/fluid/pybind/op_function_generator.cc
@@ -447,6 +447,11 @@ GenerateOpFunctions(int split_count) {
         !phi::KernelFactory::Instance().HasCompatiblePhiKernel(op_type)) {
       continue;
     }
+    // Skip the sparse op
+    if (op_type.compare(0, 7, "sparse_") == 0 && op_type != "sparse_momentum" &&
+        op_type != "sparse_attention") {
+      continue;
+    }
 
     op_info_map_need_gen.emplace(pair);
   }
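The generator hunk above filters by op-type prefix rather than by an explicit list of sparse ops. The sketch below is a minimal Python restatement of that predicate (not Paddle source; the helper name is hypothetical), just to make the allowlist behavior explicit:

    # Sketch of the C++ check: op_type.compare(0, 7, "sparse_") == 0 && ...
    def should_generate(op_type: str) -> bool:
        # These two are dense ops that merely carry a "sparse_" name.
        dense_exceptions = {"sparse_momentum", "sparse_attention"}
        if op_type.startswith("sparse_") and op_type not in dense_exceptions:
            return False  # handled by the new sparse API path instead
        return True

    assert should_generate("exp")
    assert should_generate("sparse_momentum")
    assert not should_generate("sparse_coo_tensor")
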
diff --git a/paddle/phi/api/yaml/backward.yaml b/paddle/phi/api/yaml/backward.yaml
index 113f50397cda33b43cac790f74a1d217ca174b71..6a14d8e02902a60a96075ba5bb6a23c14207e27c 100644
--- a/paddle/phi/api/yaml/backward.yaml
+++ b/paddle/phi/api/yaml/backward.yaml
@@ -115,6 +115,17 @@
   kernel :
     func : erfinv_grad
 
+- backward_op : exp_grad
+  forward : exp (Tensor x) -> Tensor(out)
+  args : (Tensor out, Tensor out_grad)
+  output : Tensor(x_grad)
+  infer_meta :
+    func : UnchangedInferMeta
+    param : [out]
+  kernel :
+    func : exp_grad
+  inplace : (out_grad -> x_grad)
+
 - backward_op : fft_c2c_grad
   forward: fft_c2c(Tensor x, int64_t[] axes, str normalization, bool forward) -> Tensor(out)
   args : (Tensor out_grad, int64_t[] axes, str normalization, bool forward)
diff --git a/paddle/phi/api/yaml/generator/generate_op.py b/paddle/phi/api/yaml/generator/generate_op.py
index a7efe85398045142b3cbfc23a5adff9ae8625744..df2281ee3d8a1054d282a231712aff6e431da77a 100644
--- a/paddle/phi/api/yaml/generator/generate_op.py
+++ b/paddle/phi/api/yaml/generator/generate_op.py
@@ -136,7 +136,6 @@ def replace_compat_name(api_op_map, forward_api_dict, backward_api_dict):
                     key = args_map[key]
                 if val in args_map:
                     val = args_map[val]
-                key, val = val, key
                 inplace_map[key] = val
             forward_api_item['inplace'] = inplace_map
 
@@ -208,6 +207,15 @@ def replace_compat_name(api_op_map, forward_api_dict, backward_api_dict):
                 args_map[param] if param in args_map else param
                 for param in backward_api_item['no_need_buffer']
             ]
+            if backward_api_item['inplace']:
+                inplace_map = {}
+                for key, val in backward_api_item['inplace'].items():
+                    if key in args_map:
+                        key = args_map[key]
+                    if val in args_map:
+                        val = args_map[val]
+                    inplace_map[key] = val
+                backward_api_item['inplace'] = inplace_map
 
 
 def process_invoke_op(forward_api_dict, backward_api_dict):
diff --git a/paddle/phi/api/yaml/generator/templates/operator_utils.c.j2 b/paddle/phi/api/yaml/generator/templates/operator_utils.c.j2
index da497e2b3bd00bffca5855b18c282f3f45cc22a2..d2b0cf3290b8dbb538e0d389c35d536b30cfa627 100644
--- a/paddle/phi/api/yaml/generator/templates/operator_utils.c.j2
+++ b/paddle/phi/api/yaml/generator/templates/operator_utils.c.j2
@@ -274,7 +274,7 @@ DECLARE_INFER_SHAPE_FUNCTOR({{api["op_name"]}}, {{api["op_name"] | to_pascal_cas
 {% if api["inplace"] is not none %}
 {% set inplace_map %}
 {% for source, target in api["inplace"].items() %}
-{{"{"}}{{source | to_opmaker_name}}, {{target | to_opmaker_name}}{{"}"}}{{", " if not loop.last}}
+{{"{"}}{{target | to_opmaker_name}}, {{source | to_opmaker_name}}{{"}"}}{{", " if not loop.last}}
 {%- endfor %}
 {%- endset %}
 DECLARE_INPLACE_OP_INFERER({{api["op_name"] | to_pascal_case}}InplaceInferer,
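Taken together, the two generator hunks keep the parsed inplace map in the (source -> target) order declared in the YAML, instead of flipping it in generate_op.py; the Jinja template now does the pair reordering itself when it renders the inplace inferer, and backward ops get the same args_map renaming as forward ops. A minimal sketch of the renaming step (hypothetical helper, not the real generator code):

    def rename_inplace(inplace_map, args_map):
        # Map both sides of an inplace spec through op_compat renames,
        # without swapping key and value as the old code did.
        return {
            args_map.get(key, key): args_map.get(val, val)
            for key, val in inplace_map.items()
        }

    # "inplace : (x -> out)" with the op_compat renames x->X, out->Out:
    assert rename_inplace({"x": "out"}, {"x": "X", "out": "Out"}) == {"X": "Out"}
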
diff --git a/paddle/phi/api/yaml/legacy_backward.yaml b/paddle/phi/api/yaml/legacy_backward.yaml
index 0d00f91998d7d8201f7a76fc1827223c25891f14..499597897fd479461d26720e44f7adfd557644fa 100755
--- a/paddle/phi/api/yaml/legacy_backward.yaml
+++ b/paddle/phi/api/yaml/legacy_backward.yaml
@@ -734,17 +734,6 @@
   output : Tensor(weight_grad)
   invoke : embedding_grad_impl(x, weight, out_grad, padding_idx, sparse, weight_grad)
 
-- backward_op : exp_grad
-  forward : exp (Tensor x) -> Tensor(out)
-  args : (Tensor out, Tensor out_grad)
-  output : Tensor(x_grad)
-  infer_meta :
-    func : UnchangedInferMeta
-    param : [out]
-  kernel :
-    func : exp_grad
-  inplace : (out_grad -> x_grad)
-
 - backward_op : expand_as_grad
   forward : expand_as (Tensor x, Tensor y, int[] target_shape) -> Tensor(out)
   args : (Tensor x, Tensor out_grad, int[] target_shape)
diff --git a/paddle/phi/api/yaml/legacy_ops.yaml b/paddle/phi/api/yaml/legacy_ops.yaml
index 0d37d3e76f6bed294199f5939d4655e694ecdab7..4a0d31702494a26fa02fea038767c724e7cd5544 100755
--- a/paddle/phi/api/yaml/legacy_ops.yaml
+++ b/paddle/phi/api/yaml/legacy_ops.yaml
@@ -856,16 +856,6 @@
   kernel :
     func : equal_all
 
-- op : exp
-  args : (Tensor x)
-  output : Tensor(out)
-  infer_meta :
-    func : UnchangedInferMeta
-  kernel :
-    func : exp
-  inplace : (x -> out)
-  backward : exp_grad
-
 - op : expand
   args : (Tensor x, IntArray shape)
   output : Tensor
diff --git a/paddle/phi/api/yaml/op_compat.yaml b/paddle/phi/api/yaml/op_compat.yaml
index 29e88420262762bcd5590a16bbd0066bb8b3562a..6d54ade201fcb3f69499f889e4b5e8fe9af22a0c 100644
--- a/paddle/phi/api/yaml/op_compat.yaml
+++ b/paddle/phi/api/yaml/op_compat.yaml
@@ -274,6 +274,12 @@
   extra :
     attrs : [bool use_mkldnn = false, bool use_cudnn = false]
 
+- op : exp
+  inputs :
+    x : X
+  outputs :
+    out : Out
+
 - op : expand (expand_v2)
   backward : expand_grad (expand_v2_grad)
   extra :
diff --git a/paddle/phi/api/yaml/ops.yaml b/paddle/phi/api/yaml/ops.yaml
index ec16844fc4918d4993e4399f851f411f8d947feb..4d0de760ace5d83e430a8857c274b3c3f7fb9bc8 100644
--- a/paddle/phi/api/yaml/ops.yaml
+++ b/paddle/phi/api/yaml/ops.yaml
@@ -33,6 +33,16 @@
   func : cholesky_solve
   backward : cholesky_solve_grad
 
+- op : exp
+  args : (Tensor x)
+  output : Tensor(out)
+  infer_meta :
+    func : UnchangedInferMeta
+  kernel :
+    func : exp
+  inplace : (x -> out)
+  backward : exp_grad
+
 - op : cross
   args : (Tensor x, Tensor y, int axis = 9)
   output : Tensor
diff --git a/paddle/phi/ops/compat/activation_sig.cc b/paddle/phi/ops/compat/activation_sig.cc
index fff02697f5f88f831d89ae4d90ef2f3bf7b4db8e..d555e2a93396726aaf03be5b288c27f806637fa1 100644
--- a/paddle/phi/ops/compat/activation_sig.cc
+++ b/paddle/phi/ops/compat/activation_sig.cc
@@ -86,7 +86,6 @@ DEFINE_ACT_GRAD_DEPX_OP_ARGMAP(Softplus,
 DEFINE_ACT_GRAD_DEPOUT_OP_ARGMAP(Relu, "relu", );        // NOLINT
 DEFINE_ACT_GRAD_DEPOUT_OP_ARGMAP(Tanh, "tanh", );        // NOLINT
 DEFINE_ACT_GRAD_DEPOUT_OP_ARGMAP(Sigmoid, "sigmoid", );  // NOLINT
-DEFINE_ACT_GRAD_DEPOUT_OP_ARGMAP(Exp, "exp", );          // NOLINT
 DEFINE_ACT_GRAD_DEPOUT_OP_ARGMAP(Expm1, "expm1", );      // NOLINT
 DEFINE_ACT_GRAD_DEPOUT_OP_ARGMAP(Reciprocal, "reciprocal", );  // NOLINT
 DEFINE_ACT_GRAD_DEPOUT_OP_ARGMAP(Sqrt, "sqrt", );        // NOLINT
@@ -254,7 +253,6 @@ PD_REGISTER_ARG_MAPPING_FN(acosh_grad, phi::AcoshGradOpArgumentMapping);
 PD_REGISTER_ARG_MAPPING_FN(atanh_grad, phi::AtanhGradOpArgumentMapping);
 PD_REGISTER_ARG_MAPPING_FN(relu_grad, phi::ReluGradOpArgumentMapping);
-PD_REGISTER_ARG_MAPPING_FN(exp_grad, phi::ExpGradOpArgumentMapping);
 PD_REGISTER_ARG_MAPPING_FN(expm1_grad, phi::Expm1GradOpArgumentMapping);
 PD_REGISTER_ARG_MAPPING_FN(square_grad, phi::SquareGradOpArgumentMapping);
 PD_REGISTER_ARG_MAPPING_FN(reciprocal_grad,
                            phi::ReciprocalGradOpArgumentMapping);
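The exp_grad entry registered above takes `out` rather than `x` (a DEPOUT gradient) because the derivative of e^x is e^x itself: the saved forward output is all the backward pass needs, which is also why the kernel may write x_grad over out_grad in place. A NumPy sketch of the formula (illustration only, not the phi kernel):

    import numpy as np

    def exp_grad(out, out_grad):
        # d(e^x)/dx = e^x = out, so x_grad = out * out_grad; the kernel may
        # reuse out_grad's buffer, hence "inplace : (out_grad -> x_grad)".
        return out * out_grad

    x = np.array([-0.4, -0.2, 0.1, 0.3])
    out = np.exp(x)
    np.testing.assert_allclose(exp_grad(out, np.ones_like(out)), out)
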
diff --git a/python/paddle/tensor/ops.py b/python/paddle/tensor/ops.py
index b3f88e9fcdf415b292fd71530cebffea95351e5a..a0a0f01486b6206ea4ac35608ec5694e7a881507 100644
--- a/python/paddle/tensor/ops.py
+++ b/python/paddle/tensor/ops.py
@@ -18,8 +18,10 @@ from .layer_function_generator import (
     generate_inplace_fn,
     add_sample_code,
 )
-from ..fluid.framework import in_dygraph_mode
-from .. import _C_ops
+from ..fluid.data_feeder import check_variable_and_dtype
+from ..fluid.framework import in_dygraph_mode, _in_legacy_dygraph
+from ..framework import LayerHelper
+from .. import _C_ops, _legacy_C_ops
 
 __deprecated_func_name__ = {
     'tanh_shrink': 'tanhshrink',
@@ -37,7 +39,6 @@ __activations_noattr__ = [
 ]
 
 __unary_func__ = [
-    'exp',
     'expm1',
     'atan',
     'sqrt',
@@ -158,22 +159,6 @@ Examples:
 """,
 )
 
-add_sample_code(
-    globals()["exp"],
-    r"""
-Examples:
-    .. code-block:: python
-
-        import paddle
-
-        x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3])
-        out = paddle.exp(x)
-        print(out)
-        # [0.67032005 0.81873075 1.10517092 1.34985881]
-
-""",
-)
-
 add_sample_code(
     globals()["expm1"],
     r"""
@@ -561,6 +546,58 @@ Examples:
 """,
 )
 
+
+def exp(x, name=None):
+    """
+
+    Computes exp of x element-wise with a natural number `e` as the base.
+
+    .. math::
+        out = e^x
+
+    Args:
+        x (Tensor): Input of Exp operator, an N-D Tensor, with data type float32, float64 or float16.
+        name (str, optional): Name for the operation (optional, default is None). For more information, please refer to :ref:`api_guide_Name`.
+
+    Returns:
+        Tensor. Output of Exp operator, a Tensor with shape same as input.
+
+    Examples:
+        .. code-block:: python
+
+            import paddle
+
+            x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3])
+            out = paddle.exp(x)
+            print(out)
+            # [0.67032005 0.81873075 1.10517092 1.34985881]
+
+    """
+    if in_dygraph_mode():
+        return _C_ops.exp(x)
+    if _in_legacy_dygraph():
+        return _legacy_C_ops.exp(x)
+
+    check_variable_and_dtype(
+        x,
+        'x',
+        [
+            'int32',
+            'int64',
+            'float16',
+            'float32',
+            'float64',
+            'complex64',
+            'complex128',
+        ],
+        'exp',
+    )
+    helper = LayerHelper('exp', **locals())
+    out = helper.create_variable_for_type_inference(dtype=x.dtype)
+    helper.append_op(type='exp', inputs={"X": x}, outputs={"Out": out})
+    return out
+
+
 __all__ += ['erf']
 
 _erf_ = generate_layer_fn('erf')
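Assuming a Paddle build that includes this patch, the hand-written `exp` above dispatches to the generated `_C_ops.exp` in dygraph mode and behaves as the old auto-generated function did. A small dygraph check (hypothetical session) that the gradient of exp equals its output, matching the exp_grad entry registered earlier:

    import paddle

    x = paddle.to_tensor([-0.4, -0.2, 0.1, 0.3], stop_gradient=False)
    out = paddle.exp(x)
    out.backward(paddle.ones_like(out))
    print(out)     # [0.67032005 0.81873075 1.10517092 1.34985881]
    print(x.grad)  # identical values, since d(e^x)/dx = e^x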