diff --git a/paddle/fluid/operators/activation_op.cc b/paddle/fluid/operators/activation_op.cc
index c53427b465bc3cefe2eb9d539433eef13c0eee74..348902c656cec1ea1eeaccc90feefd56d307111d 100644
--- a/paddle/fluid/operators/activation_op.cc
+++ b/paddle/fluid/operators/activation_op.cc
@@ -602,7 +602,6 @@ REGISTER_ACTIVATION_OP_MAKER(Softsign, SoftsignDoc);
 
 namespace ops = paddle::operators;
 
-
 #define REGISTER_ACTIVATION_OP(KERNEL_TYPE, OP_NAME, functor, grad_functor) \
   REGISTER_OPERATOR(                                                        \
       KERNEL_TYPE, ops::ActivationOp, ops::OP_NAME##OpMaker,                \
@@ -619,7 +618,6 @@ namespace ops = paddle::operators;
 
 #define REGISTER_ACTIVATION_CPU_KERNEL(act_type, op_name, functor,        \
                                        grad_functor)                      \
-
   REGISTER_OP_CPU_KERNEL(                                                 \
       act_type, ops::ActivationKernel<paddle::platform::CPUDeviceContext, \
                                       ops::functor<float>>,               \
diff --git a/paddle/fluid/operators/activation_op.h b/paddle/fluid/operators/activation_op.h
index e50f3bf766d139a43b95fdae2b9e48e8761cc87a..1732f61582f79365d6872e15b9df1ee8f053903c 100644
--- a/paddle/fluid/operators/activation_op.h
+++ b/paddle/fluid/operators/activation_op.h
@@ -36,7 +36,6 @@ limitations under the License. */
 namespace paddle {
 namespace operators {
 
-
 enum ActBwdOpFwdDeps {
   kNoDeps = 0x00,  // Do not need any forward input/output
   kDepX = 0x01,    // Only need forward input X
@@ -528,6 +527,8 @@ struct RsqrtGradFunctor : public BaseActivationFunctor<T> {
   void operator()(Device d, X x, Out out, dOut dout, dX dx) const {
     dx.device(d) = static_cast<T>(-0.5) * dout * out * out * out;
   }
+
+  static constexpr ActBwdOpFwdDeps FwdDeps() { return kDepOut; }
 };
 
 // ceil(x) = ceiling(x)
@@ -1200,7 +1201,6 @@ struct SwishGradFunctor : public BaseActivationFunctor<T> {
 }  // namespace operators
 }  // namespace paddle
 
-
 #define FOR_EACH_ACTIVATION_OP(__macro)                                      \
   __macro(sigmoid, Sigmoid, SigmoidFunctor, SigmoidGradFunctor);             \
   __macro(logsigmoid, LogSigmoid, LogSigmoidFunctor, LogSigmoidGradFunctor); \
@@ -1211,6 +1211,7 @@ struct SwishGradFunctor : public BaseActivationFunctor<T> {
   __macro(atan, Atan, AtanFunctor, AtanGradFunctor);                         \
   __macro(softshrink, SoftShrink, SoftShrinkFunctor, SoftShrinkGradFunctor); \
   __macro(sqrt, Sqrt, SqrtFunctor, SqrtGradFunctor);                         \
+  __macro(rsqrt, Rsqrt, RsqrtFunctor, RsqrtGradFunctor);                     \
   __macro(abs, Abs, AbsFunctor, AbsGradFunctor);                             \
   __macro(ceil, Ceil, CeilFunctor, ZeroGradFunctor);                         \
   __macro(floor, Floor, FloorFunctor, ZeroGradFunctor);                      \
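
The `FwdDeps()` override added to `RsqrtGradFunctor` marks the rsqrt backward pass as depending only on the forward output `Out`, not the input `X`: since `out = rsqrt(x) = x^(-1/2)`, the derivative is `d(out)/dx = -0.5 * x^(-3/2) = -0.5 * out^3`, which is exactly what the functor computes from `out` and `dout`. Below is a minimal standalone sketch (plain C++, no Paddle dependencies, not part of the patch) that checks this identity numerically:

```cpp
// Standalone check of the identity used by RsqrtGradFunctor:
//   out = rsqrt(x) = x^(-1/2)  =>  d(out)/dx = -0.5 * x^(-3/2) = -0.5 * out^3
// This is why the backward pass only needs the forward output (kDepOut).
#include <cassert>
#include <cmath>
#include <cstdio>

int main() {
  const double xs[] = {0.25, 1.0, 4.0, 9.0};
  for (double x : xs) {
    const double out = 1.0 / std::sqrt(x);                  // forward: rsqrt(x)
    const double grad_from_out = -0.5 * out * out * out;    // uses only `out`
    const double grad_analytic = -0.5 * std::pow(x, -1.5);  // reference value
    assert(std::fabs(grad_from_out - grad_analytic) < 1e-12);
    std::printf("x=%g  rsqrt(x)=%g  d(rsqrt)/dx=%g\n", x, out, grad_from_out);
  }
  return 0;
}
```

The new `FOR_EACH_ACTIVATION_OP` entry then routes rsqrt through the shared operator and kernel registration macros (`REGISTER_ACTIVATION_OP`, `REGISTER_ACTIVATION_CPU_KERNEL`) like the other activation ops.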