diff --git a/paddle/phi/kernels/activation_kernel.h b/paddle/phi/kernels/activation_kernel.h
index 623f7e467ad8791aacce1236c3de8197fc30c710..be86b1e688c5b904a2654b86669957bd4350a05f 100644
--- a/paddle/phi/kernels/activation_kernel.h
+++ b/paddle/phi/kernels/activation_kernel.h
@@ -39,6 +39,7 @@ DECLARE_ACTIVATION_KERNEL(Relu)
 DECLARE_ACTIVATION_KERNEL(Tanh)
 DECLARE_ACTIVATION_KERNEL(Exp)
 DECLARE_ACTIVATION_KERNEL(Expm1)
+DECLARE_ACTIVATION_KERNEL(Softsign)
 
 template <typename T, typename Context>
 void BReluKernel(const Context& dev_ctx,
diff --git a/paddle/phi/kernels/cpu/activation_kernel.cc b/paddle/phi/kernels/cpu/activation_kernel.cc
index ecbab5312324de88569e6bf0842d2f44d0cd8086..0535686ab1df91f301a831809ce377855bd4e7fb 100644
--- a/paddle/phi/kernels/cpu/activation_kernel.cc
+++ b/paddle/phi/kernels/cpu/activation_kernel.cc
@@ -74,21 +74,23 @@ DEFINE_CPU_ACTIVATION_KERNEL(Reciprocal, funcs::ReciprocalFunctor)
 DEFINE_CPU_ACTIVATION_KERNEL(Square, funcs::SquareFunctor)
 DEFINE_CPU_ACTIVATION_KERNEL(Sqrt, funcs::SqrtFunctor)
 DEFINE_CPU_ACTIVATION_KERNEL(Rsqrt, funcs::RsqrtFunctor)
+DEFINE_CPU_ACTIVATION_KERNEL(Softsign, funcs::SoftsignFunctor)
+
 DEFINE_CPU_ACT_KERNEL_WITH_ONE_ATTRS(LeakyRelu, funcs::LeakyReluFunctor, alpha)
 DEFINE_CPU_ACT_KERNEL_WITH_ONE_ATTRS(ThresholdedRelu,
                                      funcs::ThresholdedReluFunctor,
                                      threshold)
-DEFINE_CPU_ACT_KERNEL_WITH_ONE_ATTRS(Mish, funcs::MishFunctor, threshold)
+// DEFINE_CPU_ACT_KERNEL_WITH_ONE_ATTRS(Mish, funcs::MishFunctor, threshold)
 DEFINE_CPU_ACT_KERNEL_WITH_TWO_ATTRS(BRelu, funcs::BReluFunctor, t_min, t_max)
 DEFINE_CPU_ACT_KERNEL_WITH_TWO_ATTRS(STanh,
                                      funcs::STanhFunctor,
                                      scale_a,
                                      scale_b)
-DEFINE_CPU_ACT_KERNEL_WITH_TWO_ATTRS(Softplus,
-                                     funcs::SoftplusFunctor,
-                                     beta,
-                                     threshold)
+// DEFINE_CPU_ACT_KERNEL_WITH_TWO_ATTRS(Softplus,
+//                                      funcs::SoftplusFunctor,
+//                                      beta,
+//                                      threshold)
 
 }  // namespace phi
 
 PD_REGISTER_KERNEL(relu, CPU, ALL_LAYOUT, phi::ReluKernel, float, double) {}
@@ -111,12 +113,12 @@ PD_REGISTER_ACTIVATION_KERNEL(tanh, Tanh)
 PD_REGISTER_ACTIVATION_KERNEL(brelu, BRelu)
 PD_REGISTER_ACTIVATION_KERNEL(leaky_relu, LeakyRelu)
 PD_REGISTER_ACTIVATION_KERNEL(thresholded_relu, ThresholdedRelu)
-PD_REGISTER_ACTIVATION_KERNEL(mish, Mish)
+// PD_REGISTER_ACTIVATION_KERNEL(mish, Mish)
 PD_REGISTER_ACTIVATION_KERNEL(stanh, STanh)
 PD_REGISTER_ACTIVATION_KERNEL(reciprocal, Reciprocal)
 PD_REGISTER_ACTIVATION_KERNEL(sqrt, Sqrt)
 PD_REGISTER_ACTIVATION_KERNEL(rsqrt, Rsqrt)
-PD_REGISTER_ACTIVATION_KERNEL(softplus, Softplus)
+// PD_REGISTER_ACTIVATION_KERNEL(softplus, Softplus)
 PD_REGISTER_ACTIVATION_KERNEL(softsign, Softsign)
 
 PD_REGISTER_KERNEL(
diff --git a/paddle/phi/kernels/impl/activation_impl.h b/paddle/phi/kernels/impl/activation_impl.h
index 05339ceb748ef81c72b2b40aad107b8c9ba7f672..9aeb5eb482efff02522348e69fb6ac2c109c5327 100644
--- a/paddle/phi/kernels/impl/activation_impl.h
+++ b/paddle/phi/kernels/impl/activation_impl.h
@@ -41,7 +41,8 @@ void ActivationImpl(const Context& dev_ctx,
   bool use_32bit_index = out.size() < Eigen::NumTraits<int>::highest();
   bool is_gpu_place = paddle::platform::is_gpu_place(dev_ctx.GetPlace());
   if (use_32bit_index && is_gpu_place) {
-    functor(*place, To32BitIndex(x), To32BitIndex(out));
+    // functor(*place, To32BitIndex(x), To32BitIndex(out));
+    functor(*place, x, out);
   } else {
     functor(*place, x, out);
   }
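
Note on the new kernel: the functional change above is the Softsign CPU kernel, declared in activation_kernel.h, defined through DEFINE_CPU_ACTIVATION_KERNEL with funcs::SoftsignFunctor, and registered under the name "softsign". Below is a minimal reference sketch of what such a kernel computes, assuming funcs::SoftsignFunctor follows the standard definition softsign(x) = x / (1 + |x|); the scalar loop and names here are illustrative only, not phi's actual Eigen-based functor.

    #include <cmath>
    #include <cstdint>

    // Hypothetical reference implementation of the softsign activation,
    // applied elementwise: softsign(x) = x / (1 + |x|), bounded in (-1, 1).
    template <typename T>
    void softsign_reference(const T* x, T* out, int64_t n) {
      for (int64_t i = 0; i < n; ++i) {
        out[i] = x[i] / (static_cast<T>(1) + std::abs(x[i]));
      }
    }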
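Note on the activation_impl.h hunk: with the To32BitIndex(...) call commented out, both branches of the if/else now execute the identical functor(*place, x, out) call, so the use_32bit_index and is_gpu_place checks no longer change behavior and the GPU path falls back to default (64-bit) indexing. For orientation, a small sketch of the size test that guards the disabled fast path; the helper name is hypothetical.

    #include <cstdint>
    #include <limits>

    // Mirrors `out.size() < Eigen::NumTraits<int>::highest()` in
    // ActivationImpl: true when every element index fits in an int32,
    // which is the precondition for the 32-bit-index Eigen fast path.
    bool fits_32bit_index(int64_t num_elements) {
      return num_elements < std::numeric_limits<int32_t>::max();
    }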