From def33631d4d91d7b96e0ea6d73b03ad2040b2d83 Mon Sep 17 00:00:00 2001
From: phlrain
Date: Wed, 16 Mar 2022 16:39:18 +0000
Subject: [PATCH] update

---
 paddle/phi/kernels/activation_kernel.h      |  1 +
 paddle/phi/kernels/cpu/activation_kernel.cc | 16 +++++++++-------
 paddle/phi/kernels/impl/activation_impl.h   |  3 ++-
 3 files changed, 12 insertions(+), 8 deletions(-)

diff --git a/paddle/phi/kernels/activation_kernel.h b/paddle/phi/kernels/activation_kernel.h
index 623f7e467a..be86b1e688 100644
--- a/paddle/phi/kernels/activation_kernel.h
+++ b/paddle/phi/kernels/activation_kernel.h
@@ -39,6 +39,7 @@ DECLARE_ACTIVATION_KERNEL(Relu)
 DECLARE_ACTIVATION_KERNEL(Tanh)
 DECLARE_ACTIVATION_KERNEL(Exp)
 DECLARE_ACTIVATION_KERNEL(Expm1)
+DECLARE_ACTIVATION_KERNEL(Softsign)
 
 template <typename T, typename Context>
 void BReluKernel(const Context& dev_ctx,
diff --git a/paddle/phi/kernels/cpu/activation_kernel.cc b/paddle/phi/kernels/cpu/activation_kernel.cc
index ecbab53123..0535686ab1 100644
--- a/paddle/phi/kernels/cpu/activation_kernel.cc
+++ b/paddle/phi/kernels/cpu/activation_kernel.cc
@@ -74,21 +74,23 @@ DEFINE_CPU_ACTIVATION_KERNEL(Reciprocal, funcs::ReciprocalFunctor)
 DEFINE_CPU_ACTIVATION_KERNEL(Square, funcs::SquareFunctor)
 DEFINE_CPU_ACTIVATION_KERNEL(Sqrt, funcs::SqrtFunctor)
 DEFINE_CPU_ACTIVATION_KERNEL(Rsqrt, funcs::RsqrtFunctor)
+
 DEFINE_CPU_ACTIVATION_KERNEL(Softsign, funcs::SoftsignFunctor)
+
 DEFINE_CPU_ACT_KERNEL_WITH_ONE_ATTRS(LeakyRelu, funcs::LeakyReluFunctor, alpha)
 DEFINE_CPU_ACT_KERNEL_WITH_ONE_ATTRS(ThresholdedRelu,
                                      funcs::ThresholdedReluFunctor,
                                      threshold)
-DEFINE_CPU_ACT_KERNEL_WITH_ONE_ATTRS(Mish, funcs::MishFunctor, threshold)
+// DEFINE_CPU_ACT_KERNEL_WITH_ONE_ATTRS(Mish, funcs::MishFunctor, threshold)
 DEFINE_CPU_ACT_KERNEL_WITH_TWO_ATTRS(BRelu, funcs::BReluFunctor, t_min, t_max)
 DEFINE_CPU_ACT_KERNEL_WITH_TWO_ATTRS(STanh,
                                      funcs::STanhFunctor,
                                      scale_a,
                                      scale_b)
-DEFINE_CPU_ACT_KERNEL_WITH_TWO_ATTRS(Softplus,
-                                     funcs::SoftplusFunctor,
-                                     beta,
-                                     threshold)
+// DEFINE_CPU_ACT_KERNEL_WITH_TWO_ATTRS(Softplus,
+//                                      funcs::SoftplusFunctor,
+//                                      beta,
+//                                      threshold)
 }  // namespace phi
 
 PD_REGISTER_KERNEL(relu, CPU, ALL_LAYOUT, phi::ReluKernel, float, double) {}
@@ -111,12 +113,12 @@ PD_REGISTER_ACTIVATION_KERNEL(tanh, Tanh)
 PD_REGISTER_ACTIVATION_KERNEL(brelu, BRelu)
 PD_REGISTER_ACTIVATION_KERNEL(leaky_relu, LeakyRelu)
 PD_REGISTER_ACTIVATION_KERNEL(thresholded_relu, ThresholdedRelu)
-PD_REGISTER_ACTIVATION_KERNEL(mish, Mish)
+// PD_REGISTER_ACTIVATION_KERNEL(mish, Mish)
 PD_REGISTER_ACTIVATION_KERNEL(stanh, STanh)
 PD_REGISTER_ACTIVATION_KERNEL(reciprocal, Reciprocal)
 PD_REGISTER_ACTIVATION_KERNEL(sqrt, Sqrt)
 PD_REGISTER_ACTIVATION_KERNEL(rsqrt, Rsqrt)
-PD_REGISTER_ACTIVATION_KERNEL(softplus, Softplus)
+// PD_REGISTER_ACTIVATION_KERNEL(softplus, Softplus)
 PD_REGISTER_ACTIVATION_KERNEL(softsign, Softsign)
 
 PD_REGISTER_KERNEL(
diff --git a/paddle/phi/kernels/impl/activation_impl.h b/paddle/phi/kernels/impl/activation_impl.h
index 05339ceb74..9aeb5eb482 100644
--- a/paddle/phi/kernels/impl/activation_impl.h
+++ b/paddle/phi/kernels/impl/activation_impl.h
@@ -41,7 +41,8 @@ void ActivationImpl(const Context& dev_ctx,
   bool use_32bit_index = out.size() < Eigen::NumTraits<int>::highest();
   bool is_gpu_place = paddle::platform::is_gpu_place(dev_ctx.GetPlace());
   if (use_32bit_index && is_gpu_place) {
-    functor(*place, To32BitIndex(x), To32BitIndex(out));
+    // functor(*place, To32BitIndex(x), To32BitIndex(out));
+    functor(*place, x, out);
   } else {
     functor(*place, x, out);
   }
--
GitLab
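
Note: funcs::SoftsignFunctor, which the Softsign declaration and registration above rely on, is defined elsewhere and not shown in this patch. For reference, softsign is the element-wise map x / (1 + |x|). The sketch below is only a standalone C++ illustration of that formula; it is not the phi functor or its Eigen implementation, and SoftsignReference is a made-up name for this note.

#include <cmath>
#include <cstddef>
#include <vector>

// Reference element-wise softsign: y[i] = x[i] / (1 + |x[i]|).
// Hypothetical helper for illustration only; the real kernel is dispatched
// through funcs::SoftsignFunctor and ActivationImpl as registered above.
template <typename T>
std::vector<T> SoftsignReference(const std::vector<T>& x) {
  std::vector<T> y(x.size());
  for (std::size_t i = 0; i < x.size(); ++i) {
    y[i] = x[i] / (static_cast<T>(1) + std::abs(x[i]));
  }
  return y;
}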