From c3080386baa42da3959d44df2fd983c16c401409 Mon Sep 17 00:00:00 2001
From: zyfncg
Date: Thu, 20 Jul 2023 11:54:58 +0800
Subject: [PATCH] rename hard_sigmoid to hardsigmoid for kernel name (#55559)

---
 paddle/phi/api/yaml/backward.yaml                | 2 +-
 paddle/phi/api/yaml/ops.yaml                     | 2 +-
 paddle/phi/kernels/cpu/activation_grad_kernel.cc | 2 +-
 paddle/phi/kernels/cpu/activation_kernel.cc      | 2 +-
 paddle/phi/kernels/gpu/activation_grad_kernel.cu | 2 +-
 paddle/phi/kernels/gpu/activation_kernel.cu      | 2 +-
 paddle/phi/kernels/xpu/activation_grad_kernel.cc | 6 +++---
 paddle/phi/kernels/xpu/activation_kernel.cc      | 4 ++--
 8 files changed, 11 insertions(+), 11 deletions(-)

diff --git a/paddle/phi/api/yaml/backward.yaml b/paddle/phi/api/yaml/backward.yaml
index a109fac2158..465df08392d 100644
--- a/paddle/phi/api/yaml/backward.yaml
+++ b/paddle/phi/api/yaml/backward.yaml
@@ -991,7 +991,7 @@
     func : UnchangedInferMeta
     param : [out]
   kernel :
-    func : hard_sigmoid_grad
+    func : hardsigmoid_grad
   inplace : (out_grad -> x_grad)
 
 - backward_op : hardtanh_grad
diff --git a/paddle/phi/api/yaml/ops.yaml b/paddle/phi/api/yaml/ops.yaml
index 4f80635d0e8..661de64990e 100644
--- a/paddle/phi/api/yaml/ops.yaml
+++ b/paddle/phi/api/yaml/ops.yaml
@@ -1090,7 +1090,7 @@
     func : UnchangedInferMeta
     param : [x]
   kernel :
-    func : hard_sigmoid
+    func : hardsigmoid
   backward : hardsigmoid_grad
 
 - op : hardtanh
diff --git a/paddle/phi/kernels/cpu/activation_grad_kernel.cc b/paddle/phi/kernels/cpu/activation_grad_kernel.cc
index 7b9074ffa92..ccc688a9400 100644
--- a/paddle/phi/kernels/cpu/activation_grad_kernel.cc
+++ b/paddle/phi/kernels/cpu/activation_grad_kernel.cc
@@ -391,7 +391,7 @@ PD_REGISTER_ACTIVATION_GRAD_KERNEL(softsign_grad, SoftsignGradKernel)
 PD_REGISTER_ACTIVATION_GRAD_KERNEL(sigmoid_grad, SigmoidGradKernel)
 PD_REGISTER_ACTIVATION_GRAD_KERNEL(sigmoid_double_grad, SigmoidDoubleGradKernel)
 PD_REGISTER_ACTIVATION_GRAD_KERNEL(sigmoid_triple_grad, SigmoidTripleGradKernel)
-PD_REGISTER_ACTIVATION_GRAD_KERNEL(hard_sigmoid_grad, HardSigmoidGradKernel)
+PD_REGISTER_ACTIVATION_GRAD_KERNEL(hardsigmoid_grad, HardSigmoidGradKernel)
 PD_REGISTER_ACTIVATION_GRAD_KERNEL(logsigmoid_grad, LogSigmoidGradKernel)
 PD_REGISTER_ACTIVATION_GRAD_KERNEL(log_grad, LogGradKernel)
 PD_REGISTER_ACTIVATION_GRAD_KERNEL(log2_grad, Log2GradKernel)
diff --git a/paddle/phi/kernels/cpu/activation_kernel.cc b/paddle/phi/kernels/cpu/activation_kernel.cc
index 947ab5da81a..65b276fed05 100644
--- a/paddle/phi/kernels/cpu/activation_kernel.cc
+++ b/paddle/phi/kernels/cpu/activation_kernel.cc
@@ -219,7 +219,7 @@ PD_REGISTER_KERNEL(
 PD_REGISTER_ACTIVATION_KERNEL(softsign, SoftsignKernel)
 PD_REGISTER_ACTIVATION_KERNEL(sigmoid, SigmoidKernel)
 PD_REGISTER_ACTIVATION_KERNEL(logsigmoid, LogSigmoidKernel)
-PD_REGISTER_ACTIVATION_KERNEL(hard_sigmoid, HardSigmoidKernel)
+PD_REGISTER_ACTIVATION_KERNEL(hardsigmoid, HardSigmoidKernel)
 PD_REGISTER_ACTIVATION_KERNEL(swish, SwishKernel)
 PD_REGISTER_ACTIVATION_KERNEL(relu6, Relu6Kernel)
diff --git a/paddle/phi/kernels/gpu/activation_grad_kernel.cu b/paddle/phi/kernels/gpu/activation_grad_kernel.cu
index aa703ede3ba..80941fdd7e9 100644
--- a/paddle/phi/kernels/gpu/activation_grad_kernel.cu
+++ b/paddle/phi/kernels/gpu/activation_grad_kernel.cu
@@ -472,7 +472,7 @@ PD_REGISTER_ACTIVATION_GRAD_KERNEL(softsign_grad, SoftsignGradKernel)
 PD_REGISTER_ACTIVATION_GRAD_KERNEL(sigmoid_grad, SigmoidGradKernel)
 PD_REGISTER_ACTIVATION_GRAD_KERNEL(sigmoid_double_grad, SigmoidDoubleGradKernel)
 PD_REGISTER_ACTIVATION_GRAD_KERNEL(sigmoid_triple_grad, SigmoidTripleGradKernel)
-PD_REGISTER_ACTIVATION_GRAD_KERNEL(hard_sigmoid_grad, HardSigmoidGradKernel)
+PD_REGISTER_ACTIVATION_GRAD_KERNEL(hardsigmoid_grad, HardSigmoidGradKernel)
 PD_REGISTER_ACTIVATION_GRAD_KERNEL(logsigmoid_grad, LogSigmoidGradKernel)
 PD_REGISTER_ACTIVATION_GRAD_KERNEL(log_grad, LogGradKernel)
 PD_REGISTER_ACTIVATION_GRAD_KERNEL(log2_grad, Log2GradKernel)
diff --git a/paddle/phi/kernels/gpu/activation_kernel.cu b/paddle/phi/kernels/gpu/activation_kernel.cu
index 330182286d5..8dde6ddbd79 100644
--- a/paddle/phi/kernels/gpu/activation_kernel.cu
+++ b/paddle/phi/kernels/gpu/activation_kernel.cu
@@ -279,7 +279,7 @@ PD_REGISTER_ACTIVATION_KERNEL(silu, SiluKernel)
 PD_REGISTER_ACTIVATION_KERNEL(softsign, SoftsignKernel)
 PD_REGISTER_ACTIVATION_KERNEL(sigmoid, SigmoidKernel)
 PD_REGISTER_ACTIVATION_KERNEL(logsigmoid, LogSigmoidKernel)
-PD_REGISTER_ACTIVATION_KERNEL(hard_sigmoid, HardSigmoidKernel)
+PD_REGISTER_ACTIVATION_KERNEL(hardsigmoid, HardSigmoidKernel)
 PD_REGISTER_ACTIVATION_KERNEL(hardswish, HardSwishKernel)
 PD_REGISTER_ACTIVATION_KERNEL(swish, SwishKernel)
 PD_REGISTER_ACTIVATION_KERNEL(round, RoundKernel)
diff --git a/paddle/phi/kernels/xpu/activation_grad_kernel.cc b/paddle/phi/kernels/xpu/activation_grad_kernel.cc
index a40156c1031..d77d84ee5ae 100644
--- a/paddle/phi/kernels/xpu/activation_grad_kernel.cc
+++ b/paddle/phi/kernels/xpu/activation_grad_kernel.cc
@@ -276,13 +276,13 @@ struct XPUHardSigmoidGradFunctor : public funcs::BaseActivationFunctor<T> {
     int r = xpu::hard_sigmoid_grad(
         xpu_context,
         reinterpret_cast<const XPUType*>(
-            y_data),  // hard_sigmoid_grad do not need x_data
+            y_data),  // hardsigmoid_grad do not need x_data
         reinterpret_cast<const XPUType*>(y_data),
         reinterpret_cast<const XPUType*>(y_grad),
         reinterpret_cast<XPUType*>(x_grad),
         dx->numel(),
         slope);
-    PADDLE_ENFORCE_XDNN_SUCCESS(r, "hard_sigmoid_grad");
+    PADDLE_ENFORCE_XDNN_SUCCESS(r, "hardsigmoid_grad");
   }
 };
 
@@ -703,7 +703,7 @@ PD_REGISTER_KERNEL(square_grad,
 PD_REGISTER_ACTIVATION_GRAD_KERNEL(exp_grad, ExpGradKernel)
 PD_REGISTER_ACTIVATION_GRAD_KERNEL(log_grad, LogGradKernel)
 PD_REGISTER_ACTIVATION_GRAD_KERNEL(leaky_relu_grad, LeakyReluGradKernel)
-PD_REGISTER_ACTIVATION_GRAD_KERNEL(hard_sigmoid_grad, HardSigmoidGradKernel)
+PD_REGISTER_ACTIVATION_GRAD_KERNEL(hardsigmoid_grad, HardSigmoidGradKernel)
 PD_REGISTER_ACTIVATION_GRAD_KERNEL(hardswish_grad, HardSwishGradKernel)
 PD_REGISTER_ACTIVATION_GRAD_KERNEL(reciprocal_grad, ReciprocalGradKernel)
 PD_REGISTER_ACTIVATION_GRAD_KERNEL(relu6_grad, Relu6GradKernel)
diff --git a/paddle/phi/kernels/xpu/activation_kernel.cc b/paddle/phi/kernels/xpu/activation_kernel.cc
index bd19fc0d9c6..609f236fa17 100644
--- a/paddle/phi/kernels/xpu/activation_kernel.cc
+++ b/paddle/phi/kernels/xpu/activation_kernel.cc
@@ -240,7 +240,7 @@ struct XPUHardSigmoidFunctor : public funcs::BaseActivationFunctor<T> {
     using XPUType = typename XPUTypeTrait<T>::Type;
     int r = xpu_activation_1attr_func<Context, T, XPUType>(
         dev_ctx, x, out, slope, xpu::hard_sigmoid<XPUType>);
-    PADDLE_ENFORCE_XDNN_SUCCESS(r, "hard_sigmoid");
+    PADDLE_ENFORCE_XDNN_SUCCESS(r, "hardsigmoid");
   }
 };
 
@@ -549,7 +549,7 @@ PD_REGISTER_KERNEL(
     sigmoid, XPU, ALL_LAYOUT, phi::SigmoidKernel, float, phi::dtype::float16) {}
 PD_REGISTER_KERNEL(
     swish, XPU, ALL_LAYOUT, phi::SwishKernel, float, phi::dtype::float16) {}
-PD_REGISTER_KERNEL(hard_sigmoid,
+PD_REGISTER_KERNEL(hardsigmoid,
                    XPU,
                    ALL_LAYOUT,
                    phi::HardSigmoidKernel,
--
GitLab