Unverified · Commit c3080386 authored by zyfncg, committed by GitHub

rename hard_sigmoid to hardsigmoid for kernel name (#55559)

Parent ab00c96c
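For context, the op being renamed implements the piecewise-linear "hard" approximation of the sigmoid. A minimal CPU-style reference, assuming the usual definition and the legacy slope/offset defaults of 0.2 and 0.5 (the function name and defaults here are illustrative, not PHI's actual API):

```cpp
#include <algorithm>
#include <cstdint>

// Sketch of what the hardsigmoid kernel computes: clip the linear ramp
// slope * x + offset into [0, 1]. The real PHI kernel is templated over
// device context and dtype; this reference loop is for illustration only.
template <typename T>
void HardSigmoidRef(const T* x, T* out, int64_t n,
                    T slope = static_cast<T>(0.2),   // assumed default
                    T offset = static_cast<T>(0.5)) {  // assumed default
  for (int64_t i = 0; i < n; ++i) {
    T v = slope * x[i] + offset;
    out[i] = std::max(static_cast<T>(0), std::min(static_cast<T>(1), v));
  }
}
```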
@@ -991,7 +991,7 @@
     func : UnchangedInferMeta
     param : [out]
   kernel :
-    func : hard_sigmoid_grad
+    func : hardsigmoid_grad
   inplace : (out_grad -> x_grad)
 
 - backward_op : hardtanh_grad
......
@@ -1090,7 +1090,7 @@
     func : UnchangedInferMeta
     param : [x]
   kernel :
-    func : hard_sigmoid
+    func : hardsigmoid
   backward : hardsigmoid_grad
 
 - op : hardtanh
......
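The `func :` entry under `kernel :` in these YAML files is the string PHI uses to look up a registered kernel at dispatch time, so it must match the name used in the PD_REGISTER_* macros changed below. A toy sketch of that string-keyed coupling (`Registry` and `Dispatch` are invented names, not PHI's real registry API):

```cpp
#include <functional>
#include <stdexcept>
#include <string>
#include <unordered_map>

using KernelFn = std::function<void()>;

// Global name -> kernel map; a stand-in for PHI's kernel registry.
std::unordered_map<std::string, KernelFn>& Registry() {
  static std::unordered_map<std::string, KernelFn> reg;
  return reg;
}

void Dispatch(const std::string& name) {
  auto it = Registry().find(name);
  if (it == Registry().end()) {
    // If the YAML said "hardsigmoid" but kernels were still registered as
    // "hard_sigmoid", lookup would fail here -- hence the paired renames.
    throw std::runtime_error("no kernel registered for: " + name);
  }
  it->second();
}
```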
@@ -391,7 +391,7 @@ PD_REGISTER_ACTIVATION_GRAD_KERNEL(softsign_grad, SoftsignGradKernel)
 PD_REGISTER_ACTIVATION_GRAD_KERNEL(sigmoid_grad, SigmoidGradKernel)
 PD_REGISTER_ACTIVATION_GRAD_KERNEL(sigmoid_double_grad, SigmoidDoubleGradKernel)
 PD_REGISTER_ACTIVATION_GRAD_KERNEL(sigmoid_triple_grad, SigmoidTripleGradKernel)
-PD_REGISTER_ACTIVATION_GRAD_KERNEL(hard_sigmoid_grad, HardSigmoidGradKernel)
+PD_REGISTER_ACTIVATION_GRAD_KERNEL(hardsigmoid_grad, HardSigmoidGradKernel)
 PD_REGISTER_ACTIVATION_GRAD_KERNEL(logsigmoid_grad, LogSigmoidGradKernel)
 PD_REGISTER_ACTIVATION_GRAD_KERNEL(log_grad, LogGradKernel)
 PD_REGISTER_ACTIVATION_GRAD_KERNEL(log2_grad, Log2GradKernel)
......
@@ -219,7 +219,7 @@ PD_REGISTER_KERNEL(
 PD_REGISTER_ACTIVATION_KERNEL(softsign, SoftsignKernel)
 PD_REGISTER_ACTIVATION_KERNEL(sigmoid, SigmoidKernel)
 PD_REGISTER_ACTIVATION_KERNEL(logsigmoid, LogSigmoidKernel)
-PD_REGISTER_ACTIVATION_KERNEL(hard_sigmoid, HardSigmoidKernel)
+PD_REGISTER_ACTIVATION_KERNEL(hardsigmoid, HardSigmoidKernel)
 PD_REGISTER_ACTIVATION_KERNEL(swish, SwishKernel)
 PD_REGISTER_ACTIVATION_KERNEL(relu6, Relu6Kernel)
......
@@ -472,7 +472,7 @@ PD_REGISTER_ACTIVATION_GRAD_KERNEL(softsign_grad, SoftsignGradKernel)
 PD_REGISTER_ACTIVATION_GRAD_KERNEL(sigmoid_grad, SigmoidGradKernel)
 PD_REGISTER_ACTIVATION_GRAD_KERNEL(sigmoid_double_grad, SigmoidDoubleGradKernel)
 PD_REGISTER_ACTIVATION_GRAD_KERNEL(sigmoid_triple_grad, SigmoidTripleGradKernel)
-PD_REGISTER_ACTIVATION_GRAD_KERNEL(hard_sigmoid_grad, HardSigmoidGradKernel)
+PD_REGISTER_ACTIVATION_GRAD_KERNEL(hardsigmoid_grad, HardSigmoidGradKernel)
 PD_REGISTER_ACTIVATION_GRAD_KERNEL(logsigmoid_grad, LogSigmoidGradKernel)
 PD_REGISTER_ACTIVATION_GRAD_KERNEL(log_grad, LogGradKernel)
 PD_REGISTER_ACTIVATION_GRAD_KERNEL(log2_grad, Log2GradKernel)
......
@@ -279,7 +279,7 @@ PD_REGISTER_ACTIVATION_KERNEL(silu, SiluKernel)
 PD_REGISTER_ACTIVATION_KERNEL(softsign, SoftsignKernel)
 PD_REGISTER_ACTIVATION_KERNEL(sigmoid, SigmoidKernel)
 PD_REGISTER_ACTIVATION_KERNEL(logsigmoid, LogSigmoidKernel)
-PD_REGISTER_ACTIVATION_KERNEL(hard_sigmoid, HardSigmoidKernel)
+PD_REGISTER_ACTIVATION_KERNEL(hardsigmoid, HardSigmoidKernel)
 PD_REGISTER_ACTIVATION_KERNEL(hardswish, HardSwishKernel)
 PD_REGISTER_ACTIVATION_KERNEL(swish, SwishKernel)
 PD_REGISTER_ACTIVATION_KERNEL(round, RoundKernel)
......
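One reason the rename touches every backend file: the first argument of these registration macros is a token that ends up as the registry key, so each CPU, GPU, and XPU call site must be edited in step. A self-contained toy illustrating the stringization (`TOY_REGISTER_KERNEL` is hypothetical and only mirrors the idea, not PHI's actual macro):

```cpp
#include <map>
#include <string>

// Toy registry keyed by kernel name.
static std::map<std::string, void (*)()> g_kernels;

// The macro stringizes its first argument into the registry key, so the
// spelling of the token at every call site determines the lookup string.
#define TOY_REGISTER_KERNEL(name, fn)      \
  static const bool toy_reg_##name = [] {  \
    g_kernels[#name] = (fn);               \
    return true;                           \
  }();

void HardSigmoidKernelStub() {}

// Registers under the key "hardsigmoid"; the old spelling would have
// produced the key "hard_sigmoid" instead.
TOY_REGISTER_KERNEL(hardsigmoid, HardSigmoidKernelStub)
```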
@@ -276,13 +276,13 @@ struct XPUHardSigmoidGradFunctor : public funcs::BaseActivationFunctor<T> {
     int r = xpu::hard_sigmoid_grad(
         xpu_context,
         reinterpret_cast<const XPUType*>(
-            y_data),  // hard_sigmoid_grad do not need x_data
+            y_data),  // hardsigmoid_grad do not need x_data
         reinterpret_cast<const XPUType*>(y_data),
         reinterpret_cast<const XPUType*>(y_grad),
         reinterpret_cast<XPUType*>(x_grad),
         dx->numel(),
         slope);
-    PADDLE_ENFORCE_XDNN_SUCCESS(r, "hard_sigmoid_grad");
+    PADDLE_ENFORCE_XDNN_SUCCESS(r, "hardsigmoid_grad");
   }
 };
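For reference, the gradient this functor requests from XDNN can be expressed from the forward output alone, which is why `y_data` is passed where `x_data` would go. A CPU-style sketch, assuming the usual hard-sigmoid definition (`HardSigmoidGradRef` is an illustrative name, not a PHI function):

```cpp
#include <cstdint>

// An output strictly inside (0, 1) means the input fell on the linear
// ramp, where the local derivative is `slope`; on the clipped segments
// the derivative is zero. Hence x is never needed.
template <typename T>
void HardSigmoidGradRef(const T* out, const T* dout, T* dx, int64_t n,
                        T slope) {
  for (int64_t i = 0; i < n; ++i) {
    const bool on_ramp =
        out[i] > static_cast<T>(0) && out[i] < static_cast<T>(1);
    dx[i] = on_ramp ? dout[i] * slope : static_cast<T>(0);
  }
}
```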
@@ -703,7 +703,7 @@ PD_REGISTER_KERNEL(square_grad,
 PD_REGISTER_ACTIVATION_GRAD_KERNEL(exp_grad, ExpGradKernel)
 PD_REGISTER_ACTIVATION_GRAD_KERNEL(log_grad, LogGradKernel)
 PD_REGISTER_ACTIVATION_GRAD_KERNEL(leaky_relu_grad, LeakyReluGradKernel)
-PD_REGISTER_ACTIVATION_GRAD_KERNEL(hard_sigmoid_grad, HardSigmoidGradKernel)
+PD_REGISTER_ACTIVATION_GRAD_KERNEL(hardsigmoid_grad, HardSigmoidGradKernel)
 PD_REGISTER_ACTIVATION_GRAD_KERNEL(hardswish_grad, HardSwishGradKernel)
 PD_REGISTER_ACTIVATION_GRAD_KERNEL(reciprocal_grad, ReciprocalGradKernel)
 PD_REGISTER_ACTIVATION_GRAD_KERNEL(relu6_grad, Relu6GradKernel)
......
@@ -240,7 +240,7 @@ struct XPUHardSigmoidFunctor : public funcs::BaseActivationFunctor<T> {
     using XPUType = typename XPUTypeTrait<T>::Type;
     int r = xpu_activation_1attr_func<Context, T, XPUType>(
         dev_ctx, x, out, slope, xpu::hard_sigmoid<XPUType>);
-    PADDLE_ENFORCE_XDNN_SUCCESS(r, "hard_sigmoid");
+    PADDLE_ENFORCE_XDNN_SUCCESS(r, "hardsigmoid");
   }
 };
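Note that only the PHI-facing strings change in these XPU hunks: the calls into the vendor library (`xpu::hard_sigmoid`, `xpu::hard_sigmoid_grad`) keep their original names, since the rename covers the kernel registry key and the error-message strings, not the XDNN API.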
@@ -549,7 +549,7 @@ PD_REGISTER_KERNEL(
     sigmoid, XPU, ALL_LAYOUT, phi::SigmoidKernel, float, phi::dtype::float16) {}
 PD_REGISTER_KERNEL(
     swish, XPU, ALL_LAYOUT, phi::SwishKernel, float, phi::dtype::float16) {}
-PD_REGISTER_KERNEL(hard_sigmoid,
+PD_REGISTER_KERNEL(hardsigmoid,
                    XPU,
                    ALL_LAYOUT,
                    phi::HardSigmoidKernel,
......