From 95113cb2e9d12f3e3f6b5aa393db2d8fdb9c180c Mon Sep 17 00:00:00 2001
From: phlrain
Date: Thu, 17 Mar 2022 15:27:13 +0000
Subject: [PATCH] fix error; test=develop

---
 paddle/fluid/operators/activation_op.h | 122 ++----------------------
 1 file changed, 10 insertions(+), 112 deletions(-)

diff --git a/paddle/fluid/operators/activation_op.h b/paddle/fluid/operators/activation_op.h
index f0ded4951f..159ad4d19f 100644
--- a/paddle/fluid/operators/activation_op.h
+++ b/paddle/fluid/operators/activation_op.h
@@ -296,8 +296,6 @@ USE_PHI_FUNCTOR(Softsign)
 
 template <typename T>
 using ELUGradNegativeAlphaFunctor = phi::funcs::ELUGradNegativeAlphaFunctor<T>;
-
-
 template <typename T>
 using ReluCPUFunctor = phi::funcs::ReluCPUFunctor<T>;
 template <typename T>
@@ -717,106 +715,6 @@ struct PowGradFunctor : public BaseActivationFunctor<T> {
   static constexpr ActBwdOpFwdDeps FwdDeps() { return ActBwdOpFwdDeps::kDepX; }
 };
 
-template <typename T>
-<<<<<<< HEAD
-struct HardSigmoidFunctor : public BaseActivationFunctor<T> {
-  float slope;
-  float offset;
-  typename BaseActivationFunctor<T>::AttrPair GetAttrs() {
-    return {{"slope", &slope}, {"offset", &offset}};
-=======
-struct LogitFunctor {
-  template <typename Device, typename X, typename Out, typename P>
-  void operator()(Device d, X x, Out out, P p, float eps) const {
-    // logit(x) = ln(x/(1-x))
-    auto tmp_x =
-        (x.cwiseMin(static_cast<T>(1.0 - eps))).cwiseMax(static_cast<T>(eps));
-
-    if (!eps) {
-      out.device(d) = (x < static_cast<T>(0.0) || x > static_cast<T>(1.0))
-                          .select(p.constant(static_cast<T>(NAN)),
-                                  (tmp_x / (static_cast<T>(1) - tmp_x)).log());
-    } else {
-      out.device(d) = (tmp_x / (static_cast<T>(1) - tmp_x)).log();
-    }
-  }
-};
-
-template <typename T>
-struct LogitGradFunctor {
-  template <typename Device, typename X, typename dOut, typename dX, typename P>
-  void operator()(Device d, X x, dOut dout, dX dx, P p, float eps) const {
-    // logit(x)' = 1/(x*(1-x))
-    dx.device(d) =
-        (x < static_cast<T>(eps) || x > static_cast<T>(1.0 - eps))
-            .select(p.constant(static_cast<T>(0)),
-                    dout * (static_cast<T>(1) / ((static_cast<T>(1) - x) * x)));
-  }
-};
-
-template <typename T>
-struct STanhFunctor : public BaseActivationFunctor<T> {
-  float scale_a;
-  float scale_b;
-  typename BaseActivationFunctor<T>::AttrPair GetAttrs() {
-    return {{"scale_a", &scale_a}, {"scale_b", &scale_b}};
->>>>>>> 1904572ac8edb57dfb528e711588758002a168dd
-  }
-
-  template <typename Device, typename X, typename Out>
-  void operator()(Device d, X x, Out out) const {
-<<<<<<< HEAD
-    auto temp = x * static_cast<T>(slope) + static_cast<T>(offset);
-    out.device(d) =
-        temp.cwiseMax(static_cast<T>(0)).cwiseMin(static_cast<T>(1));
-=======
-    out.device(d) =
-        static_cast<T>(scale_b) * (static_cast<T>(scale_a) * x).tanh();
->>>>>>> 1904572ac8edb57dfb528e711588758002a168dd
-  }
-};
-
-template <typename T>
-<<<<<<< HEAD
-struct HardSigmoidGradFunctor : public BaseActivationFunctor<T> {
-  float slope;
-  float offset;
-  typename BaseActivationFunctor<T>::AttrPair GetAttrs() {
-    return {{"slope", &slope}, {"offset", &offset}};
-  }
-  template <typename Device, typename X, typename Out, typename dOut,
-            typename dX>
-  void operator()(Device d, X x, Out out, dOut dout, dX dx) const {
-    dx.device(d) = dout *
-                   ((out > static_cast<T>(0)) * (out < static_cast<T>(1)))
-                       .template cast<T>() *
-                   static_cast<T>(slope);
-  }
-
-  static constexpr ActBwdOpFwdDeps FwdDeps() {
-    return ActBwdOpFwdDeps::kDepOut;
-  }
-=======
-struct STanhGradFunctor : public BaseActivationFunctor<T> {
-  float scale_a;
-  float scale_b;
-  typename BaseActivationFunctor<T>::AttrPair GetAttrs() {
-    return {{"scale_a", &scale_a}, {"scale_b", &scale_b}};
-  }
-
-  template <typename Device, typename X, typename Out, typename dOut,
-            typename dX>
-  void operator()(Device d, X x, Out out, dOut dout, dX dx) const {
-    auto a = static_cast<T>(scale_a);
-    auto b = static_cast<T>(scale_b);
-    auto temp = (a * x).tanh() * (a * x).tanh();
-    dx.device(d) = dout * a * b * (static_cast<T>(1) - temp);
-  }
-
-  static constexpr ActBwdOpFwdDeps FwdDeps() { return ActBwdOpFwdDeps::kDepX; }
->>>>>>> 1904572ac8edb57dfb528e711588758002a168dd
-};
-
 template <typename T>
 struct SwishFunctor : public BaseActivationFunctor<T> {
   float beta;
@@ -1326,14 +1224,14 @@ struct LogGradGradFunctor : public BaseActivationFunctor<T> {
 }  // namespace operators
 }  // namespace paddle
 
-#define FOR_EACH_ACTIVATION_OP(__macro)                                       \
-  __macro(ceil, Ceil, CeilFunctor, ZeroGradFunctor);                          \
-  __macro(floor, Floor, FloorFunctor, ZeroGradFunctor);                       \
-  __macro(round, Round, RoundFunctor, ZeroGradFunctor);                       \
-  __macro(log1p, Log1p, Log1pFunctor, Log1pGradFunctor);                      \
-  __macro(log2, Log2, Log2Functor, Log2GradFunctor);                          \
-  __macro(log10, Log10, Log10Functor, Log10GradFunctor);                      \
-  __macro(soft_relu, SoftRelu, SoftReluFunctor, SoftReluGradFunctor);         \
-  __macro(relu6, Relu6, Relu6Functor, Relu6GradFunctor);                      \
-  __macro(swish, Swish, SwishFunctor, SwishGradFunctor);                      \
+#define FOR_EACH_ACTIVATION_OP(__macro)                                 \
+  __macro(ceil, Ceil, CeilFunctor, ZeroGradFunctor);                    \
+  __macro(floor, Floor, FloorFunctor, ZeroGradFunctor);                 \
+  __macro(round, Round, RoundFunctor, ZeroGradFunctor);                 \
+  __macro(log1p, Log1p, Log1pFunctor, Log1pGradFunctor);                \
+  __macro(log2, Log2, Log2Functor, Log2GradFunctor);                    \
+  __macro(log10, Log10, Log10Functor, Log10GradFunctor);                \
+  __macro(soft_relu, SoftRelu, SoftReluFunctor, SoftReluGradFunctor);   \
+  __macro(relu6, Relu6, Relu6Functor, Relu6GradFunctor);                \
+  __macro(swish, Swish, SwishFunctor, SwishGradFunctor);                \
  __macro(hard_swish, HardSwish, HardSwishFunctor, HardSwishGradFunctor);
-- 
GitLab
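
Note: everything below the "-- " signature delimiter above is ignored by "git am", so the patch itself is unchanged. The deleted hunk removes a block of unresolved merge-conflict markers (<<<<<<< HEAD / ======= / >>>>>>> 1904572ac8edb57dfb528e711588758002a168dd) that had been committed by accident, interleaving stale copies of the HardSigmoid, Logit, and STanh functors. As a standalone, hedged illustration of the math the conflicted STanh functors computed, namely out = scale_b * tanh(scale_a * x) with gradient dout * scale_a * scale_b * (1 - tanh^2(scale_a * x)), the C++ sketch below checks the analytic gradient against a central finite difference. The values scale_a = 0.67 and scale_b = 1.7159 are assumed defaults of Paddle's stanh operator, not values taken from this patch.

// Standalone sketch (not from the patch): stanh forward and gradient math.
#include <cmath>
#include <cstdio>

// Forward: out = b * tanh(a * x), as in the removed STanhFunctor.
static double stanh_fwd(double x, double a, double b) {
  return b * std::tanh(a * x);
}

// Gradient with dout = 1: a * b * (1 - tanh^2(a * x)),
// as in the removed STanhGradFunctor.
static double stanh_grad(double x, double a, double b) {
  const double t = std::tanh(a * x);
  return a * b * (1.0 - t * t);
}

int main() {
  // scale_a/scale_b defaults assumed from Paddle's stanh op; x is arbitrary.
  const double a = 0.67, b = 1.7159, x = 0.3, h = 1e-6;
  const double analytic = stanh_grad(x, a, b);
  const double numeric =
      (stanh_fwd(x + h, a, b) - stanh_fwd(x - h, a, b)) / (2 * h);
  // The two values should agree to roughly 1e-9.
  std::printf("analytic = %.9f, numeric = %.9f\n", analytic, numeric);
  return 0;
}

Comparing an analytic gradient against a central difference like this is the usual quick check when an activation grad functor is touched; it would have caught the conflicted, non-compiling state this commit cleans up.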