From cabb9501bdd73b9b661679a9af8af1b830efaff2 Mon Sep 17 00:00:00 2001
From: Zeng Jinle <32832641+sneaxiy@users.noreply.github.com>
Date: Tue, 17 Sep 2019 20:26:26 +0800
Subject: [PATCH] fix leaky_relu op when alpha is zero, test=develop (#19833)

---
 paddle/fluid/operators/activation_op.h             | 14 +++++++-------
 .../operators/test_leaky_relu_grad_grad_functor.cc |  5 +++++
 .../operators/test_leaky_relu_grad_grad_functor.cu |  5 +++++
 .../operators/test_leaky_relu_grad_grad_functor.h  |  2 +-
 4 files changed, 18 insertions(+), 8 deletions(-)

diff --git a/paddle/fluid/operators/activation_op.h b/paddle/fluid/operators/activation_op.h
index 34c848ac982..316fb00eb99 100644
--- a/paddle/fluid/operators/activation_op.h
+++ b/paddle/fluid/operators/activation_op.h
@@ -1073,8 +1073,8 @@ struct LeakyReluGradFunctor : public BaseActivationFunctor<T> {
             typename dX>
   void operator()(Device d, X x, Out out, dOut dout, dX dx) const {
     auto temp1 =
-        static_cast<T>(alpha) * (out < static_cast<T>(0)).template cast<T>();
-    auto temp2 = (out >= static_cast<T>(0)).template cast<T>();
+        static_cast<T>(alpha) * (out <= static_cast<T>(0)).template cast<T>();
+    auto temp2 = (out > static_cast<T>(0)).template cast<T>();
     dx.device(d) = dout * (temp1 + temp2).template cast<T>();
   }
 
@@ -1418,11 +1418,11 @@ struct LeakyReluGradGradFunctor : public BaseActivationFunctor<T> {
       auto ddx = framework::EigenVector<T>::Flatten(detail::Ref(ddX));
       auto out = framework::EigenVector<T>::Flatten(detail::Ref(Out));
       auto ddout = framework::EigenVector<T>::Flatten(detail::Ref(ddOut));
-      ddout.device(*d) =
-          ddx *
-          ((out >= static_cast<T>(0)).template cast<T>() +
-           static_cast<T>(alpha) * (out < static_cast<T>(0)).template cast<T>())
-              .template cast<T>();
+      ddout.device(*d) = ddx *
+                         ((out > static_cast<T>(0)).template cast<T>() +
+                          static_cast<T>(alpha) *
+                              (out <= static_cast<T>(0)).template cast<T>())
+                             .template cast<T>();
     }
   }
   static constexpr ActBwdOpFwdDeps FwdDeps() { return kDepOut; }
diff --git a/paddle/fluid/operators/test_leaky_relu_grad_grad_functor.cc b/paddle/fluid/operators/test_leaky_relu_grad_grad_functor.cc
index 77e74e3f81c..9a06a9a2762 100644
--- a/paddle/fluid/operators/test_leaky_relu_grad_grad_functor.cc
+++ b/paddle/fluid/operators/test_leaky_relu_grad_grad_functor.cc
@@ -22,5 +22,10 @@ TEST(leaky_relu_grad_grad, test_cpu) {
       TestLeakyReluGradGradMain<float>({32, 64}, platform::CPUPlace(), 0.02));
 }
 
+TEST(leaky_relu_grad_grad, test_cpu_zero_alpha) {
+  ASSERT_TRUE(
+      TestLeakyReluGradGradMain<float>({32, 64}, platform::CPUPlace(), 0.0));
+}
+
 }  // namespace operators
 }  // namespace paddle
diff --git a/paddle/fluid/operators/test_leaky_relu_grad_grad_functor.cu b/paddle/fluid/operators/test_leaky_relu_grad_grad_functor.cu
index bb1afaea621..6f0f840b8c5 100644
--- a/paddle/fluid/operators/test_leaky_relu_grad_grad_functor.cu
+++ b/paddle/fluid/operators/test_leaky_relu_grad_grad_functor.cu
@@ -22,5 +22,10 @@ TEST(leaky_relu_grad_grad, test_gpu) {
       TestLeakyReluGradGradMain<float>({32, 64}, platform::CUDAPlace(0), 0.15));
 }
 
+TEST(leaky_relu_grad_grad, test_gpu_zero_alpha) {
+  ASSERT_TRUE(
+      TestLeakyReluGradGradMain<float>({32, 64}, platform::CUDAPlace(0), 0.0));
+}
+
 }  // namespace operators
 }  // namespace paddle
diff --git a/paddle/fluid/operators/test_leaky_relu_grad_grad_functor.h b/paddle/fluid/operators/test_leaky_relu_grad_grad_functor.h
index fe9bf969b1d..f416aa6e00f 100644
--- a/paddle/fluid/operators/test_leaky_relu_grad_grad_functor.h
+++ b/paddle/fluid/operators/test_leaky_relu_grad_grad_functor.h
@@ -46,7 +46,7 @@ struct LeakyReluGradGradEachElementFunctor {
       : ddx_(ddx), out_(out), alpha_(alpha), ddout_(ddout) {}
 
   HOSTDEVICE void operator()(int idx) {
-    if (out_[idx] >= 0) {
+    if (out_[idx] > 0) {
      ddout_[idx] = ddx_[idx];
    } else {
      ddout_[idx] = ddx_[idx] * alpha_;
-- 
GitLab
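
Background on the fix: leaky_relu computes out = x > 0 ? x : alpha * x, so when
alpha == 0 every negative input produces an output of exactly zero. The backward
functors above reconstruct the gradient from `out`, and the old comparisons
(`out >= 0` for the pass-through branch, `out < 0` for the alpha branch) would
therefore give those elements gradient 1 instead of alpha. Flipping the
comparisons to `out > 0` / `out <= 0` routes the out == 0 boundary to the alpha
branch, which is exactly what the new zero-alpha tests exercise. The following
is a minimal standalone sketch of that boundary case, not code from the Paddle
tree; leaky_relu_grad_from_out and its `fixed` flag are illustrative names.

    #include <cassert>

    // Backward pass reconstructed from the forward *output*, mirroring the
    // element-wise branch in LeakyReluGradFunctor (illustrative, not Paddle API).
    float leaky_relu_grad_from_out(float out, float dout, float alpha,
                                   bool fixed) {
      bool positive_side = fixed ? (out > 0.0f) : (out >= 0.0f);
      return positive_side ? dout : dout * alpha;
    }

    int main() {
      const float alpha = 0.0f;  // the degenerate case this patch fixes
      const float x = -2.0f;     // negative input
      // Forward: out == alpha * x == -0.0f, and IEEE-754 -0.0f >= 0.0f is true.
      const float out = x > 0.0f ? x : alpha * x;
      const float dout = 1.0f;

      // Old condition (out >= 0) wrongly takes the positive branch: grad 1.
      assert(leaky_relu_grad_from_out(out, dout, alpha, /*fixed=*/false) == 1.0f);
      // Fixed condition (out > 0) takes the alpha branch: grad 0, as expected.
      assert(leaky_relu_grad_from_out(out, dout, alpha, /*fixed=*/true) == 0.0f);
      return 0;
    }

For any alpha != 0, out == 0 only when x == 0, so the change only affects which
subgradient is returned at the kink; with alpha == 0 it corrects the gradient
for the entire negative half-line.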