diff --git a/paddle/operators/math/activation_functor.h b/paddle/operators/math/activation_functor.h
index 7e15607f4627d727dcc49b1febe13a2f41affa95..1e9bdd142ee2dabf9113aed26105aab575b730f8 100644
--- a/paddle/operators/math/activation_functor.h
+++ b/paddle/operators/math/activation_functor.h
@@ -61,9 +61,9 @@ struct ExpGrad {
                   const framework::Tensor& X, const framework::Tensor& Y,
                   const framework::Tensor& dY, framework::Tensor* dX) {
     auto dx = framework::EigenVector<T>::Flatten(*dX);
-    auto dy = framework::EigenVector<T>::Flatten(dY);
+    auto y = framework::EigenVector<T>::Flatten(Y);
     auto* place = device_context.template get_eigen_device<Place>();
-    dx.device(*place) = dy.exp();
+    dx.device(*place) = y;
   }
 };
 
diff --git a/python/paddle/v2/framework/tests/test_relu_op.py b/python/paddle/v2/framework/tests/test_relu_op.py
index 07b7113d791cab3171efbfebb0e240869913374c..58a0872db4185784feaffaa5dccbde9934dbb21e 100644
--- a/python/paddle/v2/framework/tests/test_relu_op.py
+++ b/python/paddle/v2/framework/tests/test_relu_op.py
@@ -3,9 +3,9 @@ import numpy as np
 from op_test import OpTest
 
 
-class TestExp(OpTest):
+class TestRelu(OpTest):
     def setUp(self):
-        self.op_type = "exp"
+        self.op_type = "relu"
         self.inputs = {
             'X': np.random.uniform(-1, 1, [11, 17]).astype("float32")
         }
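
A note on the first hunk: for y = exp(x), the analytic derivative is d(exp(x))/dx = exp(x), which equals the forward output y, so the gradient functor can read Y instead of exponentiating dY as the old line did. The snippet below is a minimal NumPy illustration of that identity only, not of the functor's full contract; how the upstream gradient dY is folded in lies outside what this hunk shows.

import numpy as np

# Same input shape and range as the test file uses.
x = np.random.uniform(-1, 1, [11, 17]).astype("float32")

# Forward pass of the exp op: y = exp(x).
y = np.exp(x)

# Analytic derivative: d(exp(x))/dx = exp(x), i.e. the forward output y itself,
# which is why the backward functor can reuse Y rather than recomputing exp().
analytic = y

# Finite-difference sanity check of that derivative.
eps = 1e-3
numeric = (np.exp(x + eps) - np.exp(x - eps)) / (2 * eps)
assert np.allclose(analytic, numeric, atol=1e-2)

The second hunk simply renames TestExp to TestRelu and switches op_type to "relu"; the outputs and gradient-check portions of test_relu_op.py fall outside the shown context.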