Commit b50a5076 authored by qijun

add activation operator python test

Parent: d736fc0e
@@ -61,9 +61,9 @@ struct ExpGrad {
                   const framework::Tensor& X, const framework::Tensor& Y,
                   const framework::Tensor& dY, framework::Tensor* dX) {
     auto dx = framework::EigenVector<T>::Flatten(*dX);
-    auto dy = framework::EigenVector<T>::Flatten(dY);
+    auto y = framework::EigenVector<T>::Flatten(Y);
     auto* place = device_context.template get_eigen_device<Place>();
-    dx.device(*place) = dy.exp();
+    dx.device(*place) = y;
   }
 };
...
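Side note (not part of the commit): the change is consistent with the chain rule for the exponential. Since the forward output is y = exp(x), the local derivative is d(exp(x))/dx = exp(x) = y, so the backward kernel can read the saved forward tensor Y instead of exponentiating the incoming gradient, which is what the removed dy.exp() line did. The hunk is truncated, so whether the full expression also multiplies by dY is not visible here; the NumPy sketch below only illustrates the identity, with tensor shapes borrowed from the test:

```python
import numpy as np

# Chain rule for exp: if y = exp(x), then d(exp)/dx = exp(x) = y,
# so the gradient w.r.t. x is dX = dY * Y and the forward output can be reused.
x = np.random.uniform(-1, 1, [11, 17]).astype("float32")
y = np.exp(x)                                               # saved forward output Y
dy = np.random.uniform(-1, 1, x.shape).astype("float32")    # upstream gradient dY

dx_via_saved_output = dy * y        # reuses Y, no second exponential
dx_via_recompute = dy * np.exp(x)   # recomputes exp(x) from the input

assert np.allclose(dx_via_saved_output, dx_via_recompute)
```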
@@ -3,9 +3,9 @@ import numpy as np
 from op_test import OpTest

-class TestExp(OpTest):
+class TestRelu(OpTest):
     def setUp(self):
-        self.op_type = "exp"
+        self.op_type = "relu"
         self.inputs = {
             'X': np.random.uniform(-1, 1, [11, 17]).astype("float32")
         }
...
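The test file is likewise shown only in part. As an illustration of what a complete case under the OpTest harness typically looks like, the sketch below fills in the usual output and gradient checks; the output key 'Y', the nudge of inputs away from zero, and the gradient tolerance are assumptions for illustration, not values read from this commit:

```python
import unittest
import numpy as np
from op_test import OpTest


class TestRelu(OpTest):
    def setUp(self):
        self.op_type = "relu"
        x = np.random.uniform(-1, 1, [11, 17]).astype("float32")
        # Nudge values away from 0 so the numerical gradient check does not
        # straddle the ReLU kink (a common trick in activation-op tests).
        x[np.abs(x) < 0.005] = 0.02
        self.inputs = {'X': x}
        # Output key 'Y' is assumed here; relu forward is an elementwise max with 0.
        self.outputs = {'Y': np.maximum(self.inputs['X'], 0)}

    def test_check_output(self):
        self.check_output()

    def test_check_grad(self):
        # Tolerance is illustrative, not taken from the commit.
        self.check_grad(['X'], 'Y', max_relative_error=0.007)


if __name__ == "__main__":
    unittest.main()
```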