diff --git a/python/paddle/v2/framework/tests/test_activation_op.py b/python/paddle/v2/framework/tests/test_activation_op.py
new file mode 100644
index 0000000000000000000000000000000000000000..23ff58439646f64b80dacf6fb5d3fa3558ca0435
--- /dev/null
+++ b/python/paddle/v2/framework/tests/test_activation_op.py
@@ -0,0 +1,50 @@
+import unittest
+import numpy as np
+from op_test import OpTest
+
+
+class TestExp(OpTest):
+    def setUp(self):
+        self.op_type = "exp"
+        self.inputs = {
+            'X': np.random.uniform(0.1, 1, [11, 17]).astype("float32")
+        }
+        self.outputs = {'Y': np.exp(self.inputs['X'])}
+
+    def test_check_output(self):
+        self.check_output()
+
+    def test_check_grad(self):
+        self.check_grad(['X'], 'Y', max_relative_error=0.007)
+
+
+class TestRelu(OpTest):
+    def setUp(self):
+        self.op_type = "relu"
+        self.inputs = {'X': np.random.uniform(-1, 1, [4, 4]).astype("float32")}
+        self.outputs = {'Y': np.maximum(self.inputs['X'], 0)}
+
+    def test_check_output(self):
+        self.check_output()
+
+    def test_check_grad(self):
+        self.check_grad(['X'], 'Y', max_relative_error=0.007)
+
+
+class TestSigmoid(OpTest):
+    def setUp(self):
+        self.op_type = "sigmoid"
+        self.inputs = {
+            'X': np.random.uniform(0.1, 1, [11, 17]).astype("float32")
+        }
+        self.outputs = {'Y': 1 / (1 + np.exp(-self.inputs['X']))}
+
+    def test_check_output(self):
+        self.check_output()
+
+    def test_check_grad(self):
+        self.check_grad(['X'], 'Y', max_relative_error=0.007)
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/python/paddle/v2/framework/tests/test_exp_op.py b/python/paddle/v2/framework/tests/test_exp_op.py
deleted file mode 100644
index 0ec41e56a03fa03ec23b8caa5b6fd00d250647ed..0000000000000000000000000000000000000000
--- a/python/paddle/v2/framework/tests/test_exp_op.py
+++ /dev/null
@@ -1,22 +0,0 @@
-import unittest
-import numpy as np
-from op_test import OpTest
-
-
-class TestExp(OpTest):
-    def setUp(self):
-        self.op_type = "exp"
-        self.inputs = {
-            'X': np.random.uniform(0.1, 1, [11, 17]).astype("float32")
-        }
-        self.outputs = {'Y': np.exp(self.inputs['X'])}
-
-    def test_check_output(self):
-        self.check_output()
-
-    def test_check_grad(self):
-        self.check_grad(['X'], 'Y', max_relative_error=0.007)
-
-
-if __name__ == "__main__":
-    unittest.main()
diff --git a/python/paddle/v2/framework/tests/test_relu_op.py b/python/paddle/v2/framework/tests/test_relu_op.py
deleted file mode 100644
index c9af0c2ba7e0d99c83898bfc6744ecf89cbfc1c8..0000000000000000000000000000000000000000
--- a/python/paddle/v2/framework/tests/test_relu_op.py
+++ /dev/null
@@ -1,20 +0,0 @@
-import unittest
-import numpy as np
-from op_test import OpTest
-
-
-class TestRelu(OpTest):
-    def setUp(self):
-        self.op_type = "relu"
-        self.inputs = {'X': np.random.uniform(-1, 1, [4, 4]).astype("float32")}
-        self.outputs = {'Y': np.maximum(self.inputs['X'], 0)}
-
-    def test_check_output(self):
-        self.check_output()
-
-    def test_check_grad(self):
-        self.check_grad(['X'], 'Y', max_relative_error=0.007)
-
-
-if __name__ == "__main__":
-    unittest.main()
diff --git a/python/paddle/v2/framework/tests/test_sigmoid_op.py b/python/paddle/v2/framework/tests/test_sigmoid_op.py
deleted file mode 100644
index cf05e934d5f83544dc8fa78d6b3b54a2a38d55b3..0000000000000000000000000000000000000000
--- a/python/paddle/v2/framework/tests/test_sigmoid_op.py
+++ /dev/null
@@ -1,22 +0,0 @@
-import unittest
-import numpy as np
-from op_test import OpTest
-
-
-class TestSigmoid(OpTest):
-    def setUp(self):
-        self.op_type = "sigmoid"
-        self.inputs = {
-            'X': np.random.uniform(0.1, 1, [11, 17]).astype("float32")
-        }
-        self.outputs = {'Y': 1 / (1 + np.exp(-self.inputs['X']))}
-
-    def test_check_output(self):
-        self.check_output()
-
-    def test_check_grad(self):
-        self.check_grad(['X'], 'Y', max_relative_error=0.007)
-
-
-if __name__ == "__main__":
-    unittest.main()