From 3110bf9a9aaa8cbc3f52bd171b12340b299481db Mon Sep 17 00:00:00 2001
From: qijun
Date: Thu, 14 Sep 2017 07:37:31 +0800
Subject: [PATCH] merge activation operator python tests

---
 .../v2/framework/tests/test_activation_op.py  | 50 +++++++++++++++++++
 .../paddle/v2/framework/tests/test_exp_op.py  | 22 --------
 .../paddle/v2/framework/tests/test_relu_op.py | 20 --------
 .../v2/framework/tests/test_sigmoid_op.py     | 22 --------
 4 files changed, 50 insertions(+), 64 deletions(-)
 create mode 100644 python/paddle/v2/framework/tests/test_activation_op.py
 delete mode 100644 python/paddle/v2/framework/tests/test_exp_op.py
 delete mode 100644 python/paddle/v2/framework/tests/test_relu_op.py
 delete mode 100644 python/paddle/v2/framework/tests/test_sigmoid_op.py

diff --git a/python/paddle/v2/framework/tests/test_activation_op.py b/python/paddle/v2/framework/tests/test_activation_op.py
new file mode 100644
index 00000000000..23ff5843964
--- /dev/null
+++ b/python/paddle/v2/framework/tests/test_activation_op.py
@@ -0,0 +1,50 @@
+import unittest
+import numpy as np
+from op_test import OpTest
+
+
+class TestExp(OpTest):
+    def setUp(self):
+        self.op_type = "exp"
+        self.inputs = {
+            'X': np.random.uniform(0.1, 1, [11, 17]).astype("float32")
+        }
+        self.outputs = {'Y': np.exp(self.inputs['X'])}
+
+    def test_check_output(self):
+        self.check_output()
+
+    def test_check_grad(self):
+        self.check_grad(['X'], 'Y', max_relative_error=0.007)
+
+
+class TestRelu(OpTest):
+    def setUp(self):
+        self.op_type = "relu"
+        self.inputs = {'X': np.random.uniform(-1, 1, [4, 4]).astype("float32")}
+        self.outputs = {'Y': np.maximum(self.inputs['X'], 0)}
+
+    def test_check_output(self):
+        self.check_output()
+
+    def test_check_grad(self):
+        self.check_grad(['X'], 'Y', max_relative_error=0.007)
+
+
+class TestSigmoid(OpTest):
+    def setUp(self):
+        self.op_type = "sigmoid"
+        self.inputs = {
+            'X': np.random.uniform(0.1, 1, [11, 17]).astype("float32")
+        }
+        self.outputs = {'Y': 1 / (1 + np.exp(-self.inputs['X']))}
+
+    def test_check_output(self):
+        self.check_output()
+
+    def test_check_grad(self):
+        self.check_grad(['X'], 'Y', max_relative_error=0.007)
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/python/paddle/v2/framework/tests/test_exp_op.py b/python/paddle/v2/framework/tests/test_exp_op.py
deleted file mode 100644
index 0ec41e56a03..00000000000
--- a/python/paddle/v2/framework/tests/test_exp_op.py
+++ /dev/null
@@ -1,22 +0,0 @@
-import unittest
-import numpy as np
-from op_test import OpTest
-
-
-class TestExp(OpTest):
-    def setUp(self):
-        self.op_type = "exp"
-        self.inputs = {
-            'X': np.random.uniform(0.1, 1, [11, 17]).astype("float32")
-        }
-        self.outputs = {'Y': np.exp(self.inputs['X'])}
-
-    def test_check_output(self):
-        self.check_output()
-
-    def test_check_grad(self):
-        self.check_grad(['X'], 'Y', max_relative_error=0.007)
-
-
-if __name__ == "__main__":
-    unittest.main()
diff --git a/python/paddle/v2/framework/tests/test_relu_op.py b/python/paddle/v2/framework/tests/test_relu_op.py
deleted file mode 100644
index c9af0c2ba7e..00000000000
--- a/python/paddle/v2/framework/tests/test_relu_op.py
+++ /dev/null
@@ -1,20 +0,0 @@
-import unittest
-import numpy as np
-from op_test import OpTest
-
-
-class TestRelu(OpTest):
-    def setUp(self):
-        self.op_type = "relu"
-        self.inputs = {'X': np.random.uniform(-1, 1, [4, 4]).astype("float32")}
-        self.outputs = {'Y': np.maximum(self.inputs['X'], 0)}
-
-    def test_check_output(self):
-        self.check_output()
-
-    def test_check_grad(self):
-        self.check_grad(['X'], 'Y', max_relative_error=0.007)
-
-
-if __name__ == "__main__":
-    unittest.main()
diff --git a/python/paddle/v2/framework/tests/test_sigmoid_op.py b/python/paddle/v2/framework/tests/test_sigmoid_op.py
deleted file mode 100644
index cf05e934d5f..00000000000
--- a/python/paddle/v2/framework/tests/test_sigmoid_op.py
+++ /dev/null
@@ -1,22 +0,0 @@
-import unittest
-import numpy as np
-from op_test import OpTest
-
-
-class TestSigmoid(OpTest):
-    def setUp(self):
-        self.op_type = "sigmoid"
-        self.inputs = {
-            'X': np.random.uniform(0.1, 1, [11, 17]).astype("float32")
-        }
-        self.outputs = {'Y': 1 / (1 + np.exp(-self.inputs['X']))}
-
-    def test_check_output(self):
-        self.check_output()
-
-    def test_check_grad(self):
-        self.check_grad(['X'], 'Y', max_relative_error=0.007)
-
-
-if __name__ == "__main__":
-    unittest.main()
--
GitLab
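
For context, the merged tests all lean on OpTest's check_grad, which compares an operator's analytic gradient against a numerical estimate within max_relative_error. Below is a minimal standalone sketch of that kind of check for the sigmoid case, not PaddlePaddle's actual OpTest implementation; the helper numeric_grad is hypothetical and uses central finite differences.

    # Sketch only: illustrates the gradient check these tests rely on.
    # numeric_grad is a hypothetical helper, not part of OpTest.
    import numpy as np

    def numeric_grad(f, x, eps=1e-4):
        # Central finite-difference gradient of sum(f(x)) w.r.t. each x element.
        grad = np.zeros_like(x)
        it = np.nditer(x, flags=['multi_index'])
        while not it.finished:
            idx = it.multi_index
            orig = x[idx]
            x[idx] = orig + eps
            hi = f(x).sum()
            x[idx] = orig - eps
            lo = f(x).sum()
            x[idx] = orig
            grad[idx] = (hi - lo) / (2 * eps)
            it.iternext()
        return grad

    sigmoid = lambda v: 1 / (1 + np.exp(-v))
    x = np.random.uniform(0.1, 1, [11, 17]).astype("float64")
    analytic = sigmoid(x) * (1 - sigmoid(x))      # dY/dX for Y = sigmoid(X)
    numeric = numeric_grad(sigmoid, x.copy())
    rel_err = np.abs(analytic - numeric) / np.maximum(np.abs(numeric), 1e-8)
    assert rel_err.max() < 0.007                  # mirrors max_relative_error=0.007

The same pattern presumably explains why each deleted file's check_grad call survives unchanged in test_activation_op.py: only the file layout changed, not the tolerance or the checked gradient.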