From c540aa040fe536999b8d4e018e619a09d21150e3 Mon Sep 17 00:00:00 2001
From: dangqingqing
Date: Fri, 4 Aug 2017 14:56:17 +0800
Subject: [PATCH] Refine unit test in op_test_util

---
 .../paddle/v2/framework/tests/op_test_util.py | 21 ++++++++++++-------
 .../v2/framework/tests/test_add_two_op.py     |  8 ++++---
 .../framework/tests/test_cross_entropy_op.py  | 10 +++++----
 .../paddle/v2/framework/tests/test_mean_op.py |  4 ++--
 .../paddle/v2/framework/tests/test_mul_op.py  |  8 ++++---
 .../v2/framework/tests/test_rowwise_add_op.py |  8 ++++---
 .../paddle/v2/framework/tests/test_sgd_op.py  | 11 ++++++----
 .../v2/framework/tests/test_sigmoid_op.py     |  4 ++--
 .../v2/framework/tests/test_softmax_op.py     |  6 ++++--
 9 files changed, 49 insertions(+), 31 deletions(-)

diff --git a/python/paddle/v2/framework/tests/op_test_util.py b/python/paddle/v2/framework/tests/op_test_util.py
index 98fae1b975..cad7b0fed0 100644
--- a/python/paddle/v2/framework/tests/op_test_util.py
+++ b/python/paddle/v2/framework/tests/op_test_util.py
@@ -33,23 +33,28 @@ class OpTestMeta(type):
 
         for place in places:
             for in_name in func.all_input_args:
-                if hasattr(self, in_name):
+                if hasattr(self, "inputs") and in_name in self.inputs:
                     kwargs[in_name] = in_name
                     var = scope.new_var(in_name).get_tensor()
-                    arr = getattr(self, in_name)
+                    arr = self.inputs[in_name]
                     var.set_dims(arr.shape)
                     var.set(arr, place)
                 else:
                     kwargs[in_name] = "@EMPTY@"
 
             for out_name in func.all_output_args:
-                if hasattr(self, out_name):
-                    kwargs[out_name] = out_name
-                    scope.new_var(out_name).get_tensor()
+                if not hasattr(self, "outputs"):
+                    raise ValueError(
+                        "The test op must set the self.outputs dict.")
+                if out_name not in self.outputs:
+                    raise ValueError("%s is not in self.outputs dict." %
+                                     (out_name))
+                kwargs[out_name] = out_name
+                scope.new_var(out_name).get_tensor()
 
             for attr_name in func.all_attr_args:
-                if hasattr(self, attr_name):
-                    kwargs[attr_name] = getattr(self, attr_name)
+                if hasattr(self, "attrs") and attr_name in self.attrs:
+                    kwargs[attr_name] = self.attrs[attr_name]
 
             op = func(**kwargs)
 
@@ -60,7 +65,7 @@ class OpTestMeta(type):
 
             for out_name in func.all_output_args:
                 actual = numpy.array(scope.find_var(out_name).get_tensor())
-                expect = getattr(self, out_name)
+                expect = self.outputs[out_name]
                 # TODO(qijun) The default decimal is 7, but numpy.dot and eigen.mul
                 # has some diff, and could not pass unittest. So I set decimal 3 here.
                 # And I will check this in future.
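Reviewer note: under the refined convention above, a test class no longer sets one
attribute per tensor name; it fills self.inputs, self.outputs, and (optionally)
self.attrs dicts whose keys must match the names the operator declares. A minimal
sketch of a test written against the new convention follows; the "scale" operator
and its "scale" attribute are hypothetical stand-ins for illustration, not
operators defined in this patch:

    import unittest
    import numpy
    from op_test_util import OpTestMeta


    class TestScaleOp(unittest.TestCase):
        # OpTestMeta generates the actual test method from setUp's data.
        __metaclass__ = OpTestMeta

        def setUp(self):
            self.type = "scale"  # hypothetical operator name
            self.inputs = {'X': numpy.random.random((8, 4)).astype("float32")}
            self.attrs = {'scale': 2.0}  # read via self.attrs, not getattr
            self.outputs = {'Out': self.inputs['X'] * 2.0}


    if __name__ == '__main__':
        unittest.main()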
diff --git a/python/paddle/v2/framework/tests/test_add_two_op.py b/python/paddle/v2/framework/tests/test_add_two_op.py
index 6e6643201b..8ef48f4727 100644
--- a/python/paddle/v2/framework/tests/test_add_two_op.py
+++ b/python/paddle/v2/framework/tests/test_add_two_op.py
@@ -12,9 +12,11 @@ class TestAddOp(unittest.TestCase):
 
     def setUp(self):
         self.type = "add_two"
-        self.X = numpy.random.random((102, 105)).astype("float32")
-        self.Y = numpy.random.random((102, 105)).astype("float32")
-        self.Out = self.X + self.Y
+        self.inputs = {
+            'X': numpy.random.random((102, 105)).astype("float32"),
+            'Y': numpy.random.random((102, 105)).astype("float32")
+        }
+        self.outputs = {'Out': self.inputs['X'] + self.inputs['Y']}
 
 
 class TestAddGradOp(unittest.TestCase):
diff --git a/python/paddle/v2/framework/tests/test_cross_entropy_op.py b/python/paddle/v2/framework/tests/test_cross_entropy_op.py
index 609c56535e..4242073787 100644
--- a/python/paddle/v2/framework/tests/test_cross_entropy_op.py
+++ b/python/paddle/v2/framework/tests/test_cross_entropy_op.py
@@ -7,15 +7,17 @@ class TestSGD(unittest.TestCase):
     __metaclass__ = OpTestMeta
 
     def setUp(self):
+        # TODO: this unit test does not pass yet
        self.type = "onehot_cross_entropy"
        batch_size = 100
        class_num = 10
-        self.X = numpy.random.random((batch_size, class_num)).astype("float32")
-        self.label = 5 * numpy.ones(batch_size).astype("int32")
+        X = numpy.random.random((batch_size, class_num)).astype("float32")
+        label = 5 * numpy.ones(batch_size).astype("int32")
+        self.inputs = {'X': X, 'label': label}
        Y = []
        for i in range(0, batch_size):
-            Y.append(-numpy.log(self.X[i][self.label[i]]))
-        self.Y = numpy.array(Y).astype("float32")
+            Y.append(-numpy.log(X[i][label[i]]))
+        self.outputs = {'Y': numpy.array(Y).astype("float32")}
 
 
 if __name__ == "__main__":
diff --git a/python/paddle/v2/framework/tests/test_mean_op.py b/python/paddle/v2/framework/tests/test_mean_op.py
index 78fff1eeff..b5d52b9056 100644
--- a/python/paddle/v2/framework/tests/test_mean_op.py
+++ b/python/paddle/v2/framework/tests/test_mean_op.py
@@ -8,8 +8,8 @@ class TestMeanOp(unittest.TestCase):
 
     def setUp(self):
         self.type = "mean"
-        self.X = np.random.random((32, 784)).astype("float32")
-        self.Out = np.mean(self.X)
+        self.inputs = {'X': np.random.random((32, 784)).astype("float32")}
+        self.outputs = {'Out': np.mean(self.inputs['X'])}
 
 
 if __name__ == '__main__':
diff --git a/python/paddle/v2/framework/tests/test_mul_op.py b/python/paddle/v2/framework/tests/test_mul_op.py
index e1ac66d3a4..ec0ac99156 100644
--- a/python/paddle/v2/framework/tests/test_mul_op.py
+++ b/python/paddle/v2/framework/tests/test_mul_op.py
@@ -8,9 +8,11 @@ class TestMulOp(unittest.TestCase):
 
     def setUp(self):
         self.type = "mul"
-        self.X = np.random.random((32, 84)).astype("float32")
-        self.Y = np.random.random((84, 100)).astype("float32")
-        self.Out = np.dot(self.X, self.Y)
+        self.inputs = {
+            'X': np.random.random((32, 84)).astype("float32"),
+            'Y': np.random.random((84, 100)).astype("float32")
+        }
+        self.outputs = {'Out': np.dot(self.inputs['X'], self.inputs['Y'])}
 
 
 if __name__ == '__main__':
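Reviewer note on the cross-entropy test above: the per-sample Python loop can be
cross-checked against numpy fancy indexing. This is only a sanity sketch for
review purposes, not part of the patch:

    import numpy

    batch_size, class_num = 100, 10
    X = numpy.random.random((batch_size, class_num)).astype("float32")
    label = 5 * numpy.ones(batch_size).astype("int32")

    # Vectorized form of the loop: pick X[i, label[i]] for each row i.
    Y = -numpy.log(X[numpy.arange(batch_size), label]).astype("float32")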
diff --git a/python/paddle/v2/framework/tests/test_rowwise_add_op.py b/python/paddle/v2/framework/tests/test_rowwise_add_op.py
index 04abc14ee1..f8521eb517 100644
--- a/python/paddle/v2/framework/tests/test_rowwise_add_op.py
+++ b/python/paddle/v2/framework/tests/test_rowwise_add_op.py
@@ -8,9 +8,11 @@ class TestRowwiseAddOp(unittest.TestCase):
 
     def setUp(self):
         self.type = "rowwise_add"
-        self.X = np.random.random((32, 84)).astype("float32")
-        self.b = np.random.random(84).astype("float32")
-        self.Out = np.add(self.X, self.b)
+        self.inputs = {
+            'X': np.random.random((32, 84)).astype("float32"),
+            'b': np.random.random(84).astype("float32")
+        }
+        self.outputs = {'Out': np.add(self.inputs['X'], self.inputs['b'])}
 
 
 if __name__ == '__main__':
diff --git a/python/paddle/v2/framework/tests/test_sgd_op.py b/python/paddle/v2/framework/tests/test_sgd_op.py
index ca03cc11ab..e5f9ef865e 100644
--- a/python/paddle/v2/framework/tests/test_sgd_op.py
+++ b/python/paddle/v2/framework/tests/test_sgd_op.py
@@ -8,10 +8,13 @@ class TestSGD(unittest.TestCase):
 
     def setUp(self):
         self.type = "sgd"
-        self.param = numpy.random.random((102, 105)).astype("float32")
-        self.grad = numpy.random.random((102, 105)).astype("float32")
-        self.learning_rate = 0.1
-        self.param_out = self.param - self.learning_rate * self.grad
+        w = numpy.random.random((102, 105)).astype("float32")
+        g = numpy.random.random((102, 105)).astype("float32")
+        lr = 0.1
+
+        self.inputs = {'param': w, 'grad': g}
+        self.attrs = {'learning_rate': lr}
+        self.outputs = {'param_out': w - lr * g}
 
 
 if __name__ == "__main__":
diff --git a/python/paddle/v2/framework/tests/test_sigmoid_op.py b/python/paddle/v2/framework/tests/test_sigmoid_op.py
index 50044a122f..2610bcf163 100644
--- a/python/paddle/v2/framework/tests/test_sigmoid_op.py
+++ b/python/paddle/v2/framework/tests/test_sigmoid_op.py
@@ -8,8 +8,8 @@ class TestSigmoidOp(unittest.TestCase):
 
     def setUp(self):
         self.type = "sigmoid"
-        self.X = np.random.random((32, 100)).astype("float32")
-        self.Y = 1 / (1 + np.exp(-self.X))
+        self.inputs = {'X': np.random.random((32, 100)).astype("float32")}
+        self.outputs = {'Y': 1 / (1 + np.exp(-self.inputs['X']))}
 
 
 if __name__ == '__main__':
diff --git a/python/paddle/v2/framework/tests/test_softmax_op.py b/python/paddle/v2/framework/tests/test_softmax_op.py
index c808881287..98ca8ddc86 100644
--- a/python/paddle/v2/framework/tests/test_softmax_op.py
+++ b/python/paddle/v2/framework/tests/test_softmax_op.py
@@ -19,8 +19,10 @@ class TestSoftmaxOp(unittest.TestCase):
 
     def setUp(self):
         self.type = "softmax"
-        self.X = np.random.random((32, 100)).astype("float32")
-        self.Y = np.apply_along_axis(stable_softmax, 1, self.X)
+        self.inputs = {'X': np.random.random((32, 100)).astype("float32")}
+        self.outputs = {
+            'Y': np.apply_along_axis(stable_softmax, 1, self.inputs['X'])
+        }
 
 
 class TestSoftmaxGradOp(unittest.TestCase):
-- 
GitLab
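Reviewer note: the softmax test relies on a stable_softmax helper whose body lies
outside the hunks shown here. For reference, a standard numerically stable
formulation for a 1-D row (which is what np.apply_along_axis passes) looks like
the sketch below; this is an assumption about the helper, not code taken from the
repository:

    import numpy as np


    def stable_softmax(x):
        # Assumed implementation: shift by the row max so exp() cannot
        # overflow; the shift cancels out in the normalized ratio.
        shifted = x - np.max(x)
        exps = np.exp(shifted)
        return exps / np.sum(exps)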