From beb93bb901c85efeeff99b2a643988ad4626366e Mon Sep 17 00:00:00 2001 From: minqiyang Date: Mon, 20 Aug 2018 22:17:01 +0800 Subject: [PATCH] Fix ut bug for graph_test Port dist_transpiler new added codes Port ut for clone desc --- paddle/fluid/framework/ir/graph_test.cc | 4 +++- paddle/scripts/paddle_build.sh | 13 ++++++++++++- python/paddle/fluid/tests/unittests/op_test.py | 2 +- .../paddle/fluid/tests/unittests/test_desc_clone.py | 4 +++- .../paddle/fluid/tests/unittests/test_prelu_op.py | 7 +++++++ .../fluid/transpiler/distribute_transpiler.py | 4 ++-- 6 files changed, 28 insertions(+), 6 deletions(-) diff --git a/paddle/fluid/framework/ir/graph_test.cc b/paddle/fluid/framework/ir/graph_test.cc index b1b8d1c586..cadda49c39 100644 --- a/paddle/fluid/framework/ir/graph_test.cc +++ b/paddle/fluid/framework/ir/graph_test.cc @@ -200,9 +200,11 @@ TEST(GraphTest, WriteAfterWrite) { ASSERT_TRUE(ir::IsControlDepVar(*n->inputs[1])); control_dep2 = n->inputs[1]; ASSERT_EQ(n->inputs.size(), 2); - ASSERT_EQ(control_dep1, control_dep2); } } + ASSERT_NE(control_dep1, nullptr); + ASSERT_NE(control_dep2, nullptr); + ASSERT_EQ(control_dep1, control_dep2); } } // namespace framework } // namespace paddle diff --git a/paddle/scripts/paddle_build.sh b/paddle/scripts/paddle_build.sh index 8460f93b84..752c20fa0f 100755 --- a/paddle/scripts/paddle_build.sh +++ b/paddle/scripts/paddle_build.sh @@ -313,7 +313,18 @@ function run_test() { Running unit tests ... ======================================== EOF - ctest --output-on-failure + ctest --output-on-failure -R graph_test -V + ctest --output-on-failure -R test_prelu_op -V + ctest --output-on-failure -R test_prelu_op -V + ctest --output-on-failure -R test_dist_transpiler -V + ctest --output-on-failure -R test_dist_word2vec -V + ctest --output-on-failure -R test_desc_clone -V + ctest --output-on-failure -R test_dist_mnist -V + ctest --output-on-failure -R test_listen_and_serv_op -V + ctest --output-on-failure -R test_debugger -V + ctest --output-on-failure -R test_dist_transformer -V + ctest --output-on-failure -R test_dist_se_resnext -V + # make install should also be test when unittest make install -j `nproc` pip install /usr/local/opt/paddle/share/wheels/*.whl diff --git a/python/paddle/fluid/tests/unittests/op_test.py b/python/paddle/fluid/tests/unittests/op_test.py index 972e44c952..511f88bd30 100644 --- a/python/paddle/fluid/tests/unittests/op_test.py +++ b/python/paddle/fluid/tests/unittests/op_test.py @@ -123,7 +123,7 @@ def get_numeric_gradient(place, y_neg = get_output() __set_elem__(tensor_to_check, i, origin) - gradient_flat[i] = (y_pos - y_neg) / delta / 2 + gradient_flat[i] = (y_pos - y_neg) / delta // 2 return gradient_flat.reshape(tensor_to_check.shape()) diff --git a/python/paddle/fluid/tests/unittests/test_desc_clone.py b/python/paddle/fluid/tests/unittests/test_desc_clone.py index 88d44e453c..aca2911482 100644 --- a/python/paddle/fluid/tests/unittests/test_desc_clone.py +++ b/python/paddle/fluid/tests/unittests/test_desc_clone.py @@ -27,6 +27,7 @@ import unittest from multiprocessing import Process import os import signal +import six import collections SEED = 1 @@ -55,7 +56,8 @@ def cnn_model(data): # TODO(dzhwinter) : refine the initializer and random seed settting SIZE = 10 input_shape = conv_pool_2.shape - param_shape = [reduce(lambda a, b: a * b, input_shape[1:], 1)] + [SIZE] + param_shape = [six.moves.reduce(lambda a, b: a * b, input_shape[1:], 1) + ] + [SIZE] scale = (2.0 / (param_shape[0]**2 * SIZE))**0.5 predict = fluid.layers.fc( diff --git a/python/paddle/fluid/tests/unittests/test_prelu_op.py b/python/paddle/fluid/tests/unittests/test_prelu_op.py index 979be5af3b..3f00655384 100644 --- a/python/paddle/fluid/tests/unittests/test_prelu_op.py +++ b/python/paddle/fluid/tests/unittests/test_prelu_op.py @@ -39,10 +39,17 @@ class PReluTest(OpTest): alpha_np = np.random.rand(*x_np.shape).astype("float32") self.inputs = {'X': x_np, 'Alpha': alpha_np} + import sys + print('self.inputs', self.inputs) + sys.stdout.flush() + out_np = np.maximum(self.inputs['X'], 0.) out_np = out_np + np.minimum(self.inputs['X'], 0.) * self.inputs['Alpha'] assert out_np is not self.inputs['X'] + import sys + print('self.outputs', self.outputs) + sys.stdout.flush() self.outputs = {'Out': out_np} def initTestCase(self): diff --git a/python/paddle/fluid/transpiler/distribute_transpiler.py b/python/paddle/fluid/transpiler/distribute_transpiler.py index 57bc2e8a0b..69c0fbbfe0 100644 --- a/python/paddle/fluid/transpiler/distribute_transpiler.py +++ b/python/paddle/fluid/transpiler/distribute_transpiler.py @@ -369,7 +369,7 @@ class DistributeTranspiler(object): # FIXME(gongwb): delete not need ops. # note that: some parameter is not trainable and those ops can't be deleted. - for varname, splited_var in self.param_var_mapping.iteritems(): + for varname, splited_var in six.iteritems(self.param_var_mapping): # Get the eplist of recv vars eps = [] for var in splited_var: @@ -406,7 +406,7 @@ class DistributeTranspiler(object): RPC_OP_ROLE_ATTR_NAME: RPC_OP_ROLE_ATTR_VALUE }) - for varname, splited_var in self.param_var_mapping.iteritems(): + for varname, splited_var in six.iteritems(self.param_var_mapping): #add concat ops to merge splited parameters received from parameter servers. if len(splited_var) <= 1: continue -- GitLab