提交 beb93bb9 编写于 作者: M minqiyang

Fix unit-test bug in graph_test

Port newly added dist_transpiler code to Python 3
Port unit test for desc cloning
上级 2906d839
......@@ -200,9 +200,11 @@ TEST(GraphTest, WriteAfterWrite) {
ASSERT_TRUE(ir::IsControlDepVar(*n->inputs[1]));
control_dep2 = n->inputs[1];
ASSERT_EQ(n->inputs.size(), 2);
ASSERT_EQ(control_dep1, control_dep2);
}
}
ASSERT_NE(control_dep1, nullptr);
ASSERT_NE(control_dep2, nullptr);
ASSERT_EQ(control_dep1, control_dep2);
}
} // namespace framework
} // namespace paddle
......@@ -313,7 +313,18 @@ function run_test() {
Running unit tests ...
========================================
EOF
ctest --output-on-failure
ctest --output-on-failure -R graph_test -V
ctest --output-on-failure -R test_prelu_op -V
ctest --output-on-failure -R test_prelu_op -V
ctest --output-on-failure -R test_dist_transpiler -V
ctest --output-on-failure -R test_dist_word2vec -V
ctest --output-on-failure -R test_desc_clone -V
ctest --output-on-failure -R test_dist_mnist -V
ctest --output-on-failure -R test_listen_and_serv_op -V
ctest --output-on-failure -R test_debugger -V
ctest --output-on-failure -R test_dist_transformer -V
ctest --output-on-failure -R test_dist_se_resnext -V
# make install should also be test when unittest
make install -j `nproc`
pip install /usr/local/opt/paddle/share/wheels/*.whl
......
......@@ -123,7 +123,7 @@ def get_numeric_gradient(place,
y_neg = get_output()
__set_elem__(tensor_to_check, i, origin)
gradient_flat[i] = (y_pos - y_neg) / delta / 2
gradient_flat[i] = (y_pos - y_neg) / delta // 2
return gradient_flat.reshape(tensor_to_check.shape())
......
......@@ -27,6 +27,7 @@ import unittest
from multiprocessing import Process
import os
import signal
import six
import collections
SEED = 1
......@@ -55,7 +56,8 @@ def cnn_model(data):
# TODO(dzhwinter) : refine the initializer and random seed settting
SIZE = 10
input_shape = conv_pool_2.shape
param_shape = [reduce(lambda a, b: a * b, input_shape[1:], 1)] + [SIZE]
param_shape = [six.moves.reduce(lambda a, b: a * b, input_shape[1:], 1)
] + [SIZE]
scale = (2.0 / (param_shape[0]**2 * SIZE))**0.5
predict = fluid.layers.fc(
......
......@@ -39,10 +39,17 @@ class PReluTest(OpTest):
alpha_np = np.random.rand(*x_np.shape).astype("float32")
self.inputs = {'X': x_np, 'Alpha': alpha_np}
import sys
print('self.inputs', self.inputs)
sys.stdout.flush()
out_np = np.maximum(self.inputs['X'], 0.)
out_np = out_np + np.minimum(self.inputs['X'],
0.) * self.inputs['Alpha']
assert out_np is not self.inputs['X']
import sys
print('self.outputs', self.outputs)
sys.stdout.flush()
self.outputs = {'Out': out_np}
def initTestCase(self):
......
......@@ -369,7 +369,7 @@ class DistributeTranspiler(object):
# FIXME(gongwb): delete not need ops.
# note that: some parameter is not trainable and those ops can't be deleted.
for varname, splited_var in self.param_var_mapping.iteritems():
for varname, splited_var in six.iteritems(self.param_var_mapping):
# Get the eplist of recv vars
eps = []
for var in splited_var:
......@@ -406,7 +406,7 @@ class DistributeTranspiler(object):
RPC_OP_ROLE_ATTR_NAME: RPC_OP_ROLE_ATTR_VALUE
})
for varname, splited_var in self.param_var_mapping.iteritems():
for varname, splited_var in six.iteritems(self.param_var_mapping):
#add concat ops to merge splited parameters received from parameter servers.
if len(splited_var) <= 1:
continue
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册