提交 b920d2c2 编写于 作者: Y yuyang18

enable serial tests

上级 1b69c25c
......@@ -108,10 +108,7 @@ void StartServer(const std::string& endpoint) {
rpc_service_->RunSyncUpdate();
}
// NOTE(yuyang18) : This test is buggy.
// 1. We should not use port 8889 before check.
// 2. We should not use sleep(2) to sync threads.
TEST(PREFETCH, DISABLED_CPU) {
TEST(PREFETCH, CPU) {
// start up a server instance backend
std::thread server_thread(StartServer, "127.0.0.1:8889");
sleep(2);
......
......@@ -151,7 +151,7 @@ void StartServerNet(bool is_sparse, std::atomic<bool> *initialized) {
LOG(INFO) << "server exit";
}
TEST(SendRecvOp, DISABLED_CPUDense) {
TEST(SendRecvOp, CPUDense) {
std::atomic<bool> initialized{false};
std::thread server_thread(StartServerNet, false, &initialized);
while (!initialized) {
......@@ -197,7 +197,7 @@ TEST(SendRecvOp, DISABLED_CPUDense) {
paddle::operators::ListenAndServOp::ResetPort();
}
TEST(SendRecvOp, DISABLED_CPUSparse) {
TEST(SendRecvOp, CPUSparse) {
std::atomic<bool> initialized;
initialized = false;
std::thread server_thread(StartServerNet, true, &initialized);
......
......@@ -63,7 +63,7 @@ void StartServer(std::atomic<bool>* initialized) {
server_thread.join();
}
TEST(SendNcclId, DISABLED_Normal) {
TEST(SendNcclId, Normal) {
std::atomic<bool> initialized{false};
std::thread server_thread(StartServer, &initialized);
while (!initialized) {
......
......@@ -41,8 +41,10 @@ function(py_test_modules TARGET_NAME)
endfunction()
# Remove tests that need special handling (extra env vars or serial
# execution) so the generic loop below does not register them.
list(REMOVE_ITEM TEST_OPS test_warpctc_op)
list(REMOVE_ITEM TEST_OPS test_dist_train)
list(REMOVE_ITEM TEST_OPS test_parallel_executor_crf)
# Register every remaining test module with default settings.
foreach(TEST_OP ${TEST_OPS})
py_test_modules(${TEST_OP} MODULES ${TEST_OP})
endforeach(TEST_OP)
# Re-add the special-cased tests. test_warpctc_op needs FLAGS_warpctc_dir
# pointing at the warpctc library. All three run SERIAL — presumably they
# contend for shared resources (ports / devices) when run concurrently with
# other tests; confirm against the individual test files.
py_test_modules(test_warpctc_op MODULES test_warpctc_op ENVS FLAGS_warpctc_dir=${WARPCTC_LIB_DIR} SERIAL)
py_test_modules(test_dist_train MODULES test_dist_train SERIAL)
py_test_modules(test_parallel_executor_crf MODULES test_parallel_executor_crf SERIAL)
......@@ -168,28 +168,24 @@ class TestCRFModel(unittest.TestCase):
pe.run(feed=feeder.feed(cur_batch),
fetch_list=[avg_cost.name]))[0]
@unittest.skip("Hang when parallel execute")
def test_update_sparse_parameter_all_reduce(self):
    """Check convergence with sparse parameter updates under AllReduce."""
    strategy = fluid.BuildStrategy()
    strategy.reduce_strategy = \
        fluid.BuildStrategy.ReduceStrategy.AllReduce
    self.check_network_convergence(
        build_strategy=strategy, is_sparse=True)
@unittest.skip("Hang when parallel execute")
def test_update_dense_parameter_all_reduce(self):
    """Check convergence with dense parameter updates under AllReduce."""
    strategy = fluid.BuildStrategy()
    strategy.reduce_strategy = \
        fluid.BuildStrategy.ReduceStrategy.AllReduce
    self.check_network_convergence(
        build_strategy=strategy, is_sparse=False)
@unittest.skip("Hang when parallel execute")
def test_update_sparse_parameter_reduce(self):
    """Check convergence with sparse parameter updates under Reduce."""
    strategy = fluid.BuildStrategy()
    strategy.reduce_strategy = \
        fluid.BuildStrategy.ReduceStrategy.Reduce
    self.check_network_convergence(
        build_strategy=strategy, is_sparse=True)
@unittest.skip("Hang when parallel execute")
def test_update_dense_parameter_reduce(self):
build_strategy = fluid.BuildStrategy()
build_strategy.reduce_strategy = fluid.BuildStrategy.ReduceStrategy.Reduce
......
......@@ -210,11 +210,9 @@ class TestWarpCTCOp(OpTest):
self.outputs = {"Loss": loss}
self.attrs = {"blank": self.blank, "norm_by_times": self.norm_by_times}
@unittest.skip("This unittest could hang")
def test_check_output(self):
    # Forward check: run the op and compare its outputs (the "Loss" set up
    # in self.outputs) against the op's computed results via OpTest.check_output.
    self.check_output()
@unittest.skip("This unittest could hang")
def test_check_grad(self):
    # Gradient check of "Loss" w.r.t. the "Logits" input. The reference
    # gradient (self.gradient — presumably precomputed in setUp; confirm)
    # is exposed through the op's WarpCTCGrad output before checking.
    self.outputs['WarpCTCGrad'] = self.gradient
    self.check_grad(["Logits"], "Loss", max_relative_error=0.007)
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册