From f8141d90c845c71cda03df10649b0dfc747f2c1a Mon Sep 17 00:00:00 2001
From: Yu Yang
Date: Mon, 19 Mar 2018 15:16:40 +0800
Subject: [PATCH] Debug

---
 paddle/fluid/framework/parallel_executor.cc            |  1 +
 .../tests/unittests/test_parallel_executor.py          | 18 +++++++++---------
 2 files changed, 10 insertions(+), 9 deletions(-)

diff --git a/paddle/fluid/framework/parallel_executor.cc b/paddle/fluid/framework/parallel_executor.cc
index b3bf2b8fb..c42101e21 100644
--- a/paddle/fluid/framework/parallel_executor.cc
+++ b/paddle/fluid/framework/parallel_executor.cc
@@ -345,6 +345,7 @@ struct NCCLAllReduceOpHandle : public OpHandle {
   }
 
   void Wait(platform::DeviceContext *waited_dev) override {
+    VLOG(3) << "Wait NCCL AllReduce";
     this->dev_ctx_.at(waited_dev->GetPlace())->Wait();
   }
 };
diff --git a/python/paddle/fluid/tests/unittests/test_parallel_executor.py b/python/paddle/fluid/tests/unittests/test_parallel_executor.py
index e8976ff05..e156d5b60 100644
--- a/python/paddle/fluid/tests/unittests/test_parallel_executor.py
+++ b/python/paddle/fluid/tests/unittests/test_parallel_executor.py
@@ -72,12 +72,12 @@ class ParallelExecutor(unittest.TestCase):
         first_loss = numpy.array(fluid.global_scope().find_var('fetched_var')
                                  .get_lod_tensor_array()[0])
         print first_loss
-        #
-        # for i in xrange(10):
-        #     exe.run([], 'fetched_var')
-        # exe.run([loss.name], 'fetched_var')
-        # last_loss = numpy.array(fluid.global_scope().find_var('fetched_var')
-        #                         .get_lod_tensor_array()[0])
-        #
-        # print first_loss, last_loss
-        # self.assertGreater(first_loss[0], last_loss[0])
+
+        for i in xrange(10):
+            exe.run([], 'fetched_var')
+        exe.run([loss.name], 'fetched_var')
+        last_loss = numpy.array(fluid.global_scope().find_var('fetched_var')
+                                .get_lod_tensor_array()[0])
+
+        print first_loss, last_loss
+        self.assertGreater(first_loss[0], last_loss[0])
-- 
GitLab