From 2b7931d5c933efd91dfa3f25073a997dee3b00b7 Mon Sep 17 00:00:00 2001
From: Qiao Longfei
Date: Sat, 23 Feb 2019 09:52:13 +0800
Subject: [PATCH] refine code

test=develop
---
 paddle/fluid/framework/details/build_strategy.cc |  6 +++---
 python/paddle/fluid/compiler.py                  | 12 ++----------
 python/paddle/fluid/framework.py                 |  9 +++++++++
 python/paddle/fluid/parallel_executor.py         | 11 +----------
 4 files changed, 15 insertions(+), 23 deletions(-)

diff --git a/paddle/fluid/framework/details/build_strategy.cc b/paddle/fluid/framework/details/build_strategy.cc
index 010c8dee6c4..a6359402f8d 100644
--- a/paddle/fluid/framework/details/build_strategy.cc
+++ b/paddle/fluid/framework/details/build_strategy.cc
@@ -133,15 +133,15 @@ class ParallelExecutorPassBuilder : public ir::PassBuilder {
   void AppendMultiDevPass(const BuildStrategy &strategy) {
     ir::Pass *multi_devices_pass;
     if (strategy_.is_distribution_) {
-      VLOG(3) << "multi device dist train mode";
+      VLOG(3) << "multi device parameter server mode";
       multi_devices_pass = AppendPass("dist_multi_devices_pass").get();
     } else {
       if (strategy.reduce_ == BuildStrategy::ReduceStrategy::kAllReduce) {
-        VLOG(3) << "multi device allreduce mode";
+        VLOG(3) << "multi devices collective mode with allreduce";
         multi_devices_pass =
             AppendPass("allreduce_mode_multi_devices_pass").get();
       } else if (strategy.reduce_ == BuildStrategy::ReduceStrategy::kReduce) {
-        VLOG(3) << "multi device reduce mode";
+        VLOG(3) << "multi devices collective mode with reduce";
         multi_devices_pass = AppendPass("reduce_mode_multi_devices_pass").get();
       } else {
         PADDLE_THROW("Unknown reduce strategy.");
diff --git a/python/paddle/fluid/compiler.py b/python/paddle/fluid/compiler.py
index 2b69fd89a2c..d253f0cca8e 100644
--- a/python/paddle/fluid/compiler.py
+++ b/python/paddle/fluid/compiler.py
@@ -35,15 +35,6 @@ def _place_obj(place):
     return p
 
 
-def _is_pserver_mode(main_program):
-    main = main_program if main_program \
-        else framework.default_main_program()
-    for op in main.global_block().ops:
-        if op.type in ["send", "recv"]:
-            return True
-    return False
-
-
 class CompiledProgram(object):
     """
     Compiles a Program for execution.
@@ -120,7 +111,8 @@ class CompiledProgram(object):
             self._exec_strategy = ExecutionStrategy()
         if self._build_strategy is None:
             self._build_strategy = BuildStrategy()
-        self._build_strategy.is_distribution = _is_pserver_mode(self._program)
+        self._build_strategy.is_distribution = framework.is_pserver_mode(
+            self._program)
         return self
 
     def with_inference_optimize(self, config):
diff --git a/python/paddle/fluid/framework.py b/python/paddle/fluid/framework.py
index 832c97c7deb..162e94ec594 100644
--- a/python/paddle/fluid/framework.py
+++ b/python/paddle/fluid/framework.py
@@ -85,6 +85,15 @@ def _current_expected_place():
     return _imperative_current_expected_place_
 
 
+def is_pserver_mode(main_program):
+    main = main_program if main_program \
+        else default_main_program()
+    for op in main.global_block().ops:
+        if op.type in ["send", "recv"]:
+            return True
+    return False
+
+
 class NameScope(object):
     def __init__(self, name="", parent=None):
         self._children = dict()
diff --git a/python/paddle/fluid/parallel_executor.py b/python/paddle/fluid/parallel_executor.py
index 22212ae9a21..9bff3599a04 100644
--- a/python/paddle/fluid/parallel_executor.py
+++ b/python/paddle/fluid/parallel_executor.py
@@ -29,15 +29,6 @@ ExecutionStrategy = core.ParallelExecutor.ExecutionStrategy
 BuildStrategy = core.ParallelExecutor.BuildStrategy
 
 
-def _is_pserver_mode(main_program):
-    main = main_program if main_program \
-        else framework.default_main_program()
-    for op in main.global_block().ops:
-        if op.type in ["send", "recv"]:
-            return True
-    return False
-
-
 class ParallelExecutor(object):
     """
     ParallelExecutor is designed for data parallelism, which focuses on distributing
@@ -140,7 +131,7 @@ class ParallelExecutor(object):
         # FIXME(zcd): is_distribution_ is a temporary field, because in pserver mode,
         # num_trainers is 1, so the current fields of build_strategy doesn't tell if
         # it's distributed model.
-        build_strategy.is_distribution = _is_pserver_mode(
+        build_strategy.is_distribution = framework.is_pserver_mode(
             main_program) or num_trainers > 1
 
         # step4: get main_program, scope, local_scopes
--
GitLab
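Note for readers following the refactor: the private helper duplicated in compiler.py and parallel_executor.py is replaced by a single public framework.is_pserver_mode(), which scans the program's global block for the send/recv ops inserted by the parameter-server transpiler. A minimal usage sketch, assuming a Paddle build that includes this patch; the tiny network below is illustrative and not part of the diff:

    import paddle.fluid as fluid
    from paddle.fluid import framework

    # Illustrative local program (hypothetical, not from the patch):
    # a program that was never transpiled contains no send/recv ops.
    x = fluid.layers.data(name='x', shape=[13], dtype='float32')
    y = fluid.layers.fc(input=x, size=1)

    prog = fluid.default_main_program()
    print(framework.is_pserver_mode(prog))  # False: no send/recv ops

    # After a DistributeTranspiler pass rewrites the program for
    # parameter-server training, send/recv ops appear and the same call
    # returns True. CompiledProgram and ParallelExecutor use this value
    # to set build_strategy.is_distribution, which in turn makes
    # build_strategy.cc append dist_multi_devices_pass instead of the
    # collective allreduce/reduce passes.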