diff --git a/paddle/fluid/framework/heter_pipeline_trainer.cc b/paddle/fluid/framework/heter_pipeline_trainer.cc
old mode 100644
new mode 100755
index dc99885811c2b6fa929073c145c932ad59a4f211..98860cfbb0bec0ca022f16a07a45663e9697b2b1
--- a/paddle/fluid/framework/heter_pipeline_trainer.cc
+++ b/paddle/fluid/framework/heter_pipeline_trainer.cc
@@ -333,5 +333,5 @@ Scope* HeterPipelineTrainer::GetWorkerScope(int thread_id) {
 }
 
 }  // end namespace framework
-}  // namespace paddle
+}  // end namespace paddle
 #endif
diff --git a/python/paddle/distributed/passes/ps_trainer_pass.py b/python/paddle/distributed/passes/ps_trainer_pass.py
index 4a015fea30a1dd575da4afab49542e9682e63ad2..80012e7428128d1913bdfe848a3b880ad61386b4 100755
--- a/python/paddle/distributed/passes/ps_trainer_pass.py
+++ b/python/paddle/distributed/passes/ps_trainer_pass.py
@@ -434,8 +434,8 @@ class DistributedOpsPass(PassBase):
             if op.type in SPARSE_OP_TYPE_DICT.keys() \
                     and op.attr('remote_prefetch') is True:
                 param_name = op.input(SPARSE_OP_TYPE_DICT[op.type])[0]
-                if attrs['is_heter_ps_mode']:
-                    # trick for matchnet, need to modify
+                if attrs['is_heter_ps_mode'] and not attrs['is_fl_ps_mode']:
+                    # TODO: trick for matchnet, need to modify for heter_ps
                     param_name += op.input("Ids")[0][0]
                 ops = pull_sparse_ops.get(param_name, [])
                 ops.append(op)
diff --git a/python/paddle/distributed/ps/the_one_ps.py b/python/paddle/distributed/ps/the_one_ps.py
index 0836e91c307cea50802494509875fc643b2bde33..a199901011493311e17113cd0b27638f0538ca05 100755
--- a/python/paddle/distributed/ps/the_one_ps.py
+++ b/python/paddle/distributed/ps/the_one_ps.py
@@ -1015,14 +1015,8 @@ class TheOnePSRuntime(RuntimeBase):
 
         is_test = bool(int(os.getenv("TEST_MODE", "0")))
 
-        # for GEO
-        if self.role_maker._is_first_worker() and self.is_heter_ps_mode:
-            # for ps-heter mode load all parameters on first_worker
-            init_params = get_the_one_recv_context(self.context,
-                                                   split_dense_table=True,
-                                                   use_origin_program=True)
-        else:
-            init_params = dense_map
+        # for GEO & heter_ps
+        init_params = dense_map
 
         # if not is_test:
         #     self._communicator.init_params(init_params)
@@ -1053,11 +1047,7 @@ class TheOnePSRuntime(RuntimeBase):
 
         fleet.util.barrier()  # make sure worker 0 has finished push_dense_param
         if not self.context['use_ps_gpu']:
-            if self.is_heter_ps_mode == True and not self.role_maker._is_first_worker(
-            ):
-                self._communicator.pull_dense(init_params)
-            else:
-                self._pull_all_dense(scopes, send_ctx, dense_map)
+            self._pull_all_dense(scopes, send_ctx, dense_map)
         fleet.util.barrier()
 
         if self.context[
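
Below is a minimal sketch of the sparse-op grouping rule that the ps_trainer_pass.py hunk changes. It uses plain dicts as a simplified stand-in for Paddle's program-desc ops and pass attributes (the real pass works on op protos), and the helper name group_pull_sparse_ops is hypothetical; only the guard condition mirrors the patch:

# Minimal sketch, not Paddle's real IR: ops are plain dicts and attrs is a
# flag dict. The point is the guard change: the matchnet param-name trick
# now applies only when heter-PS mode is on AND FL-PS mode is off.
SPARSE_OP_TYPE_DICT = {"lookup_table": "W", "lookup_table_v2": "W"}

def group_pull_sparse_ops(ops, attrs):
    pull_sparse_ops = {}
    for op in ops:
        if op["type"] in SPARSE_OP_TYPE_DICT and op["remote_prefetch"]:
            param_name = op["inputs"][SPARSE_OP_TYPE_DICT[op["type"]]][0]
            if attrs["is_heter_ps_mode"] and not attrs["is_fl_ps_mode"]:
                # stand-in for `param_name += op.input("Ids")[0][0]`
                param_name += op["inputs"]["Ids"][0][0]
            pull_sparse_ops.setdefault(param_name, []).append(op)
    return pull_sparse_ops

op = {"type": "lookup_table_v2", "remote_prefetch": True,
      "inputs": {"W": ["embedding_0.w_0"], "Ids": ["click_id"]}}
print(group_pull_sparse_ops([op], {"is_heter_ps_mode": True,
                                   "is_fl_ps_mode": True}))
# {'embedding_0.w_0': [...]}: with FL-PS mode on, the key stays the bare
# parameter name, so ops sharing an embedding table group into a single pull.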