From 4bd64781ca8dbe8c3e122ede00dd8adb5544cf7c Mon Sep 17 00:00:00 2001
From: tangwei12
Date: Sat, 14 Jul 2018 18:25:44 +0800
Subject: [PATCH] fix distributed transpile when slice_var_up=False

---
 python/paddle/fluid/trainer.py                          | 5 ++++-
 python/paddle/fluid/transpiler/distribute_transpiler.py | 2 --
 2 files changed, 4 insertions(+), 3 deletions(-)

diff --git a/python/paddle/fluid/trainer.py b/python/paddle/fluid/trainer.py
index 64049a93cb0..81cf8011699 100644
--- a/python/paddle/fluid/trainer.py
+++ b/python/paddle/fluid/trainer.py
@@ -349,7 +349,10 @@ class Trainer(object):
         with self._prog_and_scope_guard():
             t = distribute_transpiler.DistributeTranspiler()
             t.transpile(
-                self.trainer_id, pservers=pserver_endpoints, trainers=trainers)
+                self.trainer_id,
+                pservers=pserver_endpoints,
+                trainers=trainers,
+                slice_var_up=False)
         if training_role == "PSERVER":
             if self.checkpoint_cfg:
                 pserver_id = eplist.index(current_endpoint)
diff --git a/python/paddle/fluid/transpiler/distribute_transpiler.py b/python/paddle/fluid/transpiler/distribute_transpiler.py
index 121c36e4773..416d03a4d3f 100644
--- a/python/paddle/fluid/transpiler/distribute_transpiler.py
+++ b/python/paddle/fluid/transpiler/distribute_transpiler.py
@@ -196,8 +196,6 @@ class DistributeTranspiler(object):
         # fc_b@GRAD_trainer_0, fc_b@GRAD_trainer_1 --> pserver2
         # shuffle the map will avoid the uneven distribution above
         grad_var_mapping_items = self.grad_var_mapping.items()
-        if not slice_var_up:
-            np.random.shuffle(grad_var_mapping_items)
 
         for orig_varname, splited_vars in grad_var_mapping_items:
             eplist = ps_dispatcher.dispatch(splited_vars)
--
GitLab
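
For context, a minimal sketch of how a trainer program might drive the
transpiler after this patch. The endpoints, trainer count, and role handling
below are hypothetical and assume a network has already been built into the
default main program; they are not part of the change itself:

    from paddle.fluid.transpiler import distribute_transpiler

    # Hypothetical two-pserver, two-trainer cluster layout.
    pserver_endpoints = "127.0.0.1:6170,127.0.0.1:6171"
    trainers = 2
    trainer_id = 0

    t = distribute_transpiler.DistributeTranspiler()
    # slice_var_up=False dispatches each variable whole instead of
    # slicing it across pservers. With the shuffle removed above, the
    # dispatch order stays deterministic, so the trainer-side and
    # pserver-side transpile passes agree on variable placement.
    t.transpile(
        trainer_id,
        pservers=pserver_endpoints,
        trainers=trainers,
        slice_var_up=False)

    # Each role then fetches its transpiled program.
    pserver_program = t.get_pserver_program("127.0.0.1:6170")
    trainer_program = t.get_trainer_program()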