diff --git a/python/paddle/distributed/passes/ps_trainer_pass.py b/python/paddle/distributed/passes/ps_trainer_pass.py
index cdb377a72be02ded3f36b5b2140821e100bc9412..466730ae1a56f633f24cfe8fb374a17e88db1af8 100755
--- a/python/paddle/distributed/passes/ps_trainer_pass.py
+++ b/python/paddle/distributed/passes/ps_trainer_pass.py
@@ -617,6 +617,8 @@ class DeleteExtraOptimizerPass(PassBase):
         for var in remote_optimize_vars:
             if var in local_optimize_vars:
                 continue
+            if 'learning_rate_0' == var:
+                continue
             if var not in remote_optimize_op_role_vars:
                 optimize_need_delete_vars.append(var)
         need_delete_optimize_vars = list(set(optimize_need_delete_vars))
diff --git a/python/paddle/distributed/ps/utils/ps_program_builder.py b/python/paddle/distributed/ps/utils/ps_program_builder.py
index 53771b05cbf671277ec4252d78e08527a9b36bec..0bd870ffee5d947bc57b60f5812712047a2bc35c 100755
--- a/python/paddle/distributed/ps/utils/ps_program_builder.py
+++ b/python/paddle/distributed/ps/utils/ps_program_builder.py
@@ -373,8 +373,8 @@ class FlPsProgramBuilder(HeterAsyncPsProgramBuilder):
         _main_file = ps_log_root_dir + '4_fl_worker_main_program.prototxt'
         #debug_program(_main_file, self.cloned_main)
 
-        fake_init_ops_pass = new_pass("fake_init_ops_pass", self.attrs)
-        fake_init_ops_pass.apply([None], [self.cloned_startup], self.pass_ctx)
+        #fake_init_ops_pass = new_pass("fake_init_ops_pass", self.attrs)
+        #fake_init_ops_pass.apply([None], [self.cloned_startup], self.pass_ctx)
 
         _main_file = ps_log_root_dir + '5_fl_worker_main_program.prototxt'
         #debug_program(_main_file, self.cloned_main)