From 0fe2001a883f8307441a1bed8d2ab34f459b15d3 Mon Sep 17 00:00:00 2001
From: Leo Chen
Date: Sat, 2 Apr 2022 10:53:19 +0800
Subject: [PATCH] make variable 'gradient_merge_cond' local (#41262)

---
 .../fleet/meta_optimizers/sharding_optimizer.py         | 9 ++-------
 .../distributed/passes/auto_parallel_gradient_merge.py  | 9 ++-------
 python/paddle/fluid/optimizer.py                        | 9 ++-------
 3 files changed, 6 insertions(+), 21 deletions(-)

diff --git a/python/paddle/distributed/fleet/meta_optimizers/sharding_optimizer.py b/python/paddle/distributed/fleet/meta_optimizers/sharding_optimizer.py
index 52468ab533..c4d42f9061 100755
--- a/python/paddle/distributed/fleet/meta_optimizers/sharding_optimizer.py
+++ b/python/paddle/distributed/fleet/meta_optimizers/sharding_optimizer.py
@@ -1621,13 +1621,8 @@ class ShardingOptimizer(MetaOptimizerBase):
             persistable=True,
             force_cpu=True)
 
-        cond_var = layers.create_global_var(
-            name="gradient_merge_cond",
-            shape=[1],
-            value=bool(0),
-            dtype='bool',
-            persistable=False,
-            force_cpu=True)
+        cond_var = main_block.create_var(
+            name="gradient_merge_cond", shape=[1], dtype='bool')
 
         with device_guard("cpu"):
             # step_var = (step_var + 1) % k_step
diff --git a/python/paddle/distributed/passes/auto_parallel_gradient_merge.py b/python/paddle/distributed/passes/auto_parallel_gradient_merge.py
index 7668dff362..accac81133 100644
--- a/python/paddle/distributed/passes/auto_parallel_gradient_merge.py
+++ b/python/paddle/distributed/passes/auto_parallel_gradient_merge.py
@@ -107,13 +107,8 @@ def _get_gm_cond_var(main_program, k_steps, dist_context):
         force_cpu=True)
     set_var_dist_attr(dist_context, step_var, [-1], world_process_group.ranks)
 
-    cond_var = layers.create_global_var(
-        name="gradient_merge_cond",
-        shape=[1],
-        value=bool(0),
-        dtype='bool',
-        persistable=False,
-        force_cpu=True)
+    cond_var = main_block.create_var(
+        name="gradient_merge_cond", shape=[1], dtype='bool')
     set_var_dist_attr(dist_context, cond_var, [-1], world_process_group.ranks)
 
     with device_guard("cpu"):
diff --git a/python/paddle/fluid/optimizer.py b/python/paddle/fluid/optimizer.py
index 7bf4608de8..8242d8e339 100755
--- a/python/paddle/fluid/optimizer.py
+++ b/python/paddle/fluid/optimizer.py
@@ -7098,13 +7098,8 @@ class GradientMergeOptimizer(object):
             persistable=True,
             force_cpu=True)
 
-        cond_var = layers.create_global_var(
-            name="gradient_merge_cond",
-            shape=[1],
-            value=bool(0),
-            dtype='bool',
-            persistable=False,
-            force_cpu=True)
+        cond_var = main_block.create_var(
+            name="gradient_merge_cond", shape=[1], dtype='bool')
 
         with device_guard("cpu"):
             # step_var = (step_var + 1) % k_step
-- 
GitLab
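
Note: the three hunks all make the same swap, from create_global_var to a plain
block-local variable. Below is a minimal sketch of the difference, assuming the
Paddle 2.x static-graph API; apart from "gradient_merge_cond", the program and
variable names here are illustrative and not taken from the patch.

# Hedged sketch, not the patch itself: contrasts create_global_var with
# Block.create_var under Paddle 2.x static-graph mode.
import paddle

paddle.enable_static()

main_prog = paddle.static.Program()
startup_prog = paddle.static.Program()
with paddle.static.program_guard(main_prog, startup_prog):
    main_block = main_prog.global_block()

    # Old style (the removed lines): create_global_var registers a named
    # global variable and an initializer op for it. The name here is made up
    # to avoid clashing with the variable created below.
    old_cond = paddle.static.create_global_var(
        name="gradient_merge_cond_global",
        shape=[1],
        value=bool(0),
        dtype='bool',
        persistable=False,
        force_cpu=True)

    # New style (the added lines): a plain variable declared only in this
    # block, with no initializer; it gets its value as the output of an op
    # appended later in the same block.
    cond_var = main_block.create_var(
        name="gradient_merge_cond", shape=[1], dtype='bool')

The practical effect is that the condition flag stops being a program-level
global named "gradient_merge_cond" and becomes local to the block that computes
it, presumably so the gradient-merge logic does not leave (or collide with) a
globally registered variable of that name.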