diff --git a/python/paddle/fluid/distribute_transpiler.py b/python/paddle/fluid/distribute_transpiler.py
index aa15392d7e4901e8ee23ad5b4370542232adc2a5..591c22d9bd8e5846a99a971256a64e2750541383 100644
--- a/python/paddle/fluid/distribute_transpiler.py
+++ b/python/paddle/fluid/distribute_transpiler.py
@@ -420,13 +420,14 @@ class DistributeTranspiler:
         # append op to the current block
         per_opt_block = append_block
-        for _, opt_op in enumerate(opt_op_on_pserver):
+        for idx, opt_op in enumerate(opt_op_on_pserver):
             for _, op in enumerate(self.optimize_ops):
                 # optimizer is connected to itself
                 if ufind.is_connected(op, opt_op) and \
                         op not in global_ops:
                     __append_optimize_op__(op, per_opt_block)
-            per_opt_block = pserver_program.create_block(append_block.idx)
+            if idx == len(opt_op_on_pserver) - 1 and global_ops:
+                per_opt_block = pserver_program.create_block(append_block.idx)

         # append global ops
         for glb_op in global_ops: