diff --git a/python/paddle/fluid/transpiler/distribute_transpiler.py b/python/paddle/fluid/transpiler/distribute_transpiler.py
index 4a3bd3bef2c3b763eee411034a908edd55c4df03..343901cda3f505c3b3d2ed0c30cf7fea71c8b6b1 100644
--- a/python/paddle/fluid/transpiler/distribute_transpiler.py
+++ b/python/paddle/fluid/transpiler/distribute_transpiler.py
@@ -301,6 +301,7 @@ class DistributeTranspiler(object):
             Program: trainer side program.
         """
         # remove optimize ops and add a send op to main_program
+        # FIXME(typhoonzero): Also ops like clip_gradient, lrn_decay?
         delete_ops(self.origin_program.global_block(), self.optimize_ops)
         self.origin_program.__str__()
         return self.origin_program
@@ -537,7 +538,6 @@ class DistributeTranspiler(object):
 
         # 2. rename op outputs
         for op in orig_s_prog.global_block().ops:
-            new_inputs = dict()
             new_outputs = dict()
             # do not append startup op if var is not on this pserver
             op_on_pserver = False