From c7cca0a6b08f8471c2c9c1847722f541e62c5208 Mon Sep 17 00:00:00 2001
From: minqiyang
Date: Tue, 9 Apr 2019 20:07:51 +0800
Subject: [PATCH] Fix auto growth bug of optimizer in dygraph mode

test=release/1.4
---
 python/paddle/fluid/optimizer.py | 29 ++++++++++++++++++++---------
 1 file changed, 20 insertions(+), 9 deletions(-)

diff --git a/python/paddle/fluid/optimizer.py b/python/paddle/fluid/optimizer.py
index 79accabe878..f3e35fdcab5 100644
--- a/python/paddle/fluid/optimizer.py
+++ b/python/paddle/fluid/optimizer.py
@@ -275,15 +275,26 @@ class Optimizer(object):
         self._create_global_learning_rate()
 
         optimize_ops = []
-        for param_and_grad in parameters_and_grads:
-            if param_and_grad[1] is None:
-                continue
-            with param_and_grad[0].block.program._optimized_guard(
-                    param_and_grad), name_scope("optimizer"):
-                if param_and_grad[0].trainable is True:
-                    optimize_op = self._append_optimize_op(global_block,
-                                                           param_and_grad)
-                    optimize_ops.append(optimize_op)
+        if framework._in_dygraph_mode():
+            for param_and_grad in parameters_and_grads:
+                if param_and_grad[1] is None:
+                    continue
+                with param_and_grad[0].block.program._optimized_guard(
+                        param_and_grad):
+                    if param_and_grad[0].trainable is True:
+                        optimize_op = self._append_optimize_op(global_block,
+                                                               param_and_grad)
+                        optimize_ops.append(optimize_op)
+        else:
+            for param_and_grad in parameters_and_grads:
+                if param_and_grad[1] is None:
+                    continue
+                with param_and_grad[0].block.program._optimized_guard(
+                        param_and_grad), name_scope("optimizer"):
+                    if param_and_grad[0].trainable is True:
+                        optimize_op = self._append_optimize_op(global_block,
+                                                               param_and_grad)
+                        optimize_ops.append(optimize_op)
 
         # Get custom finish ops for subclasses
         # FIXME: Need to fix this once we figure out how to handle dependencies
-- 
GitLab
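
Note on the change: the dygraph branch is identical to the static-graph branch except that it drops the `name_scope("optimizer")` context manager. A plausible reading of the "auto growth" in the subject line (an interpretation, not stated in the patch itself) is that `name_scope` assigns an auto-incremented suffix each time the same scope name is re-entered, and in dygraph mode this optimization pass runs on every training step rather than once at graph-construction time, so the scope bookkeeping grows without bound. The sketch below is a toy stand-in, not Paddle's real `name_scope` implementation, illustrating that per-step growth pattern.

```python
# Toy illustration only (hypothetical counter, not PaddlePaddle code):
# a name_scope-like helper that appends an auto-incremented suffix each
# time the same prefix is opened.
import contextlib

_scope_counters = {}


@contextlib.contextmanager
def toy_name_scope(prefix):
    # Every re-entry with the same prefix produces a new unique name:
    # optimizer_1, optimizer_2, optimizer_3, ...
    count = _scope_counters.get(prefix, 0) + 1
    _scope_counters[prefix] = count
    yield "%s_%d" % (prefix, count)


# Static-graph style: the optimization pass is built once, so only one
# scope name is ever created.
with toy_name_scope("optimizer") as scope:
    print(scope)  # optimizer_1

# Dygraph style: the optimization pass runs every training step; keeping
# the scope wrapper would generate a new name each iteration, which is
# why the patch skips it in _in_dygraph_mode().
for step in range(3):
    with toy_name_scope("optimizer") as scope:
        print(scope)  # optimizer_2, optimizer_3, optimizer_4 -> grows per step
```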