From 7d10edc5eed5fa85d84370fe0e98e99ae3b17aef Mon Sep 17 00:00:00 2001
From: zhongpu <2013000149@qq.com>
Date: Thu, 9 Jan 2020 11:24:31 +0800
Subject: [PATCH] add clear_gradients for Optimizer and add clear_gradients api
 description (#21948)

* add clear_gradients for Optimizer, add api description, test=develop

* fix optest for optimizer's clear_gradient interface, test=develop

* add sample code, test=develop

* polish sample code, test=develop
---
 python/paddle/fluid/dygraph/layers.py         | 24 ++++++++++++++
 python/paddle/fluid/optimizer.py              | 31 +++++++++++++++++++
 ..._imperative_lod_tensor_to_selected_rows.py |  2 +-
 ..._imperative_selected_rows_to_lod_tensor.py |  2 +-
 4 files changed, 57 insertions(+), 2 deletions(-)

diff --git a/python/paddle/fluid/dygraph/layers.py b/python/paddle/fluid/dygraph/layers.py
index 6592029753..20bfa2aea3 100644
--- a/python/paddle/fluid/dygraph/layers.py
+++ b/python/paddle/fluid/dygraph/layers.py
@@ -173,6 +173,30 @@ class Layer(core.Layer):
         return ret
 
     def clear_gradients(self):
+        """
+        Clear the gradients of all parameters for this layer.
+
+        Returns:
+            None
+
+        Examples:
+            .. code-block:: python
+
+                import paddle.fluid as fluid
+                import numpy as np
+
+                with fluid.dygraph.guard():
+                    value = np.arange(26).reshape(2, 13).astype("float32")
+                    a = fluid.dygraph.to_variable(value)
+                    linear = fluid.Linear(13, 5, dtype="float32")
+                    adam = fluid.optimizer.Adam(learning_rate=0.01,
+                                                parameter_list=linear.parameters())
+                    out = linear(a)
+                    out.backward()
+                    adam.minimize(out)
+                    linear.clear_gradients()
+
+        """
         for p in self.parameters():
             if p.trainable:
                 p.clear_gradient()
diff --git a/python/paddle/fluid/optimizer.py b/python/paddle/fluid/optimizer.py
index caa665759a..376985d257 100644
--- a/python/paddle/fluid/optimizer.py
+++ b/python/paddle/fluid/optimizer.py
@@ -662,6 +662,37 @@ class Optimizer(object):
 
         return no_grad_set
 
+    @framework.dygraph_only
+    def clear_gradients(self):
+        """
+        Clear the gradients of all optimized parameters for the model.
+
+        Returns:
+            None
+
+        Examples:
+            .. code-block:: python
+
+                import paddle.fluid as fluid
+                import numpy as np
+
+                with fluid.dygraph.guard():
+                    value = np.arange(26).reshape(2, 13).astype("float32")
+                    a = fluid.dygraph.to_variable(value)
+                    linear = fluid.Linear(13, 5, dtype="float32")
+                    # This can be any optimizer supported by dygraph.
+                    adam = fluid.optimizer.Adam(learning_rate=0.01,
+                                                parameter_list=linear.parameters())
+                    out = linear(a)
+                    out.backward()
+                    adam.minimize(out)
+                    adam.clear_gradients()
+
+        """
+        for p in self._parameter_list:
+            if p.trainable:
+                p.clear_gradient()
+
     @imperative_base.no_grad
     def minimize(self,
                  loss,
diff --git a/python/paddle/fluid/tests/unittests/test_imperative_lod_tensor_to_selected_rows.py b/python/paddle/fluid/tests/unittests/test_imperative_lod_tensor_to_selected_rows.py
index c7a9e202e1..477b3be76f 100644
--- a/python/paddle/fluid/tests/unittests/test_imperative_lod_tensor_to_selected_rows.py
+++ b/python/paddle/fluid/tests/unittests/test_imperative_lod_tensor_to_selected_rows.py
@@ -131,7 +131,7 @@ class TestDygraphSimpleNet(unittest.TestCase):
                         dy_param_init[param.name] = param.numpy()
                     dy_loss.backward(backward_strategy)
                     sgd.minimize(dy_loss)
-                    simple_net.clear_gradients()
+                    sgd.clear_gradients()
                     if i == batch_num - 1:
                         for param in simple_net.parameters():
                             dy_param_updated[param.name] = param.numpy()
diff --git a/python/paddle/fluid/tests/unittests/test_imperative_selected_rows_to_lod_tensor.py b/python/paddle/fluid/tests/unittests/test_imperative_selected_rows_to_lod_tensor.py
index 3db655b788..a42a62019b 100644
--- a/python/paddle/fluid/tests/unittests/test_imperative_selected_rows_to_lod_tensor.py
+++ b/python/paddle/fluid/tests/unittests/test_imperative_selected_rows_to_lod_tensor.py
@@ -137,7 +137,7 @@ class TestDygraphSimpleNet(unittest.TestCase):
                         dy_param_init[param.name] = param.numpy()
                     dy_loss.backward(backward_strategy)
                     sgd.minimize(dy_loss)
-                    simple_net.clear_gradients()
+                    sgd.clear_gradients()
                     if i == batch_num - 1:
                         for param in simple_net.parameters():
                             dy_param_updated[param.name] = param.numpy()
-- 
GitLab
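
The sketch below shows the usage pattern this patch enables in a dygraph
training loop: clearing gradients through the optimizer rather than through the
Layer, mirroring the change made in the two unit tests above. It is a minimal
sketch; the loop structure, the SGD optimizer, and the random input data are
illustrative assumptions, not part of the patch:

.. code-block:: python

    import numpy as np
    import paddle.fluid as fluid

    with fluid.dygraph.guard():
        linear = fluid.Linear(13, 5, dtype="float32")
        sgd = fluid.optimizer.SGD(learning_rate=0.01,
                                  parameter_list=linear.parameters())
        for step in range(4):  # a few illustrative training steps
            x = fluid.dygraph.to_variable(
                np.random.random((2, 13)).astype("float32"))
            loss = fluid.layers.reduce_mean(linear(x))
            loss.backward()
            sgd.minimize(loss)
            # Optimizer.clear_gradients() clears only the parameters the
            # optimizer was constructed with (self._parameter_list), whereas
            # Layer.clear_gradients() clears every parameter of the layer.
            sgd.clear_gradients()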