From 5d73bfdb9860ecdeb21965da8d5585eb216e9ccb Mon Sep 17 00:00:00 2001
From: Chen Weihang
Date: Wed, 21 Oct 2020 21:05:06 +0800
Subject: [PATCH] fix test_weight_decay_extend error (#28178)

---
 .../fluid/contrib/tests/test_weight_decay_extend.py | 9 ++++++---
 1 file changed, 6 insertions(+), 3 deletions(-)

diff --git a/python/paddle/fluid/contrib/tests/test_weight_decay_extend.py b/python/paddle/fluid/contrib/tests/test_weight_decay_extend.py
index 5ed7fd01a43..65d400c6326 100644
--- a/python/paddle/fluid/contrib/tests/test_weight_decay_extend.py
+++ b/python/paddle/fluid/contrib/tests/test_weight_decay_extend.py
@@ -149,17 +149,20 @@ class TestWeightDecay(unittest.TestCase):
 
         avg_cost = model(data, label, self.word_dict_len)
 
+        optimizer = fluid.optimizer.Adam(learning_rate=self.learning_rate)
+
+        params_grads = optimizer.backward(avg_cost)
+
         param_list = [(var, var * self.learning_rate)
                       for var in main_prog.block(0).all_parameters()]
 
-        optimizer = fluid.optimizer.Adam(learning_rate=self.learning_rate)
-
-        optimizer.minimize(avg_cost)
 
         for params in param_list:
             updated_p = fluid.layers.elementwise_sub(
                 x=params[0], y=params[1])
             fluid.layers.assign(input=updated_p, output=params[0])
 
+        optimizer.apply_optimize(avg_cost, startup_prog, params_grads)
+
         param_sum = self.run_program(place, [data, label])
         return param_sum
-- 
GitLab