From 4fdd114d34086726df7c1d0f17be9b3b042d8ef7 Mon Sep 17 00:00:00 2001 From: qiaolongfei Date: Thu, 8 Mar 2018 15:56:50 +0800 Subject: [PATCH] A small optimization of the optimizer --- python/paddle/fluid/optimizer.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/python/paddle/fluid/optimizer.py b/python/paddle/fluid/optimizer.py index 1c12d53e4f..421963a2f9 100644 --- a/python/paddle/fluid/optimizer.py +++ b/python/paddle/fluid/optimizer.py @@ -92,7 +92,10 @@ class Optimizer(object): # create learning rate variable for every parameter param = param_and_grad[0] param_lr = param.optimize_attr['learning_rate'] - return self.global_learning_rate() * param_lr + if param_lr == 1.0: + return self.global_learning_rate() + else: + return self.global_learning_rate() * param_lr def _create_accumulators(self, block, parameters): """Create all accumulators needed by the parameters -- GitLab