From 9e14f260c024e523ff4aee163324bf74669911d3 Mon Sep 17 00:00:00 2001
From: minqiyang
Date: Thu, 28 Mar 2019 20:21:09 +0800
Subject: [PATCH] Fix polynomial decay bug in python2.x

test=develop
---
 .../paddle/fluid/imperative/learning_rate_scheduler.py | 10 ++++------
 1 file changed, 4 insertions(+), 6 deletions(-)

diff --git a/python/paddle/fluid/imperative/learning_rate_scheduler.py b/python/paddle/fluid/imperative/learning_rate_scheduler.py
index b698e620073..3209fa76d95 100644
--- a/python/paddle/fluid/imperative/learning_rate_scheduler.py
+++ b/python/paddle/fluid/imperative/learning_rate_scheduler.py
@@ -20,7 +20,7 @@ from .. import unique_name
 
 __all__ = [
     'NoamDecay', 'PiecewiseDecay', 'NaturalExpDecay', 'ExponentialDecay',
-    'InverseTimeDecay', 'CosineDecay'
+    'InverseTimeDecay', 'PolynomialDecay', 'CosineDecay'
 ]
 
 
@@ -173,12 +173,10 @@ class PolynomialDecay(LearningRateDecay):
         tmp_decay_steps = self.decay_steps
         if self.cycle:
             div_res = layers.ceil(
-                self.create_lr_var(tmp_step_num / self.decay_steps))
-            zero_var = 0.0
-            one_var = 1.0
+                self.create_lr_var(tmp_step_num / float(self.decay_steps)))
 
-            if float(tmp_step_num) == zero_var:
-                div_res = one_var
+            if tmp_step_num == 0:
+                div_res = self.create_lr_var(1.0)
             tmp_decay_steps = self.decay_steps * div_res
         else:
             tmp_step_num = self.create_lr_var(tmp_step_num
--
GitLab
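
The bug being fixed: under Python 2, / between two ints performs truncating division, so tmp_step_num / self.decay_steps evaluates to 0 for every step before the first full cycle; layers.ceil then yields 0 and tmp_decay_steps collapses to 0, which breaks the polynomial decay formula. Casting the divisor to float restores true division, and the special-case comparison is simplified to an integer check against 0. Below is a minimal standalone sketch of the arithmetic (no Paddle dependency; the function names are hypothetical, and Python 2's truncating division is emulated with // so the bug reproduces on Python 3 as well):

import math

def cycle_decay_steps_buggy(step_num, decay_steps):
    # Old code path: int / int. Under Python 2 this truncates to 0
    # for every step before decay_steps, so ceil() cannot recover
    # the lost fraction. Emulated here with floor division (//).
    div_res = math.ceil(step_num // decay_steps)
    if float(step_num) == 0.0:
        div_res = 1.0
    return decay_steps * div_res

def cycle_decay_steps_fixed(step_num, decay_steps):
    # Patched code path: casting the divisor to float keeps the
    # fractional part, so ceil() rounds partial cycles up to 1.
    div_res = math.ceil(step_num / float(decay_steps))
    if step_num == 0:
        div_res = 1.0
    return decay_steps * div_res

print(cycle_decay_steps_buggy(1, 5000))  # 0     -> decay formula sees a zero-length cycle
print(cycle_decay_steps_fixed(1, 5000))  # 5000  -> one full cycle, as intended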