From 353c3f512b860681c1d0cb3224a8adeec15fc3c1 Mon Sep 17 00:00:00 2001
From: gaotingquan
Date: Mon, 7 Aug 2023 02:54:44 +0000
Subject: [PATCH] debug: fix Piecewise.learning_rate fallback when total
 epochs < 30

---
 ppcls/optimizer/learning_rate.py | 14 ++++++++++----
 1 file changed, 10 insertions(+), 4 deletions(-)

diff --git a/ppcls/optimizer/learning_rate.py b/ppcls/optimizer/learning_rate.py
index f352fd00..acf48219 100644
--- a/ppcls/optimizer/learning_rate.py
+++ b/ppcls/optimizer/learning_rate.py
@@ -255,7 +255,7 @@ class Cosine(LRBase):
 
 class Cyclic(LRBase):
     """Cyclic learning rate decay
-    
+
     Args:
         epochs (int): Total epoch(s).
         step_each_epoch (int): Number of iterations within an epoch.
@@ -406,11 +406,17 @@ class Piecewise(LRBase):
                  learning_rate=None,
                  **kwargs):
         if learning_rate:
-            decay_epochs = list(range(0, epochs, 30))[1:]
+            decay_epochs = list(range(0, epochs, 30))
             values = [
-                learning_rate * (0.1**i)
-                for i in range(len(decay_epochs) + 1)
+                learning_rate * (0.1**i) for i in range(len(decay_epochs))
             ]
+            # When total epochs < 30, decay_epochs and values should be
+            # [] and [lr] respectively, but paddle doesn't support that.
+            if len(decay_epochs) == 1:
+                decay_epochs = [epochs]
+                values = [values[0], values[0]]
+            else:
+                decay_epochs = decay_epochs[1:]
             logger.warning(
                 "When 'learning_rate' of Piecewise has beed set, "
                 "the learning rate scheduler would be set by the rule that lr decay 10 times every 30 epochs. "
--
GitLab
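
For reference, below is a minimal standalone sketch of the fallback logic this patch introduces. The helper name piecewise_defaults is hypothetical; in PaddleClas the computation happens inline in Piecewise.__init__, and the resulting lists are handed to paddle's piecewise scheduler as boundaries/values, which expects a non-empty boundaries list with len(values) == len(boundaries) + 1.

    # Standalone sketch of the patched fallback; `piecewise_defaults` is a
    # hypothetical name, the real logic lives inline in Piecewise.__init__.
    def piecewise_defaults(epochs, learning_rate):
        # Default rule: decay the lr 10x every 30 epochs.
        decay_epochs = list(range(0, epochs, 30))  # epochs=90 -> [0, 30, 60]
        values = [learning_rate * (0.1**i) for i in range(len(decay_epochs))]
        if len(decay_epochs) == 1:
            # epochs < 30: no real decay boundary exists, but paddle rejects
            # an empty boundaries list, so use a dummy boundary at `epochs`
            # and keep the lr constant on both sides of it.
            decay_epochs = [epochs]
            values = [values[0], values[0]]
        else:
            decay_epochs = decay_epochs[1:]  # drop the leading boundary at 0
        return decay_epochs, values

    print(piecewise_defaults(20, 0.1))  # ([20], [0.1, 0.1])
    print(piecewise_defaults(90, 0.1))  # ([30, 60], [0.1, 0.01, 0.001])

With epochs < 30 the schedule now degenerates to a constant learning rate instead of producing the empty boundaries list ([] and [lr]) that paddle cannot accept, which is the failure mode the commit message describes.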