diff --git a/ppcls/optimizer/learning_rate.py b/ppcls/optimizer/learning_rate.py
index f352fd00dce94539f7d504c78e56c4e399fb4c2d..acf48219826a9224349c3ff18c0315ac1917c621 100644
--- a/ppcls/optimizer/learning_rate.py
+++ b/ppcls/optimizer/learning_rate.py
@@ -255,7 +255,7 @@ class Cosine(LRBase):
 
 class Cyclic(LRBase):
     """Cyclic learning rate decay
-    
+
     Args:
         epochs (int): Total epoch(s).
         step_each_epoch (int): Number of iterations within an epoch.
@@ -406,11 +406,17 @@ class Piecewise(LRBase):
                  learning_rate=None,
                  **kwargs):
         if learning_rate:
-            decay_epochs = list(range(0, epochs, 30))[1:]
+            decay_epochs = list(range(0, epochs, 30))
             values = [
-                learning_rate * (0.1**i)
-                for i in range(len(decay_epochs) + 1)
+                learning_rate * (0.1**i) for i in range(len(decay_epochs))
             ]
+            # when total epochs < 30, decay_epochs and values should be
+            # [] and [lr] respectively, but Paddle doesn't support that.
+            if len(decay_epochs) == 1:
+                decay_epochs = [epochs]
+                values = [values[0], values[0]]
+            else:
+                decay_epochs = decay_epochs[1:]
             logger.warning(
                 "When 'learning_rate' of Piecewise has beed set, "
                 "the learning rate scheduler would be set by the rule that lr decay 10 times every 30 epochs. "
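
For reference, here is a minimal standalone sketch of the schedule construction the second hunk arrives at, following the "decay 10x every 30 epochs" rule stated in the warning message. The helper name `build_piecewise_schedule` is hypothetical (PaddleClas inlines this logic in `Piecewise.__init__`); the constraint it guards against is that, as the patch comment notes, `paddle.optimizer.lr.PiecewiseDecay` does not accept an empty boundaries list:

```python
# Hypothetical standalone version of the patched logic; not the PaddleClas API.
def build_piecewise_schedule(epochs, learning_rate):
    """Decay the lr 10x every 30 epochs, guarding the epochs < 30 edge case."""
    decay_epochs = list(range(0, epochs, 30))  # e.g. epochs=90 -> [0, 30, 60]
    values = [learning_rate * (0.1**i) for i in range(len(decay_epochs))]
    if len(decay_epochs) == 1:
        # epochs < 30: the natural result would be boundaries [] with
        # values [lr], which paddle.optimizer.lr.PiecewiseDecay rejects,
        # so use one dummy boundary at the last epoch with a flat lr.
        decay_epochs = [epochs]
        values = [values[0], values[0]]
    else:
        decay_epochs = decay_epochs[1:]  # drop the leading boundary at epoch 0
    return decay_epochs, values


# epochs >= 30: two boundaries, lr steps down 10x at each one
print(build_piecewise_schedule(90, 0.1))  # ([30, 60], [0.1, 0.01, 0.001]) up to float rounding
# epochs < 30: the newly guarded case, a constant lr throughout
print(build_piecewise_schedule(20, 0.1))  # ([20], [0.1, 0.1])
```

Placing the single dummy boundary at `epochs` means it is never crossed during training, so the schedule degenerates to a constant learning rate, which matches the intent of the unsupported `[]` / `[lr]` pair.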