From f8271649b4057d4b8c7a26b867d337fa68021ae4 Mon Sep 17 00:00:00 2001
From: minqiyang
Date: Tue, 29 Jan 2019 17:35:43 +0800
Subject: [PATCH] Add PiecewiseDecay implementation

---
 .../imperative/learning_rate_scheduler.py     | 76 ++++++++++++++++++++
 .../fluid/layers/learning_rate_scheduler.py   |  3 +-
 2 files changed, 78 insertions(+), 1 deletion(-)
 create mode 100644 python/paddle/fluid/imperative/learning_rate_scheduler.py

diff --git a/python/paddle/fluid/imperative/learning_rate_scheduler.py b/python/paddle/fluid/imperative/learning_rate_scheduler.py
new file mode 100644
index 00000000000..5393090cde5
--- /dev/null
+++ b/python/paddle/fluid/imperative/learning_rate_scheduler.py
@@ -0,0 +1,76 @@
+# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import print_function
+
+from .. import layers
+from .. import unique_name
+
+__all__ = [
+    'ExponentialDecay', 'NaturalExpDecay', 'InverseTimeDecay',
+    'PolynomialDecay', 'PiecewiseDecay', 'NoamDecay'
+]
+
+
+class LearningRateDecay(object):
+    """
+    Base class of learning rate decay
+    """
+
+    def __init__(self, step_num, dtype='float32'):
+        # The counter is named step_num so that it does not shadow
+        # the step() method that subclasses must override.
+        self.step_num = step_num
+        self.dtype = dtype
+
+    def __call__(self):
+        lr = self.step()
+        if isinstance(lr, float):
+            lr = self.create_lr_var(lr)
+        self.step_num += 1
+        return lr
+
+    def create_lr_var(self, lr):
+        # Wrap a python float into a persistable global variable so
+        # that optimizers can consume it as a learning rate tensor.
+        lr = layers.create_global_var(
+            name=unique_name.generate("learning_rate"),
+            shape=[1],
+            value=float(lr),
+            dtype=self.dtype,
+            persistable=True)
+        return lr
+
+    def step(self):
+        raise NotImplementedError()
+
+
+class PiecewiseDecay(LearningRateDecay):
+    def __init__(self, boundaries, values, step_num, dtype='float32'):
+        super(PiecewiseDecay, self).__init__(step_num, dtype)
+        self.boundaries = boundaries
+        self.values = values
+
+        # Create one learning rate variable per candidate value up front.
+        self.vars = []
+        for value in values:
+            self.vars.append(self.create_lr_var(value))
+
+    def step(self):
+        # Return the value of the first interval the current step
+        # falls in; past the last boundary, keep the final value.
+        for i in range(len(self.boundaries)):
+            if self.step_num < self.boundaries[i]:
+                return self.vars[i]
+        return self.vars[len(self.values) - 1]
diff --git a/python/paddle/fluid/layers/learning_rate_scheduler.py b/python/paddle/fluid/layers/learning_rate_scheduler.py
index 2f489e43db1..521e4ceb60b 100644
--- a/python/paddle/fluid/layers/learning_rate_scheduler.py
+++ b/python/paddle/fluid/layers/learning_rate_scheduler.py
@@ -29,6 +29,7 @@ from . import tensor
 from ..initializer import init_on_cpu
 from ..framework import default_main_program, Parameter, unique_name, name_scope
 from ..imperative import base as imperative_base
+from ..imperative import learning_rate_scheduler as imperate_lr
 
 __all__ = [
     'exponential_decay', 'natural_exp_decay', 'inverse_time_decay',
@@ -279,7 +280,7 @@ def piecewise_decay(boundaries, values):
         raise ValueError("len(values) - len(boundaries) should be 1")
 
     if imperative_base.enabled():
-        decay = imperative.PiecewiseDecay(boundaries, values, 0)
+        decay = imperate_lr.PiecewiseDecay(boundaries, values, 0)
         return decay
     else:
         global_step = _decay_step_counter()
--
GitLab
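
A reviewer's note on usage (not part of the patch): the sketch below shows how
the new scheduler is meant to be driven once imperative mode is on. The
fluid.imperative.guard() context comes from the surrounding imperative-mode
work rather than from this patch, and the boundary/value numbers are
illustrative only.

    import paddle.fluid as fluid

    # Sketch under assumptions: fluid.imperative.guard() enables imperative
    # mode, so fluid.layers.piecewise_decay() returns a PiecewiseDecay object
    # instead of building graph ops (the dispatch added in this patch).
    with fluid.imperative.guard():
        # Illustrative schedule: lr is 1.0 for steps [0, 100), 0.5 for
        # [100, 200), and 0.1 from step 200 on.
        decay = fluid.layers.piecewise_decay(
            boundaries=[100, 200], values=[1.0, 0.5, 0.1])
        for _ in range(300):
            lr = decay()  # Variable holding the current interval's rate;
                          # each call advances the internal step counter.

Because every candidate rate is materialized as a persistable variable in the
constructor, each call merely selects among existing variables rather than
creating a new one per step.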