Commit f8271649 authored by M minqiyang

Add PiecewiseDecay implementation

Parent 1e0a7855
# Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
from .. import layers
from .. import unique_name
__all__ = [
'ExponentialDecay', 'NaturalExpDecay', 'InverseTimeDecay',
'PolynomialDecay', 'PiecewiseDecay', 'NoamDecay'
]

class LearningRateDecay(object):
    """
    Base class of learning rate decay
    """

    def __init__(self, step, dtype='float32'):
        # Current step counter; the step() method below computes the rate for it.
        self.step_num = step
        self.dtype = dtype

    def __call__(self):
        lr = self.step()
        if isinstance(lr, float):
            lr = self.create_lr_var(lr)
        self.step_num += 1
        return lr

    def create_lr_var(self, lr):
        # Wrap a Python float into a persistable global variable so it can be
        # consumed by the optimizer.
        lr = layers.create_global_var(
            name=unique_name.generate("learning_rate"),
            shape=[1],
            value=float(lr),
            dtype=self.dtype,
            persistable=True)
        return lr

    def step(self):
        raise NotImplementedError()

class PiecewiseDecay(LearningRateDecay):
    def __init__(self, boundaries, values, step, dtype='float32'):
        super(PiecewiseDecay, self).__init__(step, dtype)
        self.boundaries = boundaries
        self.values = values

        # Pre-create one learning-rate variable per value.
        self.vars = []
        for value in values:
            self.vars.append(self.create_lr_var(value))

    def step(self):
        # Return the rate of the segment the current step falls into; past the
        # last boundary, keep the final value.
        for i in range(len(self.boundaries)):
            if self.step_num <= self.boundaries[i]:
                return self.vars[i]
        return self.vars[len(self.values) - 1]
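
As a quick sanity check of the lookup above, here is a minimal standalone sketch (plain Python, no Paddle dependency; the helper name piecewise_value is hypothetical) that mirrors the <= comparison in PiecewiseDecay.step(): steps up to and including boundaries[i] use values[i], and anything past the last boundary keeps the final value.

def piecewise_value(step_num, boundaries, values):
    # Mirrors PiecewiseDecay.step(): the first boundary that the current step
    # does not exceed selects the rate; otherwise fall through to the last value.
    for i in range(len(boundaries)):
        if step_num <= boundaries[i]:
            return values[i]
    return values[-1]

# With boundaries [100, 200] and values [1.0, 0.5, 0.1]:
#   step 50 -> 1.0, step 150 -> 0.5, step 500 -> 0.1
assert piecewise_value(50, [100, 200], [1.0, 0.5, 0.1]) == 1.0
assert piecewise_value(150, [100, 200], [1.0, 0.5, 0.1]) == 0.5
assert piecewise_value(500, [100, 200], [1.0, 0.5, 0.1]) == 0.1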
@@ -29,6 +29,7 @@ from . import tensor
 from ..initializer import init_on_cpu
 from ..framework import default_main_program, Parameter, unique_name, name_scope
 from ..imperative import base as imperative_base
+from ..imperative import learning_rate_scheduler as imperate_lr

 __all__ = [
     'exponential_decay', 'natural_exp_decay', 'inverse_time_decay',
@@ -279,7 +280,7 @@ def piecewise_decay(boundaries, values):
         raise ValueError("len(values) - len(boundaries) should be 1")

     if imperative_base.enabled():
-        decay = imperative.PiecewiseDecay(boundaries, values, 0)
+        decay = imperate_lr.PiecewiseDecay(boundaries, values, 0)
         return decay
     else:
         global_step = _decay_step_counter()
...
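
A hedged usage sketch of the updated entry point: the piecewise_decay call and the boundaries/values arguments come from the diff above, while the optimizer wiring is an assumption added for illustration and is not part of this commit.

import paddle.fluid as fluid

boundaries = [10000, 20000]
values = [1.0, 0.5, 0.1]

# In static-graph mode, piecewise_decay builds ops driven by the global step
# counter and returns a learning-rate Variable.
lr = fluid.layers.piecewise_decay(boundaries=boundaries, values=values)
sgd = fluid.optimizer.SGDOptimizer(learning_rate=lr)

# In imperative (dygraph) mode, per the diff, the same call instead returns an
# imperate_lr.PiecewiseDecay object; calling it yields the rate for the current
# step and advances the internal counter.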