#   Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

__all__ = [
    'Adadelta', 'AdadeltaOptimizer', 'Adagrad', 'AdagradOptimizer', 'Adam',
    'Adamax', 'AdamW', 'DecayedAdagrad', 'DecayedAdagradOptimizer', 'Dpsgd',
    'DpsgdOptimizer', 'ExponentialMovingAverage', 'Ftrl', 'FtrlOptimizer',
    'LookaheadOptimizer', 'ModelAverage', 'Momentum', 'MomentumOptimizer',
    'RMSProp', 'SGD', 'SGDOptimizer', 'Optimizer', '_LRScheduler', 'NoamLR',
    'PiecewiseLR', 'NaturalExpLR', 'InverseTimeLR', 'PolynomialLR',
    'LinearLrWarmup', 'ExponentialLR', 'MultiStepLR', 'StepLR', 'LambdaLR',
    'ReduceLROnPlateau', 'CosineAnnealingLR'
]


# 1.x-style optimizer classes carried over from paddle.fluid for backward
# compatibility. SGDOptimizer and MomentumOptimizer are imported here as well so
# that every name listed in __all__ above is actually defined in this module.
from ..fluid.optimizer import Momentum, Adagrad, Dpsgd, DecayedAdagrad, Ftrl, \
            SGDOptimizer, MomentumOptimizer, AdagradOptimizer, DpsgdOptimizer, \
            DecayedAdagradOptimizer, FtrlOptimizer, AdadeltaOptimizer, \
            ModelAverage, ExponentialMovingAverage, LookaheadOptimizer

from .optimizer import Optimizer
from .adam import Adam
from .adamw import AdamW
from .adamax import Adamax
from .rmsprop import RMSProp
from .adadelta import Adadelta
from .sgd import SGD
from .momentum import Momentum

from . import lr_scheduler
from .lr_scheduler import _LRScheduler, NoamLR, PiecewiseLR, NaturalExpLR, InverseTimeLR, PolynomialLR, \
            LinearLrWarmup, ExponentialLR, MultiStepLR, StepLR, LambdaLR, ReduceLROnPlateau, CosineAnnealingLR