Unverified commit 6d9ae660, authored by yaoxuefeng, committed by GitHub

delete ExponentialMovingAverage in paddle/optimizer (#27683)

Parent 199da968
...
@@ -15,19 +15,19 @@
 __all__ = [
     'Adadelta', 'AdadeltaOptimizer', 'Adagrad', 'AdagradOptimizer', 'Adam',
     'Adamax', 'AdamW', 'DecayedAdagrad', 'DecayedAdagradOptimizer', 'Dpsgd',
-    'DpsgdOptimizer', 'ExponentialMovingAverage', 'Ftrl', 'FtrlOptimizer',
-    'LookaheadOptimizer', 'ModelAverage', 'Momentum', 'MomentumOptimizer',
-    'RMSProp', 'SGD', 'SGDOptimizer', 'Optimizer', '_LRScheduler', 'NoamLR',
-    'PiecewiseLR', 'NaturalExpLR', 'InverseTimeLR', 'PolynomialLR',
-    'LinearLrWarmup', 'ExponentialLR', 'MultiStepLR', 'StepLR', 'LambdaLR',
-    'ReduceLROnPlateau', 'CosineAnnealingLR'
+    'DpsgdOptimizer', 'Ftrl', 'FtrlOptimizer', 'LookaheadOptimizer',
+    'ModelAverage', 'Momentum', 'MomentumOptimizer', 'RMSProp', 'SGD',
+    'SGDOptimizer', 'Optimizer', '_LRScheduler', 'NoamLR', 'PiecewiseLR',
+    'NaturalExpLR', 'InverseTimeLR', 'PolynomialLR', 'LinearLrWarmup',
+    'ExponentialLR', 'MultiStepLR', 'StepLR', 'LambdaLR', 'ReduceLROnPlateau',
+    'CosineAnnealingLR'
 ]
 from ..fluid.optimizer import Momentum, Adagrad, Dpsgd, DecayedAdagrad, Ftrl,\
     AdagradOptimizer, DpsgdOptimizer, DecayedAdagradOptimizer, \
     FtrlOptimizer, AdadeltaOptimizer, ModelAverage, \
-    ExponentialMovingAverage, LookaheadOptimizer
+    LookaheadOptimizer
 from .optimizer import Optimizer
 from .adam import Adam
...
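For context, a minimal sketch of how the import surface changes with this commit. It assumes only what the diff shows: ExponentialMovingAverage is removed from paddle.optimizer's __all__ and its re-export import, while the class itself still lives in paddle.fluid.optimizer (the module the deleted import line pulled it from). This is an illustration, not part of the commit.

    # Illustration (not part of the commit): import paths affected by this change.
    # Assumes a PaddlePaddle build around this commit, where the class is still
    # defined in paddle.fluid.optimizer, as the deleted import line shows.

    # Still works: the class remains in the fluid namespace.
    from paddle.fluid.optimizer import ExponentialMovingAverage  # noqa: F401

    # No longer works after this commit: the re-export from paddle.optimizer is gone.
    try:
        from paddle.optimizer import ExponentialMovingAverage  # noqa: F811
    except ImportError:
        print("paddle.optimizer no longer exports ExponentialMovingAverage")

Code that previously relied on the paddle.optimizer re-export would need to switch to the fluid import path, or to whatever replacement the project documents for the 2.0 API.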