From 5641ea2bf64bf7f46898fa7cf31012107e3c3b92 Mon Sep 17 00:00:00 2001
From: WangXi
Date: Mon, 28 Sep 2020 10:38:08 +0800
Subject: [PATCH] Remove optimizer which in fleet, test=develop (#27606)

---
 python/paddle/optimizer/__init__.py | 24 ++++++++++--------------
 1 file changed, 10 insertions(+), 14 deletions(-)

diff --git a/python/paddle/optimizer/__init__.py b/python/paddle/optimizer/__init__.py
index 095a34cb6f..c9e49ce614 100644
--- a/python/paddle/optimizer/__init__.py
+++ b/python/paddle/optimizer/__init__.py
@@ -14,24 +14,20 @@
 
 __all__ = [
     'Adadelta', 'AdadeltaOptimizer', 'Adagrad', 'AdagradOptimizer', 'Adam',
-    'Adamax', 'AdamW', 'DecayedAdagrad', 'DecayedAdagradOptimizer',
-    'DGCMomentumOptimizer', 'Dpsgd', 'DpsgdOptimizer',
-    'ExponentialMovingAverage', 'Ftrl', 'FtrlOptimizer', 'LambOptimizer',
-    'LarsMomentum', 'LarsMomentumOptimizer', 'LookaheadOptimizer',
-    'ModelAverage', 'Momentum', 'MomentumOptimizer', 'PipelineOptimizer',
-    'RecomputeOptimizer', 'RMSProp', 'SGD', 'SGDOptimizer', 'Optimizer',
-    '_LRScheduler', 'NoamLR', 'PiecewiseLR', 'NaturalExpLR', 'InverseTimeLR',
-    'PolynomialLR', 'LinearLrWarmup', 'ExponentialLR', 'MultiStepLR', 'StepLR',
-    'LambdaLR', 'ReduceLROnPlateau', 'CosineAnnealingLR'
+    'Adamax', 'AdamW', 'DecayedAdagrad', 'DecayedAdagradOptimizer', 'Dpsgd',
+    'DpsgdOptimizer', 'ExponentialMovingAverage', 'Ftrl', 'FtrlOptimizer',
+    'LookaheadOptimizer', 'ModelAverage', 'Momentum', 'MomentumOptimizer',
+    'RMSProp', 'SGD', 'SGDOptimizer', 'Optimizer', '_LRScheduler', 'NoamLR',
+    'PiecewiseLR', 'NaturalExpLR', 'InverseTimeLR', 'PolynomialLR',
+    'LinearLrWarmup', 'ExponentialLR', 'MultiStepLR', 'StepLR', 'LambdaLR',
+    'ReduceLROnPlateau', 'CosineAnnealingLR'
 ]
 
 
 from ..fluid.optimizer import Momentum, Adagrad, Dpsgd, DecayedAdagrad, Ftrl,\
-    AdagradOptimizer,DpsgdOptimizer,\
-    DecayedAdagradOptimizer,FtrlOptimizer,AdadeltaOptimizer, \
-    ModelAverage, LarsMomentum, DGCMomentumOptimizer, LambOptimizer,\
-    ExponentialMovingAverage, PipelineOptimizer, LookaheadOptimizer, \
-    RecomputeOptimizer, LarsMomentumOptimizer
+    AdagradOptimizer, DpsgdOptimizer, DecayedAdagradOptimizer, \
+    FtrlOptimizer, AdadeltaOptimizer, ModelAverage, \
+    ExponentialMovingAverage, LookaheadOptimizer
 
 from .optimizer import Optimizer
 from .adam import Adam
--
GitLab
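
A rough sketch of the import-level effect of this patch (not part of the commit itself): the fleet-oriented wrappers are dropped from the public paddle.optimizer namespace, while the remaining optimizers stay importable. The paddle.fluid.optimizer location used below is assumed to be the pre-existing home of the removed classes, which this patch does not touch.

# Minimal sketch, assuming a Paddle build that includes this commit.

# Still re-exported from the public namespace after the change:
from paddle.optimizer import Adam, Momentum, ModelAverage, Optimizer

# Dropped from paddle.optimizer.__all__ and from its fluid re-import, so this
# now raises ImportError:
#   from paddle.optimizer import DGCMomentumOptimizer, LambOptimizer, \
#       LarsMomentumOptimizer, PipelineOptimizer, RecomputeOptimizer

# Assumed to remain reachable through the legacy fluid module (untouched here):
from paddle.fluid.optimizer import PipelineOptimizer, RecomputeOptimizer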