Unverified commit 086b92df authored by Zhou Wei, committed by GitHub

fix optimizer init (#27995)

Parent a0b2f936
@@ -13,14 +13,10 @@
# limitations under the License.
__all__ = [
'Adadelta', 'Adam', 'Adamax', 'AdamW', 'Momentum', 'MomentumOptimizer',
'RMSProp', 'SGD', 'SGDOptimizer', 'Optimizer', '_LRScheduler', 'NoamLR',
'PiecewiseLR', 'NaturalExpLR', 'InverseTimeLR', 'PolynomialLR',
'LinearLrWarmup', 'ExponentialLR', 'MultiStepLR', 'StepLR', 'LambdaLR',
'ReduceLROnPlateau', 'CosineAnnealingLR'
'Optimizer', 'Adagrad', 'Adam', 'AdamW', 'Adamax', 'RMSProp', 'Adadelta',
'SGD', 'Momentum', 'lr'
]
from .optimizer import Optimizer
from .adagrad import Adagrad
from .adam import Adam
@@ -30,5 +26,4 @@ from .rmsprop import RMSProp
from .adadelta import Adadelta
from .sgd import SGD
from .momentum import Momentum
from . import lr
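After this change, only the optimizer classes themselves are exported from the top level of paddle.optimizer; the learning-rate schedulers are reached through the lr submodule rather than the old top-level names such as NoamLR or StepLR. A minimal sketch of the resulting import paths (not part of the diff; the StepDecay class name and its arguments are assumptions about the paddle.optimizer.lr namespace, not shown here):

    import paddle
    from paddle.optimizer import Adam, SGD         # optimizer classes stay at the top level
    from paddle.optimizer.lr import LRScheduler    # schedulers now live under the lr submodule

    # Assumed scheduler class from paddle.optimizer.lr; a scheduler instance is
    # passed where a float learning rate used to go.
    scheduler = paddle.optimizer.lr.StepDecay(learning_rate=0.1, step_size=5)
    opt = SGD(learning_rate=scheduler,
              parameters=paddle.nn.Linear(10, 10).parameters())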
@@ -30,7 +30,7 @@ class LRScheduler(object):
LRScheduler Base class. Define the common interface of a learning rate scheduler.
User can import it by ``form paddle.optimizer.lr import LRScheduler`` ,
User can import it by ``from paddle.optimizer.lr import LRScheduler`` ,
then overload it for your subclass and have a custom implementation of ``get_lr()`` .
@@ -50,7 +50,7 @@ class LRScheduler(object):
.. code-block:: python
import paddle
form paddle.optimizer.lr import LRScheduler
from paddle.optimizer.lr import LRScheduler
class StepDecay(LRScheduler):
def __init__(self,
......
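The truncated docstring example above shows the intended usage pattern: subclass LRScheduler and override get_lr(). A self-contained sketch of that pattern follows; the base-class constructor arguments (learning_rate, last_epoch, verbose) and the base_lr / last_epoch attributes are assumed from the Paddle 2.0 API and are not spelled out in this diff:

    from paddle.optimizer.lr import LRScheduler

    class StepDecay(LRScheduler):
        """Decay the base learning rate by gamma every step_size epochs."""

        def __init__(self, learning_rate, step_size, gamma=0.1,
                     last_epoch=-1, verbose=False):
            self.step_size = step_size
            self.gamma = gamma
            super(StepDecay, self).__init__(learning_rate, last_epoch, verbose)

        def get_lr(self):
            # last_epoch and base_lr are maintained by the LRScheduler base class.
            i = self.last_epoch // self.step_size
            return self.base_lr * (self.gamma ** i)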
@@ -24,6 +24,8 @@
}
],
"wlist_temp_api":[
"LRScheduler",
"ReduceOnPlateau",
"append_LARS",
"BuildStrategy.debug_graphviz_path",
"BuildStrategy.enable_sequential_execution",
......