diff --git a/python/paddle/fluid/dygraph/learning_rate_scheduler.py b/python/paddle/fluid/dygraph/learning_rate_scheduler.py
index 02b63ebeefa937a624a06006555ef18ef4d3280c..7e4e50bc47618fb48cb7fdf149480c567ea33e0e 100644
--- a/python/paddle/fluid/dygraph/learning_rate_scheduler.py
+++ b/python/paddle/fluid/dygraph/learning_rate_scheduler.py
@@ -171,15 +171,18 @@ class NaturalExpDecay(LearningRateDecay):

     Examples:
         .. code-block:: python

-          import paddle.fluid as fluid
-          base_lr = 0.1
-          with fluid.dygraph.guard():
-              sgd_optimizer = fluid.optimizer.SGD(
-                  learning_rate=fluid.dygraph.NaturalExpDecay(
-                      learning_rate=base_lr,
-                      decay_steps=10000,
-                      decay_rate=0.5,
-                      staircase=True))
+          import paddle.fluid as fluid
+          base_lr = 0.1
+          with fluid.dygraph.guard():
+              emb = fluid.dygraph.Embedding([10, 10])
+              sgd_optimizer = fluid.optimizer.SGD(
+                  learning_rate=fluid.dygraph.NaturalExpDecay(
+                      learning_rate=base_lr,
+                      decay_steps=10000,
+                      decay_rate=0.5,
+                      staircase=True),
+                  parameter_list=emb.parameters())
+

     """
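
The diff only updates the docstring snippet to pass `parameter_list` (the dygraph SGD optimizer now takes the parameters of the layer it should update). Below is a minimal end-to-end sketch of how the updated example would be exercised in a training step; the random lookup ids and the mean-of-embeddings "loss" are placeholders added here for illustration and are not part of the patch.

```python
# Sketch only: exercises the docstring example from the diff in a full
# dygraph step. The ids/loss below are made up for illustration.
import numpy as np
import paddle.fluid as fluid

base_lr = 0.1
with fluid.dygraph.guard():
    emb = fluid.dygraph.Embedding([10, 10])
    sgd_optimizer = fluid.optimizer.SGD(
        learning_rate=fluid.dygraph.NaturalExpDecay(
            learning_rate=base_lr,
            decay_steps=10000,
            decay_rate=0.5,
            staircase=True),
        # New in this patch: the optimizer is bound to the layer's parameters.
        parameter_list=emb.parameters())

    # Placeholder forward/backward pass (not from the patch): look up some
    # embedding rows and reduce them to a scalar so we have something to minimize.
    ids = fluid.dygraph.to_variable(
        np.arange(10).reshape(10, 1).astype('int64'))
    loss = fluid.layers.reduce_mean(emb(ids))
    loss.backward()
    sgd_optimizer.minimize(loss)   # decayed LR is recomputed on each minimize()
    emb.clear_gradients()
```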