diff --git a/python/paddle/fluid/dygraph/learning_rate_scheduler.py b/python/paddle/fluid/dygraph/learning_rate_scheduler.py
index 9e184d9c5b7f7a1bb7ae5169a22cebccfcfaaa9e..f7ca63d22036a7f350f99a4e966de203c40b813c 100644
--- a/python/paddle/fluid/dygraph/learning_rate_scheduler.py
+++ b/python/paddle/fluid/dygraph/learning_rate_scheduler.py
@@ -175,15 +175,17 @@ class NaturalExpDecay(LearningRateDecay):
 
     Examples:
         .. code-block:: python
 
-          import paddle.fluid as fluid
-          base_lr = 0.1
-          with fluid.dygraph.guard():
-              sgd_optimizer = fluid.optimizer.SGD(
-                  learning_rate=fluid.dygraph.NaturalExpDecay(
-                      learning_rate=base_lr,
-                      decay_steps=10000,
-                      decay_rate=0.5,
-                      staircase=True))
+          import paddle.fluid as fluid
+          base_lr = 0.1
+          with fluid.dygraph.guard():
+              emb = fluid.dygraph.Embedding([10, 10])
+              sgd_optimizer = fluid.optimizer.SGD(
+                  learning_rate=fluid.dygraph.NaturalExpDecay(
+                      learning_rate=base_lr,
+                      decay_steps=10000,
+                      decay_rate=0.5,
+                      staircase=True),
+                  parameter_list=emb.parameters())
 
     """