diff --git a/python/paddle/fluid/imperative/learning_rate_scheduler.py b/python/paddle/fluid/dygraph/learning_rate_scheduler.py
similarity index 100%
rename from python/paddle/fluid/imperative/learning_rate_scheduler.py
rename to python/paddle/fluid/dygraph/learning_rate_scheduler.py
diff --git a/python/paddle/fluid/layers/learning_rate_scheduler.py b/python/paddle/fluid/layers/learning_rate_scheduler.py
index 9c642712d2acda2b61b815ef0ece26223cb9a988..18ebab8ad683df73b12541f83fb4d4787e2d3b1f 100644
--- a/python/paddle/fluid/layers/learning_rate_scheduler.py
+++ b/python/paddle/fluid/layers/learning_rate_scheduler.py
@@ -30,8 +30,8 @@ from . import ops
 from . import tensor
 from ..initializer import init_on_cpu
 from ..framework import default_main_program, Parameter, unique_name, name_scope
-from ..imperative import base as imperative_base
-from ..imperative import learning_rate_scheduler as imperate_lr
+from ..dygraph import base as imperative_base
+from ..dygraph import learning_rate_scheduler as imperate_lr
 
 __all__ = [
     'exponential_decay', 'natural_exp_decay', 'inverse_time_decay',
@@ -350,7 +350,7 @@ def cosine_decay(learning_rate, step_each_epoch, epochs):
     following cosine decay strategy.
 
     decayed_lr = learning_rate * 0.5 * (math.cos(epoch * math.pi / epochs) + 1)
-    
+
     Args:
         learning_rate(Variable|float): The initial learning rate.
         step_each_epoch(int): the number of steps in an epoch.
diff --git a/python/paddle/fluid/tests/unittests/test_imperative_mnist.py b/python/paddle/fluid/tests/unittests/test_imperative_mnist.py
index 5b3c250501386a7854313218f5ea338281824252..5ab01839fbc20bbd3c242878c4ea23a00f7b0dca 100644
--- a/python/paddle/fluid/tests/unittests/test_imperative_mnist.py
+++ b/python/paddle/fluid/tests/unittests/test_imperative_mnist.py
@@ -23,12 +23,12 @@ import paddle
 import paddle.fluid as fluid
 from paddle.fluid import core
 from paddle.fluid.optimizer import SGDOptimizer
-from paddle.fluid.imperative.nn import Conv2D, Pool2D, FC
-from paddle.fluid.imperative.base import to_variable
+from paddle.fluid.dygraph.nn import Conv2D, Pool2D, FC
+from paddle.fluid.dygraph.base import to_variable
 from test_imperative_base import new_program_scope
 
 
-class SimpleImgConvPool(fluid.imperative.Layer):
+class SimpleImgConvPool(fluid.dygraph.Layer):
     def __init__(self,
                  name_scope,
                  num_channels,
@@ -77,7 +77,7 @@ class SimpleImgConvPool(fluid.imperative.Layer):
 
         return x
 
 
-class MNIST(fluid.imperative.Layer):
+class MNIST(fluid.dygraph.Layer):
     def __init__(self, name_scope):
         super(MNIST, self).__init__(name_scope)
 
@@ -108,7 +108,7 @@ class TestImperativeMnist(unittest.TestCase):
     def test_mnist_float32(self):
         seed = 90
         epoch_num = 1
-        with fluid.imperative.guard():
+        with fluid.dygraph.guard():
             fluid.default_startup_program().random_seed = seed
             fluid.default_main_program().random_seed = seed
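
The diff above only renames the `fluid.imperative` package to `fluid.dygraph`; call sites keep the same shape. Below is a minimal sketch of how user code looks against the renamed package, built from the identifiers visible in the diff (`fluid.dygraph.Layer`, `fluid.dygraph.guard`, `to_variable`, the `dygraph.nn` layers). The `TinyNet` class, its layer size, and the input shape are illustrative assumptions, not code from this PR.

```python
# Sketch only: exercises the renamed paddle.fluid.dygraph package
# (1.x-era API). TinyNet and its shapes are hypothetical.
import numpy as np
import paddle.fluid as fluid
from paddle.fluid.dygraph.nn import FC
from paddle.fluid.dygraph.base import to_variable


class TinyNet(fluid.dygraph.Layer):
    def __init__(self, name_scope):
        super(TinyNet, self).__init__(name_scope)
        # FC in the 1.x dygraph API infers the input dimension on first call.
        self._fc = FC(self.full_name(), 10)

    def forward(self, inputs):
        return self._fc(inputs)


with fluid.dygraph.guard():
    net = TinyNet("tiny_net")
    x = to_variable(np.random.rand(4, 784).astype("float32"))
    y = net(x)  # eager (dygraph) execution; no Program/Executor needed
```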