diff --git a/python/paddle/fluid/data.py b/python/paddle/fluid/data.py
index 2c75c493cba02dc21a5e2518a8a5e52b6eb4fd81..20ab401727c42bdc4c5733df7336ceb1b6f097a1 100644
--- a/python/paddle/fluid/data.py
+++ b/python/paddle/fluid/data.py
@@ -24,11 +24,6 @@ __all__ = ['data']
 
 def data(name, shape, dtype='float32', lod_level=0):
     """
-    :api_attr: Static Graph
-    :alias_main: paddle.nn.data
-    :alias: paddle.nn.data,paddle.nn.input.data
-    :old_api: paddle.fluid.data
-
     **Data Layer**
 
     This function creates a variable on the global block. The global variable
@@ -52,7 +47,7 @@ def data(name, shape, dtype='float32', lod_level=0):
 
     The default :code:`stop_gradient` attribute of the Variable created by
     this API is true, which means the gradient won't be passed backward
-    through the data Varaible. Set :code:`var.stop_gradient = False` If
+    through the data Variable. Set :code:`var.stop_gradient = False` if
     user would like to pass backward gradient.
 
     Args:
@@ -88,7 +83,7 @@ def data(name, shape, dtype='float32', lod_level=0):
 
         z = x + y
 
-        # In this example, we will feed x and y with np-ndarry "1"
+        # In this example, we will feed x and y with np-ndarray "1"
         # and fetch z, like implementing "1 + 1 = 2" in PaddlePaddle
 
         feed_data = np.ones(shape=[3, 2, 1], dtype=np.float32)
diff --git a/python/paddle/fluid/tests/unittests/test_erf_op.py b/python/paddle/fluid/tests/unittests/test_erf_op.py
index b83436fae01e6778a126cb327b2cd5d9cfef29bb..964e704c6a2ccbdc96fc281f6e417caf8351cdf7 100644
--- a/python/paddle/fluid/tests/unittests/test_erf_op.py
+++ b/python/paddle/fluid/tests/unittests/test_erf_op.py
@@ -61,7 +61,7 @@ class TestErfLayer(unittest.TestCase):
 
     def test_name(self):
         with fluid.program_guard(fluid.Program()):
-            x = paddle.nn.data('x', [3, 4])
+            x = paddle.static.data('x', [3, 4])
             y = paddle.erf(x, name='erf')
             self.assertTrue('erf' in y.name)
 