diff --git a/python/paddle/fluid/data.py b/python/paddle/fluid/data.py
index 20ab401727c42bdc4c5733df7336ceb1b6f097a1..dc57e9f71ed3d0de1a374bdf719b32a083198b31 100644
--- a/python/paddle/fluid/data.py
+++ b/python/paddle/fluid/data.py
@@ -18,10 +18,12 @@ import six
 from paddle.fluid import core
 from paddle.fluid.layer_helper import LayerHelper
 from paddle.fluid.data_feeder import check_dtype, check_type
+from ..utils import deprecated
 
 __all__ = ['data']
 
 
+@deprecated(since="2.0.0", update_to="paddle.static.data")
 def data(name, shape, dtype='float32', lod_level=0):
     """
     **Data Layer**
diff --git a/python/paddle/fluid/layers/nn.py b/python/paddle/fluid/layers/nn.py
index be3988b184933d6fd6c0c5ef1fd1bd19f2d4811b..d8b31bc6616477de721550de92dec32ed02a6384 100644
--- a/python/paddle/fluid/layers/nn.py
+++ b/python/paddle/fluid/layers/nn.py
@@ -8353,6 +8353,7 @@ def gather_nd(input, index, name=None):
     return output
 
 
+@deprecated(since="2.0.0", update_to="paddle.scatter")
 def scatter(input, index, updates, name=None, overwrite=True):
     """
     :alias_main: paddle.scatter
diff --git a/python/paddle/fluid/tests/unittests/test_data.py b/python/paddle/fluid/tests/unittests/test_data.py
index 22dc72048e429ed257e9d7d1213b6cb7dcafbf1a..8070148f8b36dd7dab7711abaf25994acebc7e6f 100644
--- a/python/paddle/fluid/tests/unittests/test_data.py
+++ b/python/paddle/fluid/tests/unittests/test_data.py
@@ -16,9 +16,11 @@ from __future__ import print_function
 
 import unittest
 
+import paddle
 import paddle.fluid as fluid
 import paddle.fluid.layers as layers
 from paddle.fluid import Program, program_guard
+import paddle.fluid.core as core
 
 
 class TestApiDataError(unittest.TestCase):
@@ -53,5 +55,49 @@ class TestApiDataError(unittest.TestCase):
         self.assertRaises(TypeError, test_shape_type)
 
 
+class TestApiStaticDataError(unittest.TestCase):
+    def test_fluid_dtype(self):
+        with program_guard(Program(), Program()):
+            x1 = paddle.static.data(name="x1", shape=[2, 25])
+            self.assertEqual(x1.dtype, core.VarDesc.VarType.FP32)
+
+            x2 = paddle.static.data(name="x2", shape=[2, 25], dtype="bool")
+            self.assertEqual(x2.dtype, core.VarDesc.VarType.BOOL)
+
+            paddle.set_default_dtype("float64")
+            x3 = paddle.static.data(name="x3", shape=[2, 25])
+            self.assertEqual(x3.dtype, core.VarDesc.VarType.FP64)
+
+    def test_fluid_data(self):
+        with program_guard(Program(), Program()):
+
+            # 1. The type of 'name' in paddle.static.data must be str.
+            def test_name_type():
+                paddle.static.data(name=1, shape=[2, 25], dtype="bool")
+
+            self.assertRaises(TypeError, test_name_type)
+
+            # 2. The type of 'shape' in paddle.static.data must be list or tuple.
+            def test_shape_type():
+                paddle.static.data(name='data1', shape=2, dtype="bool")
+
+            self.assertRaises(TypeError, test_shape_type)
+
+    def test_layers_data(self):
+        with program_guard(Program(), Program()):
+
+            # 1. The type of 'name' in paddle.static.data must be str.
+            def test_name_type():
+                paddle.static.data(name=1, shape=[2, 25], dtype="bool")
+
+            self.assertRaises(TypeError, test_name_type)
+
+            # 2. The type of 'shape' in paddle.static.data must be list or tuple.
+            def test_shape_type():
+                paddle.static.data(name='data1', shape=2, dtype="bool")
+
+            self.assertRaises(TypeError, test_shape_type)
+
+
 if __name__ == "__main__":
     unittest.main()
diff --git a/python/paddle/static/input.py b/python/paddle/static/input.py
index 0fe1d7e03f43b298af8bf557c04fdc592b3632bf..06b9c7cdbef5dd11d237a2b85586e598611bf83e 100644
--- a/python/paddle/static/input.py
+++ b/python/paddle/static/input.py
@@ -12,11 +12,112 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from ..fluid.data import data
+import paddle
+import numpy as np
+import six
+
+from paddle.fluid import core
+from paddle.fluid.layer_helper import LayerHelper
+from paddle.fluid.data_feeder import check_dtype, check_type
 
 __all__ = ['data', 'InputSpec']
 
 
+def data(name, shape, dtype=None, lod_level=0):
+    """
+    **Data Layer**
+
+    This function creates a variable on the global block. The global variable
+    can be accessed by all the following operators in the graph. The variable
+    is a placeholder that can be fed with input data, e.g. the Executor can
+    feed input data into the variable. When `dtype` is None, the dtype is
+    obtained from the global default dtype via `paddle.get_default_dtype()`.
+
+    Args:
+        name (str): The name/alias of the variable, see :ref:`api_guide_Name`
+            for more details.
+        shape (list|tuple): List|Tuple of integers declaring the shape. You can
+            set "None" or -1 at a dimension to indicate the dimension can be of any
+            size. For example, it is useful to set changeable batch size as "None" or -1.
+        dtype (np.dtype|str, optional): The type of the data. Supported
+            dtype: bool, float16, float32, float64, int8, int16, int32, int64,
+            uint8. Default: None. When `dtype` is not set, it is obtained
+            from the global default dtype via `paddle.get_default_dtype()`.
+        lod_level (int, optional): The LoD level of the LoDTensor. Usually users
+            don't have to set this value. For more details about when and how to
+            use LoD level, see :ref:`user_guide_lod_tensor` . Default: 0.
+
+    Returns:
+        Variable: The global variable that gives access to the data.
+
+    Examples:
+        .. code-block:: python
+
+          import numpy as np
+          import paddle.fluid as fluid
+          import paddle
+
+          # Creates a variable with fixed size [3, 2, 1]
+          # User can only feed data of the same shape to x
+          # The dtype is not set, so it defaults to "float32" via
+          # paddle.get_default_dtype(). You can use paddle.set_default_dtype()
+          # to change the global default dtype
+          x = paddle.static.data(name='x', shape=[3, 2, 1])
+
+          # Creates a variable with changeable batch size -1.
+          # Users can feed data of any batch size into y,
+          # but size of each data sample has to be [2, 1]
+          y = paddle.static.data(name='y', shape=[-1, 2, 1], dtype='float32')
+
+          z = x + y
+
+          # In this example, we feed x and y with a numpy ndarray of ones
+          # and fetch z, effectively computing "1 + 1 = 2" in PaddlePaddle
+          feed_data = np.ones(shape=[3, 2, 1], dtype=np.float32)
+
+          exe = fluid.Executor(fluid.CPUPlace())
+          out = exe.run(fluid.default_main_program(),
+                        feed={
+                            'x': feed_data,
+                            'y': feed_data
+                        },
+                        fetch_list=[z.name])
+
+          # numpy ndarray of shape=[3, 2, 1], dtype=float32, whose elements are all 2
+          print(out)
+
+    """
+    helper = LayerHelper('data', **locals())
+    check_type(name, 'name', (six.binary_type, six.text_type), 'data')
+    check_type(shape, 'shape', (list, tuple), 'data')
+
+    shape = list(shape)
+    for i in six.moves.range(len(shape)):
+        if shape[i] is None:
+            shape[i] = -1
+
+    if dtype:
+        return helper.create_global_variable(
+            name=name,
+            shape=shape,
+            dtype=dtype,
+            type=core.VarDesc.VarType.LOD_TENSOR,
+            stop_gradient=True,
+            lod_level=lod_level,
+            is_data=True,
+            need_check_feed=True)
+    else:
+        return helper.create_global_variable(
+            name=name,
+            shape=shape,
+            dtype=paddle.get_default_dtype(),
+            type=core.VarDesc.VarType.LOD_TENSOR,
+            stop_gradient=True,
+            lod_level=lod_level,
+            is_data=True,
+            need_check_feed=True)
+
+
 class InputSpec(object):
     """
     Define input specification of the model.
@@ -28,7 +129,7 @@ class InputSpec(object):
             declaring the shape. You can set "None" or -1 at a dimension to
             indicate the dimension can be of any size. For example, it is useful
             to set changeable batch size as "None" or -1.
-        dtype (np.dtype|VarType|str, optional): The type of the data. Supported
+        dtype (np.dtype|str, optional): The type of the data. Supported
             dtype: bool, float16, float32, float64, int8, int16, int32, int64,
             uint8. Default: float32.
 
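As a quick way to exercise the default-dtype behaviour this patch adds to `paddle.static.data`, here is a minimal sketch that mirrors the new `TestApiStaticDataError.test_fluid_dtype` case. It assumes a Paddle 2.x build where `paddle.enable_static()` and `paddle.set_default_dtype()` are available; it is an illustration for reviewers, not part of the patch itself.

```python
import paddle
from paddle.fluid import Program, program_guard, core

paddle.enable_static()  # paddle.static.data defines placeholders in static-graph mode

with program_guard(Program(), Program()):
    # dtype omitted: the placeholder picks up the global default dtype (float32 by default).
    x = paddle.static.data(name='x', shape=[None, 28])
    assert x.dtype == core.VarDesc.VarType.FP32

    # An explicit dtype always overrides the global default.
    y = paddle.static.data(name='y', shape=[None, 28], dtype='bool')
    assert y.dtype == core.VarDesc.VarType.BOOL

    # Changing the global default affects placeholders created afterwards.
    paddle.set_default_dtype('float64')
    z = paddle.static.data(name='z', shape=[None, 28])
    assert z.dtype == core.VarDesc.VarType.FP64
```

With the `@deprecated` decorators in place, calling `fluid.data` or `fluid.layers.scatter` should now emit a deprecation warning pointing users to `paddle.static.data` and `paddle.scatter` respectively, while the old entry points keep working.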