#   Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import numpy as np
import six

from paddle.fluid import core
from paddle.fluid.layer_helper import LayerHelper
from paddle.fluid.data_feeder import check_dtype, check_type

__all__ = ['data']


def data(name, shape, dtype='float32', lod_level=0):
    """
    **Data Layer**

    This function creates a variable on the global block. The global variable
    can be accessed by all the following operators in the graph. The variable
    is a placeholder that can be fed with input data. For example, an
    Executor can feed input data into the variable.

    Note:
        `paddle.fluid.layers.data` is deprecated. It will be removed in a
        future version. Please use `paddle.fluid.data` instead.

        `paddle.fluid.layers.data` sets the shape and dtype at compile time
        but does NOT check the shape or the dtype of the fed data, while this
        `paddle.fluid.data` checks the shape and the dtype of the data fed by
        Executor or ParallelExecutor during run time.

        To feed variable size inputs, users can set None or -1 on the variable
        dimension when using :code:`paddle.fluid.data`, or feed variable size
        inputs directly to :code:`paddle.fluid.layers.data` and PaddlePaddle
        will fit the size accordingly.

        The default :code:`stop_gradient` attribute of the Variable created by
        this API is True, which means the gradient won't be passed backward
        through the data Variable. Set :code:`var.stop_gradient = False` if
        you would like to pass the gradient backward through it (see the end
        of the example below).

    Args:
       name (str): The name/alias of the variable, see :ref:`api_guide_Name`
           for more details.
       shape (list|tuple): List|Tuple of integers declaring the shape. You can
           set "None" or -1 at a dimension to indicate that the dimension can
           be of any size. For example, it is useful to set a changeable batch
           size as "None" or -1.
       dtype (np.dtype|VarType|str, optional): The type of the data. Supported
           dtype: bool, float16, float32, float64, int8, int16, int32, int64,
           uint8. Default: float32.
       lod_level (int, optional): The LoD level of the LoDTensor. Usually users
           don't have to set this value. For more details about when and how to
           use LoD level, see :ref:`user_guide_lod_tensor` . Default: 0.

    Returns:
        Variable: The global variable that gives access to the data.

    Examples:
        .. code-block:: python

          import paddle.fluid as fluid
          import numpy as np

          # Creates a variable with fixed size [3, 2, 1]
          # Users can only feed data of the same shape to x
          x = fluid.data(name='x', shape=[3, 2, 1], dtype='float32')

          # Creates a variable with changeable batch size -1.
          # Users can feed data of any batch size into y,
          # but the size of each data sample has to be [2, 1]
          y = fluid.data(name='y', shape=[-1, 2, 1], dtype='float32')

          z = x + y

          # In this example, we will feed x and y with np-ndarray "1"
          # and fetch z, like implementing "1 + 1 = 2" in PaddlePaddle
          feed_data = np.ones(shape=[3, 2, 1], dtype=np.float32)

          exe = fluid.Executor(fluid.CPUPlace())
          out = exe.run(fluid.default_main_program(),
                        feed={
                            'x': feed_data,
                            'y': feed_data
                        },
                        fetch_list=[z.name])

          # np-ndarray of shape=[3, 2, 1], dtype=float32, whose elements are 2
          print(out)
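
          # A minimal sketch of the stop_gradient note above (not required for
          # this example): variables created by fluid.data() default to
          # stop_gradient=True, so no gradient is propagated back through
          # them. To allow backward gradients through x, reset the attribute
          # before building an optimizer:
          x.stop_gradient = False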

    """
    helper = LayerHelper('data', **locals())

    check_type(name, 'name', (six.binary_type, six.text_type), 'data')
    check_type(shape, 'shape', (list, tuple), 'data')

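    # Normalize the shape: accept a tuple as well as a list, and replace None
    # dimensions with -1, which marks a dimension of unknown size.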
    shape = list(shape)
    for i in six.moves.range(len(shape)):
        if shape[i] is None:
            shape[i] = -1

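    # Create the placeholder on the global block as a LoDTensor. stop_gradient
    # defaults to True (see the docstring Note), and need_check_feed=True lets
    # the Executor check the shape and dtype of fed data at run time.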
    return helper.create_global_variable(
        name=name,
        shape=shape,
        dtype=dtype,
        type=core.VarDesc.VarType.LOD_TENSOR,
        stop_gradient=True,
        lod_level=lod_level,
        is_data=True,
        need_check_feed=True)