#   Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import numpy as np
import six

from . import core
from .layer_helper import LayerHelper

__all__ = ['data']


def data(name, shape, dtype='float32', lod_level=0):
    """
    **Data Layer**

    This function creates a variable on the global block. The global variable
    can be accessed by all the operators that follow in the graph. The variable
    is a placeholder that can be fed with input data; for example, an Executor
    can feed input data into it.

    Note:
        `paddle.fluid.layers.data` is deprecated. It will be removed in a
        future version. Please use `paddle.fluid.data` instead.

        `paddle.fluid.layers.data` sets the shape and dtype at compile time but
        does NOT check the shape or the dtype of the fed data, whereas this
        `paddle.fluid.data` checks the shape and the dtype of the data fed by
        the Executor or ParallelExecutor at run time.

        To feed variable-size inputs, users can set -1 (or None) on the variable
        dimension when using :code:`paddle.fluid.data`, or feed variable-size
        inputs directly to :code:`paddle.fluid.layers.data`, and PaddlePaddle
        will fit the size accordingly.

        The default :code:`stop_gradient` attribute of the Variable created by
        this API is True, which means gradients will not be propagated backward
        through the data Variable. Set :code:`var.stop_gradient = False` if you
        would like gradients to be propagated backward through it.
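
        For example, a minimal sketch of enabling backward gradients for the
        created variable (assuming :code:`x` is a variable returned by this
        API):

        .. code-block:: python

            import paddle.fluid as fluid

            x = fluid.data(name='x', shape=[None, 2, 1], dtype='float32')
            x.stop_gradient = False  # let gradients propagate back into x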

    Args:
       name (str): The name/alias of the variable, see :ref:`api_guide_Name`
           for more details.
       shape (list|tuple): List|Tuple of integers declaring the shape. You can
           set "None" at a dimension to indicate that the dimension can be of
           any size. For example, it is useful to set a changeable batch size
           as "None".
       dtype (np.dtype|VarType|str, optional): The type of the data. Supported
           dtype: bool, float16, float32, float64, int8, int16, int32, int64,
           uint8. Default: float32
       lod_level (int, optional): The LoD level of the LoDTensor. Usually users
           don't have to set this value. For more details about when and how to
           use LoD level, see :ref:`user_guide_lod_tensor` . Default: 0

    Returns:
        Variable: The global variable that gives access to the data.

    Examples:
        .. code-block:: python

          import paddle.fluid as fluid
          import numpy as np

          # Creates a variable with fixed size [3, 2, 1]
          # Users can only feed data of the same shape into x
          x = fluid.data(name='x', shape=[3, 2, 1], dtype='float32')

          # Creates a variable with changeable batch size.
          # Users can feed data of any batch size into y,
          # but the size of each data sample has to be [2, 1]
          y = fluid.data(name='y', shape=[None, 2, 1], dtype='float32')

          z = x + y

          # In this example, we feed x and y with NumPy ndarrays of ones
          # and fetch z, like implementing "1 + 1 = 2" in PaddlePaddle
          feed_data = np.ones(shape=[3, 2, 1], dtype=np.float32)

          exe = fluid.Executor(fluid.CPUPlace())
          out = exe.run(fluid.default_main_program(),
                        feed={
                            'x': feed_data,
                            'y': feed_data
                        },
                        fetch_list=[z.name])

          # out is a list holding one NumPy ndarray of shape [3, 2, 1] and
          # dtype float32, whose elements are all 2
          print(out)
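
        The following is a minimal sketch of declaring a sequence input with
        the :code:`lod_level` argument; it only declares the variable and
        assumes a LoDTensor of variable-length int64 sequences is fed at run
        time.

        .. code-block:: python

          import paddle.fluid as fluid

          # Each data sample is a variable-length sequence of ids, so one
          # LoD level is declared on top of the [None, 1] shape.
          words = fluid.data(
              name='words', shape=[None, 1], dtype='int64', lod_level=1)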

    """
    helper = LayerHelper('data', **locals())
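    # The public API accepts None for an unknown dimension; it is represented
    # internally as -1, so convert None entries here.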
    shape = list(shape)
    for i in six.moves.range(len(shape)):
        if shape[i] is None:
            shape[i] = -1

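    # Create the placeholder on the global block as a LoDTensor variable.
    # need_check_feed=True makes the Executor check the shape and dtype of
    # the fed data at run time, as described in the docstring above.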
    return helper.create_global_variable(
        name=name,
        shape=shape,
        dtype=dtype,
        type=core.VarDesc.VarType.LOD_TENSOR,
        stop_gradient=True,
        lod_level=lod_level,
        is_data=True,
        need_check_feed=True)