#   Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import numpy as np
import six

from . import core
from .layer_helper import LayerHelper

__all__ = ['data']


def data(name, shape, dtype='float32', lod_level=0):
    """
    **Data Layer**

    This function creates a variable on the global block. The global variable
    can be accessed by all the operators that follow in the graph. The variable
    is a placeholder that can be fed with input data, for example, an Executor
    can feed input data into the variable.

    Note:
       `paddle.fluid.layers.data` is deprecated. It will be removed in a future
       version. Please use `paddle.fluid.data` instead.

       `paddle.fluid.layers.data` sets the shape and dtype at compile time but
       does NOT check the shape or the dtype of the fed data, whereas this
       `paddle.fluid.data` checks the shape and the dtype of the data fed by
       the Executor or ParallelExecutor during run time.

    Args:
       name (str): The name/alias of the variable, see :ref:`api_guide_Name`
           for more details.
       shape (list|tuple): List|Tuple of integers declaring the shape. You can
           set "None" at a dimension to indicate that the dimension can be of
           any size. For example, it is useful to set a changeable batch size
           as "None".
       dtype (np.dtype|VarType|str, optional): The type of the data. Supported
           dtype: bool, float16, float32, float64, int8, int16, int32, int64,
           uint8. Default: float32
       lod_level (int, optional): The LoD level of the LoDTensor. Usually users
           don't have to set this value. For more details about when and how to
           use LoD level, see :ref:`user_guide_lod_tensor` . Default: 0

    Returns:
        Variable: The global variable that gives access to the data.

    Examples:
        .. code-block:: python

          import paddle.fluid as fluid
          import numpy as np

          # Creates a variable with a fixed shape of [3, 2, 1]
          # Users can only feed data of the same shape into x
          x = fluid.data(name='x', shape=[3, 2, 1], dtype='float32')

          # Creates a variable with a changeable batch size.
          # Users can feed data of any batch size into y,
          # but size of each data sample has to be [2, 1]
          y = fluid.data(name='y', shape=[None, 2, 1], dtype='float32')

          z = x + y

          # In this example, we will feed x and y with NumPy ndarrays of ones
          # and fetch z, like implementing "1 + 1 = 2" in PaddlePaddle
          feed_data = np.ones(shape=[3, 2, 1], dtype=np.float32)

          exe = fluid.Executor(fluid.CPUPlace())
          out = exe.run(fluid.default_main_program(),
                        feed={
                            'x': feed_data,
                            'y': feed_data
                        },
                        fetch_list=[z.name])

          # A NumPy ndarray of shape [3, 2, 1], dtype float32, whose elements are all 2
          print(out)

    """
    helper = LayerHelper('data', **locals())
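    # Paddle uses -1 to denote a dimension of unknown size, so replace the
    # user-facing "None" placeholders with -1 before creating the variable.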
    shape = list(shape)
    for i in six.moves.range(len(shape)):
        if shape[i] is None:
            shape[i] = -1

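    # Create the placeholder as a LoDTensor variable on the global block;
    # need_check_feed=True lets the executor verify that the shape and dtype
    # of fed data match this declaration at run time (see the Note above).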
    return helper.create_global_variable(
        name=name,
        shape=shape,
        dtype=dtype,
        type=core.VarDesc.VarType.LOD_TENSOR,
        stop_gradient=True,
        lod_level=lod_level,
        is_data=True,
        need_check_feed=True)