#   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest
import numpy as np
from op_test import OpTest
import paddle
import paddle.fluid as fluid
import paddle.fluid.core as core
from paddle.fluid.framework import Program, program_guard


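# Depth supplied at runtime through the 'depth_tensor' input, output dtype set
# explicitly to FP32 through the 'dtype' attribute.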
class TestOneHotOp(OpTest):

    def setUp(self):
        self.op_type = 'one_hot'
        depth = 10
        depth_np = np.array(10).astype('int32')
        dimension = 12
        x_lod = [[4, 1, 3, 3]]
        x = [np.random.randint(0, depth - 1) for i in range(sum(x_lod[0]))]
        x = np.array(x).astype('int32').reshape([sum(x_lod[0]), 1])

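        # Reference output: one row per input id, with a 1.0 at the column
        # indexed by that id.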
        out = np.zeros(shape=(np.product(x.shape[:-1]),
                              depth)).astype('float32')

        for i in range(np.product(x.shape)):
            out[i, x[i]] = 1.0

        self.inputs = {'X': (x, x_lod), 'depth_tensor': depth_np}
        self.attrs = {'dtype': int(core.VarDesc.VarType.FP32)}
        self.outputs = {'Out': (out, x_lod)}

    def test_check_output(self):
        self.check_output(check_dygraph=False)


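# Same data as TestOneHotOp, but depth is passed through the 'depth' attribute
# instead of the 'depth_tensor' input.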
class TestOneHotOp_attr(OpTest):

    def setUp(self):
        self.op_type = 'one_hot'
        depth = 10
        dimension = 12
        x_lod = [[4, 1, 3, 3]]
        x = [np.random.randint(0, depth - 1) for i in range(sum(x_lod[0]))]
        x = np.array(x).astype('int32').reshape([sum(x_lod[0]), 1])

        out = np.zeros(shape=(np.product(x.shape[:-1]),
                              depth)).astype('float32')

        for i in range(np.product(x.shape)):
            out[i, x[i]] = 1.0

        self.inputs = {'X': (x, x_lod)}
        self.attrs = {'dtype': int(core.VarDesc.VarType.FP32), 'depth': depth}
        self.outputs = {'Out': (out, x_lod)}

    def test_check_output(self):
        self.check_output(check_dygraph=False)


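# No 'dtype' attribute: the op falls back to its default output dtype
# (the expected output here is float32).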
class TestOneHotOp_default_dtype(OpTest):

    def setUp(self):
        self.op_type = 'one_hot'
        depth = 10
        depth_np = np.array(10).astype('int32')
        dimension = 12
        x_lod = [[4, 1, 3, 3]]
        x = [np.random.randint(0, depth - 1) for i in range(sum(x_lod[0]))]
        x = np.array(x).astype('int32').reshape([sum(x_lod[0]), 1])

        out = np.zeros(shape=(np.product(x.shape[:-1]),
                              depth)).astype('float32')

        for i in range(np.product(x.shape)):
            out[i, x[i]] = 1.0

        self.inputs = {'X': (x, x_lod), 'depth_tensor': depth_np}
        self.attrs = {}
        self.outputs = {'Out': (out, x_lod)}

    def test_check_output(self):
        self.check_output(check_dygraph=False)


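# Depth passed as an attribute combined with the default output dtype.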
class TestOneHotOp_default_dtype_attr(OpTest):

    def setUp(self):
        self.op_type = 'one_hot'
        depth = 10
        dimension = 12
        x_lod = [[4, 1, 3, 3]]
        x = [np.random.randint(0, depth - 1) for i in range(sum(x_lod[0]))]
        x = np.array(x).astype('int32').reshape([sum(x_lod[0]), 1])

        out = np.zeros(shape=(np.product(x.shape[:-1]),
                              depth)).astype('float32')

        for i in range(np.product(x.shape)):
            out[i, x[i]] = 1.0

        self.inputs = {'X': (x, x_lod)}
        self.attrs = {'depth': depth}
        self.outputs = {'Out': (out, x_lod)}

    def test_check_output(self):
        self.check_output(check_dygraph=False)


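# Out-of-range ids (-1 and depth) with allow_out_of_range=True: every row of
# the expected output stays all zeros.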
class TestOneHotOp_out_of_range(OpTest):

    def setUp(self):
        self.op_type = 'one_hot'
        depth = 10
        x_lod = [[4, 1, 3, 3]]
        x = [np.random.choice([-1, depth]) for i in range(sum(x_lod[0]))]
        x = np.array(x).astype('int32').reshape([sum(x_lod[0]), 1])

        out = np.zeros(shape=(np.product(x.shape[:-1]),
                              depth)).astype('float32')

        self.inputs = {'X': (x, x_lod)}
        self.attrs = {'depth': depth, 'allow_out_of_range': True}
        self.outputs = {'Out': (out, x_lod)}

    def test_check_output(self):
        self.check_output(check_dygraph=False)


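# Ids greater than or equal to depth without allow_out_of_range: running the
# op through an executor is expected to raise ValueError.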
class TestOneHotOp_exception(unittest.TestCase):

    def setUp(self):
        self.op_type = 'one_hot'
        self.depth = 10
        self.place = core.CPUPlace()
        self.dimension = 12
        self.x = core.LoDTensor()
        x_lod = [[4, 1, 3, 3]]
        data = [np.random.randint(11, 20) for i in range(sum(x_lod[0]))]
        data = np.array(data).astype('int').reshape([sum(x_lod[0]), 1])
        self.x.set(data, self.place)
        self.x.set_recursive_sequence_lengths(x_lod)

    def test_check_output(self):
        program = Program()
        with program_guard(program):
            x = fluid.layers.data(name='x',
                                  shape=[self.dimension],
                                  dtype='float32',
                                  lod_level=1)
            block = program.current_block()
            one_hot_out = block.create_var(name="one_hot_out",
                                           type=core.VarDesc.VarType.LOD_TENSOR,
                                           dtype='float32')
            block.append_op(type='one_hot',
                            inputs={'X': x},
                            attrs={'depth': self.depth},
                            outputs={'Out': one_hot_out})
            exe = fluid.Executor(self.place)

            def run():
                exe.run(feed={'x': self.x},
                        fetch_list=[one_hot_out],
                        return_numpy=False)

            self.assertRaises(ValueError, run)


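# Type checks on the fluid.layers.one_hot wrapper: non-Variable input,
# non-integer input dtype, and non-int depth should all raise TypeError.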
class TestOneHotOpError(unittest.TestCase):

    def test_errors(self):
        with program_guard(Program(), Program()):
            # the input must be a Variable
            in_w = np.random.random((4, 1)).astype("int32")
            self.assertRaises(TypeError, fluid.layers.one_hot, in_w)
            # the input must be int32 or int64
            in_w2 = fluid.layers.data(name="in_w2",
                                      shape=[4, 1],
                                      append_batch_size=False,
                                      dtype="float32")
            self.assertRaises(TypeError, fluid.layers.one_hot, in_w2)
            # the depth must be int, long or Variable
            in_r = fluid.layers.data(name="in_r",
                                     shape=[4, 1],
                                     append_batch_size=False,
                                     dtype="int32")
            depth_w = np.array([4])
            self.assertRaises(TypeError, fluid.layers.one_hot, in_r, 4.1)
            self.assertRaises(TypeError, fluid.layers.one_hot, in_r, depth_w)


if __name__ == '__main__':
    paddle.enable_static()
    unittest.main()