#   Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest

import numpy as np
from op_test import OpTest

import paddle
from paddle.fluid import core


class TestUniqueOp(OpTest):
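    """Op test for the legacy unique op: Out holds the unique values of X in
    order of first appearance, and Index maps each element of X to its
    position in Out.
    """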
    def setUp(self):
        self.op_type = "unique"
        self.init_config()

    def test_check_output(self):
        paddle.enable_static()
        self.check_output()
        paddle.disable_static()

    def init_config(self):
        self.inputs = {
            'X': np.array([2, 3, 3, 1, 5, 3], dtype='int64'),
        }
        self.attrs = {'dtype': int(core.VarDesc.VarType.INT32)}
        self.outputs = {
            'Out': np.array([2, 3, 1, 5], dtype='int64'),
            'Index': np.array([0, 1, 1, 2, 3, 1], dtype='int32'),
        }


class TestOne(TestUniqueOp):
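    """Single-element input: Out and Index both have length one."""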
    def init_config(self):
        self.inputs = {
            'X': np.array([2], dtype='int64'),
        }
        self.attrs = {'dtype': int(core.VarDesc.VarType.INT32)}
        self.outputs = {
            'Out': np.array([2], dtype='int64'),
            'Index': np.array([0], dtype='int32'),
        }


class TestRandom(TestUniqueOp):
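    """Random int64 input: the reference Out/Index are rebuilt from np.unique
    and reordered to first-appearance order to match the op's output.
    """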
    def init_config(self):
        self.inputs = {'X': np.random.randint(0, 100, (150,), dtype='int64')}
        self.attrs = {'dtype': int(core.VarDesc.VarType.INT64)}
        np_unique, np_index, reverse_index = np.unique(
            self.inputs['X'], True, True
        )
        np_tuple = [(np_unique[i], np_index[i]) for i in range(len(np_unique))]
        np_tuple.sort(key=lambda x: x[1])
        target_out = np.array([i[0] for i in np_tuple], dtype='int64')
        target_index = np.array(
            [list(target_out).index(i) for i in self.inputs['X']], dtype='int64'
        )

        self.outputs = {'Out': target_out, 'Index': target_index}


class TestUniqueRaiseError(unittest.TestCase):
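    """paddle.unique should raise TypeError for a non-Tensor input and for an
    unsupported input dtype such as float16.
    """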
    def test_errors(self):
        paddle.enable_static()

        def test_type():
            paddle.unique([10])

        self.assertRaises(TypeError, test_type)

        def test_dtype():
            data = paddle.static.data(shape=[10], dtype="float16", name="input")
            paddle.unique(data)

        self.assertRaises(TypeError, test_dtype)
        paddle.disable_static()


@unittest.skipIf(
    not core.is_compiled_with_cuda(), "core is not compiled with CUDA"
)
class TestOneGPU(TestUniqueOp):
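    """CUDA variant of TestOne; the class is skipped when Paddle is not
    compiled with CUDA.
    """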
    def init_config(self):
        self.inputs = {
            'X': np.array([2], dtype='int64'),
        }
        self.attrs = {'dtype': int(core.VarDesc.VarType.INT32)}
        self.outputs = {
            'Out': np.array([2], dtype='int64'),
            'Index': np.array([0], dtype='int32'),
        }

    def test_check_output(self):
        if core.is_compiled_with_cuda():
            paddle.enable_static()
            place = core.CUDAPlace(0)
            self.check_output_with_place(place, atol=1e-5)
            paddle.disable_static()


@unittest.skipIf(
    not core.is_compiled_with_cuda(), "core is not compiled with CUDA"
)
class TestRandomGPU(TestUniqueOp):
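    """CUDA variant of TestRandom; skipped when Paddle is built without CUDA."""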
    def init_config(self):
        self.inputs = {'X': np.random.randint(0, 100, (150,), dtype='int64')}
        self.attrs = {'dtype': int(core.VarDesc.VarType.INT64)}
        np_unique, np_index, reverse_index = np.unique(
            self.inputs['X'], True, True
        )
        np_tuple = [(np_unique[i], np_index[i]) for i in range(len(np_unique))]
        np_tuple.sort(key=lambda x: x[1])
        target_out = np.array([i[0] for i in np_tuple], dtype='int64')
        target_index = np.array(
            [list(target_out).index(i) for i in self.inputs['X']], dtype='int64'
        )

        self.outputs = {'Out': target_out, 'Index': target_index}

    def test_check_output(self):
        if core.is_compiled_with_cuda():
            paddle.enable_static()
            place = core.CUDAPlace(0)
            self.check_output_with_place(place, atol=1e-5)
            paddle.disable_static()


class TestSortedUniqueOp(TestUniqueOp):
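    """Covers the sorted code path (is_sorted=True): the expected Out, Indices,
    Index (inverse) and Counts come directly from np.unique.
    """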
    def init_config(self):
        self.inputs = {'X': np.array([2, 3, 3, 1, 5, 3], dtype='int64')}
        unique, indices, inverse, count = np.unique(
            self.inputs['X'],
            return_index=True,
            return_inverse=True,
            return_counts=True,
            axis=None,
        )
        self.attrs = {
            'dtype': int(core.VarDesc.VarType.INT32),
            "return_index": True,
            "return_inverse": True,
            "return_counts": True,
            "axis": None,
            "is_sorted": True,
        }
        self.outputs = {
            'Out': unique,
            'Indices': indices,
            "Index": inverse,
            "Counts": count,
        }


class TestUniqueOpAxisNone(TestUniqueOp):
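    """axis=None flattens the float input before computing unique values."""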
    def init_config(self):
        self.inputs = {'X': np.random.random((4, 7, 10)).astype('float64')}
        unique, indices, inverse, counts = np.unique(
            self.inputs['X'],
            return_index=True,
            return_inverse=True,
            return_counts=True,
            axis=None,
        )
        self.attrs = {
            'dtype': int(core.VarDesc.VarType.INT32),
            "return_index": True,
            "return_inverse": True,
            "return_counts": True,
            "axis": None,
            "is_sorted": True,
        }
        self.outputs = {
            'Out': unique,
            'Indices': indices,
            "Index": inverse,
            "Counts": counts,
        }


class TestUniqueOpAxisNeg(TestUniqueOp):
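    """A negative axis (-1) selects unique slices along the last dimension."""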
    def init_config(self):
        self.inputs = {'X': np.random.random((6, 1, 8)).astype('float64')}
        unique, indices, inverse, counts = np.unique(
            self.inputs['X'],
            return_index=True,
            return_inverse=True,
            return_counts=True,
            axis=-1,
        )
        self.attrs = {
            'dtype': int(core.VarDesc.VarType.INT32),
            "return_index": True,
            "return_inverse": True,
            "return_counts": True,
            "axis": [-1],
            "is_sorted": True,
        }
        self.outputs = {
            'Out': unique,
            'Indices': indices,
            "Index": inverse,
            "Counts": counts,
        }


class TestUniqueOpAxis1(TestUniqueOp):
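    """Unique slices along axis 1 of a 3-D float input."""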
    def init_config(self):
        self.inputs = {'X': np.random.random((3, 8, 8)).astype('float64')}
        unique, indices, inverse, counts = np.unique(
            self.inputs['X'],
            return_index=True,
            return_inverse=True,
            return_counts=True,
            axis=1,
        )
        self.attrs = {
            'dtype': int(core.VarDesc.VarType.INT32),
            "return_index": True,
            "return_inverse": True,
            "return_counts": True,
            "axis": [1],
            "is_sorted": True,
        }
        self.outputs = {
            'Out': unique,
            'Indices': indices,
            "Index": inverse,
            "Counts": counts,
        }


class TestUniqueAPI(unittest.TestCase):
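    """API-level checks of paddle.unique in dygraph and static graph mode,
    compared against np.unique.
    """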
    def test_dygraph_api_out(self):
        x_data = np.random.randint(0, 10, (120,))
        x = paddle.to_tensor(x_data)
        out = paddle.unique(x)
        expected_out = np.unique(x_data)
        self.assertTrue((out.numpy() == expected_out).all())

    def test_dygraph_api_attr(self):
        x_data = np.random.random((3, 5, 5)).astype("float32")
        x = paddle.to_tensor(x_data)
        out, index, inverse, counts = paddle.unique(
            x,
            return_index=True,
            return_inverse=True,
            return_counts=True,
            axis=0,
        )
        np_out, np_index, np_inverse, np_counts = np.unique(
            x_data,
            return_index=True,
            return_inverse=True,
            return_counts=True,
            axis=0,
        )
        self.assertTrue((out.numpy() == np_out).all())
        self.assertTrue((index.numpy() == np_index).all())
        self.assertTrue((inverse.numpy() == np_inverse).all())
        self.assertTrue((counts.numpy() == np_counts).all())

    def test_dygraph_attr_dtype(self):
        x_data = np.random.randint(0, 10, (120,))
        x = paddle.to_tensor(x_data)
        out, indices, inverse, counts = paddle.unique(
            x,
            return_index=True,
            return_inverse=True,
            return_counts=True,
            dtype="int32",
        )
        expected_out, np_indices, np_inverse, np_counts = np.unique(
            x_data, return_index=True, return_inverse=True, return_counts=True
        )
        self.assertTrue((out.numpy() == expected_out).all())
        self.assertTrue((indices.numpy() == np_indices).all())
        self.assertTrue((inverse.numpy() == np_inverse).all())
        self.assertTrue((counts.numpy() == np_counts).all())

    def test_static_graph(self):
        paddle.enable_static()
        with paddle.static.program_guard(
            paddle.static.Program(), paddle.static.Program()
        ):
            x = paddle.static.data(name='x', shape=[3, 2], dtype='float64')
            unique, inverse, counts = paddle.unique(
                x, return_inverse=True, return_counts=True, axis=0
            )
            place = paddle.CPUPlace()
            exe = paddle.static.Executor(place)
            x_np = np.array([[1, 2], [3, 4], [1, 2]]).astype('float64')
            result = exe.run(
                feed={"x": x_np}, fetch_list=[unique, inverse, counts]
            )
        np_unique, np_inverse, np_counts = np.unique(
            x_np, return_inverse=True, return_counts=True, axis=0
        )
        np.testing.assert_allclose(result[0], np_unique, rtol=1e-05)
        np.testing.assert_allclose(result[1], np_inverse, rtol=1e-05)
        np.testing.assert_allclose(result[2], np_counts, rtol=1e-05)
        paddle.disable_static()


class TestUniqueError(unittest.TestCase):
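    """Argument validation: a bad input dtype or wrongly typed attributes of
    paddle.unique should raise TypeError.
    """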
    def test_input_dtype(self):
        paddle.enable_static()

        def test_x_dtype():
            with paddle.static.program_guard(
                paddle.static.Program(), paddle.static.Program()
            ):
                x = paddle.static.data(
                    name='x', shape=[10, 10], dtype='float16'
                )
                result = paddle.unique(x)

        self.assertRaises(TypeError, test_x_dtype)
        paddle.disable_static()

    def test_attr(self):
        paddle.enable_static()
        x = paddle.static.data(name='x', shape=[10, 10], dtype='float64')

        def test_return_index():
            result = paddle.unique(x, return_index=0)

        self.assertRaises(TypeError, test_return_index)

        def test_return_inverse():
            result = paddle.unique(x, return_inverse='s')

        self.assertRaises(TypeError, test_return_inverse)

        def test_return_counts():
            result = paddle.unique(x, return_counts=3)

        self.assertRaises(TypeError, test_return_counts)

        def test_axis():
            result = paddle.unique(x, axis='12')

        def test_dtype():
            result = paddle.unique(x, dtype='float64')

        self.assertRaises(TypeError, test_axis)
        self.assertRaises(TypeError, test_dtype)
        paddle.disable_static()


if __name__ == "__main__":
    unittest.main()