#   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import op_test
import unittest
import numpy
import numpy as np
import paddle
import paddle.fluid as fluid
import paddle.fluid.core as core
from paddle.fluid import Program, program_guard


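# Builds an OpTest subclass for one (op_type, dtype) pair; `callback` is the
# NumPy expression used to compute the expected output.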
def create_test_class(op_type, typename, callback):
    class Cls(op_test.OpTest):
        def setUp(self):
            a = numpy.random.random(size=(10, 7)).astype(typename)
            b = numpy.random.random(size=(10, 7)).astype(typename)
            c = callback(a, b)
            self.python_api = eval("paddle." + op_type)
            self.inputs = {'X': a, 'Y': b}
            self.outputs = {'Out': c}
            self.op_type = op_type

        def test_output(self):
            self.check_output(check_eager=False)

        def test_errors(self):
            paddle.enable_static()
            with program_guard(Program(), Program()):
                x = fluid.layers.data(name='x', shape=[2], dtype='int32')
                y = fluid.layers.data(name='y', shape=[2], dtype='int32')
                a = fluid.layers.data(name='a', shape=[2], dtype='int16')
                if self.op_type == "less_than":
                    self.assertRaises(
                        TypeError, fluid.layers.less_than, x=x, y=y, force_cpu=1
                    )
                op = eval("fluid.layers.%s" % self.op_type)
                self.assertRaises(TypeError, op, x=x, y=y, cond=1)
                self.assertRaises(TypeError, op, x=x, y=a)
                self.assertRaises(TypeError, op, x=a, y=y)

    cls_name = "{0}_{1}".format(op_type, typename)
    Cls.__name__ = cls_name
    globals()[cls_name] = Cls


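# Instantiate operator-level tests for each supported dtype. float16 needs a
# CUDA build, and float64 is replaced by float32 on ROCm builds.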
for _type_name in {'float32', 'float64', 'int32', 'int64', 'float16'}:
    if _type_name == 'float64' and core.is_compiled_with_rocm():
        _type_name = 'float32'
    if _type_name == 'float16' and (not core.is_compiled_with_cuda()):
        continue

    create_test_class('less_than', _type_name, lambda _a, _b: _a < _b)
    create_test_class('less_equal', _type_name, lambda _a, _b: _a <= _b)
    create_test_class('greater_than', _type_name, lambda _a, _b: _a > _b)
    create_test_class('greater_equal', _type_name, lambda _a, _b: _a >= _b)
    create_test_class('equal', _type_name, lambda _a, _b: _a == _b)
    create_test_class('not_equal', _type_name, lambda _a, _b: _a != _b)


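# Builds a TestCase that exercises the public paddle.<op_type> API in both
# static and dynamic graph modes, including broadcasting and scalar operands.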
def create_paddle_case(op_type, callback):
    class PaddleCls(unittest.TestCase):
        def setUp(self):
            self.op_type = op_type
            self.input_x = np.array([1, 2, 3, 4]).astype(np.int64)
            self.input_y = np.array([1, 3, 2, 4]).astype(np.int64)
            self.real_result = callback(self.input_x, self.input_y)
            self.place = fluid.CPUPlace()
            if core.is_compiled_with_cuda():
                self.place = paddle.CUDAPlace(0)

        def test_api(self):
            paddle.enable_static()
            with program_guard(Program(), Program()):
                x = fluid.data(name='x', shape=[4], dtype='int64')
                y = fluid.data(name='y', shape=[4], dtype='int64')
                op = eval("paddle.%s" % (self.op_type))
                out = op(x, y)
                exe = fluid.Executor(self.place)
                (res,) = exe.run(
                    feed={"x": self.input_x, "y": self.input_y},
                    fetch_list=[out],
                )
            self.assertEqual((res == self.real_result).all(), True)

        def test_api_float(self):
            if self.op_type == "equal":
                paddle.enable_static()
                with program_guard(Program(), Program()):
                    x = fluid.data(name='x', shape=[4], dtype='int64')
                    y = fluid.data(name='y', shape=[1], dtype='int64')
                    op = eval("paddle.%s" % (self.op_type))
                    out = op(x, y)
                    exe = fluid.Executor(self.place)
                    (res,) = exe.run(
                        feed={"x": self.input_x, "y": 1.0}, fetch_list=[out]
                    )
                self.real_result = np.array([1, 0, 0, 0]).astype(np.int64)
                self.assertEqual((res == self.real_result).all(), True)

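        # Dynamic (imperative) mode: compare eager results against the NumPy
        # reference computed by `callback`.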
        def test_dynamic_api(self):
            paddle.disable_static()
            x = paddle.to_tensor(self.input_x)
            y = paddle.to_tensor(self.input_y)
            op = eval("paddle.%s" % (self.op_type))
            out = op(x, y)
            self.assertEqual((out.numpy() == self.real_result).all(), True)
            paddle.enable_static()

        def test_dynamic_api_int(self):
            if self.op_type == "equal":
                paddle.disable_static()
                x = paddle.to_tensor(self.input_x)
                op = eval("paddle.%s" % (self.op_type))
                out = op(x, 1)
                self.real_result = np.array([1, 0, 0, 0]).astype(np.int64)
                self.assertEqual((out.numpy() == self.real_result).all(), True)
                paddle.enable_static()

        def test_dynamic_api_float(self):
            if self.op_type == "equal":
                paddle.disable_static()
                x = paddle.to_tensor(self.input_x)
                op = eval("paddle.%s" % (self.op_type))
                out = op(x, 1.0)
                self.real_result = np.array([1, 0, 0, 0]).astype(np.int64)
                self.assertEqual((out.numpy() == self.real_result).all(), True)
                paddle.enable_static()

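        # Equality with inf/nan operands; the expected result is whatever
        # NumPy produces for the same input arrays.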
        def test_dynamic_api_inf_1(self):
            if self.op_type == "equal":
                paddle.disable_static()
                x1 = np.array([1, float('inf'), float('inf')]).astype(np.int64)
                x = paddle.to_tensor(x1)
                y1 = np.array([1, float('-inf'), float('inf')]).astype(np.int64)
                y = paddle.to_tensor(y1)
                op = eval("paddle.%s" % (self.op_type))
                out = op(x, y)
                self.real_result = (x1 == y1).astype(np.int64)
                self.assertEqual(
                    (out.numpy().astype(np.int64) == self.real_result).all(),
                    True,
                )
                paddle.enable_static()

        def test_dynamic_api_inf_2(self):
            if self.op_type == "equal":
                paddle.disable_static()
                x1 = np.array([1, float('inf'), float('inf')]).astype(
                    np.float32
                )
                x = paddle.to_tensor(x1)
                y1 = np.array([1, float('-inf'), float('inf')]).astype(
                    np.float32
                )
                y = paddle.to_tensor(y1)
                op = eval("paddle.%s" % (self.op_type))
                out = op(x, y)
                self.real_result = (x1 == y1).astype(np.int64)
                self.assertEqual(
                    (out.numpy().astype(np.int64) == self.real_result).all(),
                    True,
                )
                paddle.enable_static()

        def test_dynamic_api_inf_3(self):
            if self.op_type == "equal":
                paddle.disable_static()
                x1 = np.array([1, float('inf'), float('-inf')]).astype(
                    np.float32
                )
                x = paddle.to_tensor(x1)
                y1 = np.array([1, 2, 3]).astype(np.float32)
                y = paddle.to_tensor(y1)
                op = eval("paddle.%s" % (self.op_type))
                out = op(x, y)
                self.real_result = (x1 == y1).astype(np.int64)
                self.assertEqual(
                    (out.numpy().astype(np.int64) == self.real_result).all(),
                    True,
                )
                paddle.enable_static()

        def test_dynamic_api_nan_1(self):
            if self.op_type == "equal":
                paddle.disable_static()
                x1 = np.array([1, float('nan'), float('nan')]).astype(np.int64)
                x = paddle.to_tensor(x1)
                y1 = np.array([1, float('-nan'), float('nan')]).astype(np.int64)
                y = paddle.to_tensor(y1)
                op = eval("paddle.%s" % (self.op_type))
                out = op(x, y)
                self.real_result = (x1 == y1).astype(np.int64)
                self.assertEqual(
                    (out.numpy().astype(np.int64) == self.real_result).all(),
                    True,
                )
                paddle.enable_static()

        def test_dynamic_api_nan_2(self):
            if self.op_type == "equal":
                paddle.disable_static()
                x1 = np.array([1, float('nan'), float('nan')]).astype(
                    np.float32
                )
                x = paddle.to_tensor(x1)
                y1 = np.array([1, float('-nan'), float('nan')]).astype(
                    np.float32
                )
                y = paddle.to_tensor(y1)
                op = eval("paddle.%s" % (self.op_type))
                out = op(x, y)
                self.real_result = (x1 == y1).astype(np.int64)
                self.assertEqual(
                    (out.numpy().astype(np.int64) == self.real_result).all(),
                    True,
                )
                paddle.enable_static()

        def test_dynamic_api_nan_3(self):
            if self.op_type == "equal":
                paddle.disable_static()
                x1 = np.array([1, float('-nan'), float('nan')]).astype(
                    np.float32
                )
                x = paddle.to_tensor(x1)
                y1 = np.array([1, 2, 1]).astype(np.float32)
                y = paddle.to_tensor(y1)
                op = eval("paddle.%s" % (self.op_type))
                out = op(x, y)
                self.real_result = (x1 == y1).astype(np.int64)
                self.assertEqual(
                    (out.numpy().astype(np.int64) == self.real_result).all(),
                    True,
                )
                paddle.enable_static()

        def test_not_equal(self):
            if self.op_type == "not_equal":
                paddle.disable_static()
                x = paddle.to_tensor(
                    np.array([1.2e-8, 2, 2, 1]), dtype="float32"
                )
                y = paddle.to_tensor(
                    np.array([1.1e-8, 2, 2, 1]), dtype="float32"
                )
                op = eval("paddle.%s" % (self.op_type))
                out = op(x, y)
                self.real_result = np.array([0, 0, 0, 0]).astype(np.int64)
                self.assertEqual((out.numpy() == self.real_result).all(), True)
                paddle.enable_static()

        def test_assert(self):
            def test_dynamic_api_string(self):
                if self.op_type == "equal":
                    paddle.disable_static()
                    x = paddle.to_tensor(self.input_x)
                    op = eval("paddle.%s" % (self.op_type))
                    out = op(x, "1.0")
                    paddle.enable_static()

            self.assertRaises(TypeError, test_dynamic_api_string)

        def test_dynamic_api_bool(self):
            if self.op_type == "equal":
                paddle.disable_static()
                x = paddle.to_tensor(self.input_x)
                op = eval("paddle.%s" % (self.op_type))
                out = op(x, True)
                self.real_result = np.array([1, 0, 0, 0]).astype(np.int64)
                self.assertEqual((out.numpy() == self.real_result).all(), True)
                paddle.enable_static()

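        # Broadcasting: x and y have different but broadcast-compatible shapes,
        # and the expected result is computed with NumPy broadcasting.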
        def test_broadcast_api_1(self):
            paddle.enable_static()
            with program_guard(Program(), Program()):
                x = paddle.static.data(
                    name='x', shape=[1, 2, 1, 3], dtype='int32'
                )
                y = paddle.static.data(name='y', shape=[1, 2, 3], dtype='int32')
                op = eval("paddle.%s" % (self.op_type))
                out = op(x, y)
                exe = paddle.static.Executor(self.place)
                input_x = np.arange(1, 7).reshape((1, 2, 1, 3)).astype(np.int32)
                input_y = np.arange(0, 6).reshape((1, 2, 3)).astype(np.int32)
                real_result = callback(input_x, input_y)
                (res,) = exe.run(
                    feed={"x": input_x, "y": input_y}, fetch_list=[out]
                )
            self.assertEqual((res == real_result).all(), True)

        def test_broadcast_api_2(self):
            paddle.enable_static()
            with program_guard(Program(), Program()):
                x = paddle.static.data(name='x', shape=[1, 2, 3], dtype='int32')
                y = paddle.static.data(
                    name='y', shape=[1, 2, 1, 3], dtype='int32'
                )
                op = eval("paddle.%s" % (self.op_type))
                out = op(x, y)
                exe = paddle.static.Executor(self.place)
                input_x = np.arange(0, 6).reshape((1, 2, 3)).astype(np.int32)
                input_y = np.arange(1, 7).reshape((1, 2, 1, 3)).astype(np.int32)
                real_result = callback(input_x, input_y)
                (res,) = exe.run(
                    feed={"x": input_x, "y": input_y}, fetch_list=[out]
                )
            self.assertEqual((res == real_result).all(), True)

        def test_broadcast_api_3(self):
            paddle.enable_static()
            with program_guard(Program(), Program()):
                x = paddle.static.data(name='x', shape=[5], dtype='int32')
                y = paddle.static.data(name='y', shape=[3, 1], dtype='int32')
                op = eval("paddle.%s" % (self.op_type))
                out = op(x, y)
                exe = paddle.static.Executor(self.place)
                input_x = np.arange(0, 5).reshape((5)).astype(np.int32)
                input_y = np.array([5, 3, 2]).reshape((3, 1)).astype(np.int32)
                real_result = callback(input_x, input_y)
                (res,) = exe.run(
                    feed={"x": input_x, "y": input_y}, fetch_list=[out]
                )
            self.assertEqual((res == real_result).all(), True)

        def test_bool_api_4(self):
            paddle.enable_static()
            with program_guard(Program(), Program()):
                x = paddle.static.data(name='x', shape=[3, 1], dtype='bool')
                y = paddle.static.data(name='y', shape=[3, 1], dtype='bool')
                op = eval("paddle.%s" % (self.op_type))
                out = op(x, y)
                exe = paddle.static.Executor(self.place)
                input_x = np.array([True, False, True]).astype(np.bool_)
                input_y = np.array([True, True, False]).astype(np.bool_)
                real_result = callback(input_x, input_y)
                (res,) = exe.run(
                    feed={"x": input_x, "y": input_y}, fetch_list=[out]
                )
            self.assertEqual((res == real_result).all(), True)

        def test_bool_broadcast_api_4(self):
            paddle.enable_static()
            with program_guard(Program(), Program()):
                x = paddle.static.data(name='x', shape=[3, 1], dtype='bool')
                y = paddle.static.data(name='y', shape=[1], dtype='bool')
                op = eval("paddle.%s" % (self.op_type))
                out = op(x, y)
                exe = paddle.static.Executor(self.place)
                input_x = np.array([True, False, True]).astype(np.bool_)
                input_y = np.array([True]).astype(np.bool_)
                real_result = callback(input_x, input_y)
                (res,) = exe.run(
                    feed={"x": input_x, "y": input_y}, fetch_list=[out]
                )
            self.assertEqual((res == real_result).all(), True)

        def test_attr_name(self):
            paddle.enable_static()
            with program_guard(Program(), Program()):
                x = fluid.layers.data(name='x', shape=[4], dtype='int32')
                y = fluid.layers.data(name='y', shape=[4], dtype='int32')
                op = eval("paddle.%s" % (self.op_type))
                out = op(x=x, y=y, name="name_%s" % (self.op_type))
            self.assertEqual("name_%s" % (self.op_type) in out.name, True)

    cls_name = "TestCase_{}".format(op_type)
    PaddleCls.__name__ = cls_name
    globals()[cls_name] = PaddleCls


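# Register the API-level test cases, one per comparison operator.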
create_paddle_case('less_than', lambda _a, _b: _a < _b)
create_paddle_case('less_equal', lambda _a, _b: _a <= _b)
create_paddle_case('greater_than', lambda _a, _b: _a > _b)
create_paddle_case('greater_equal', lambda _a, _b: _a >= _b)
create_paddle_case('equal', lambda _a, _b: _a == _b)
create_paddle_case('not_equal', lambda _a, _b: _a != _b)


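# Compare ops should reject inputs that are not Variables (e.g. a LoDTensor).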
class TestCompareOpError(unittest.TestCase):
    def test_errors(self):
        paddle.enable_static()
        with program_guard(Program(), Program()):
            # The input x and y of compare_op must be Variable.
            x = fluid.layers.data(name='x', shape=[1], dtype="float32")
            y = fluid.create_lod_tensor(
                numpy.array([[-1]]), [[1]], fluid.CPUPlace()
            )
            self.assertRaises(TypeError, fluid.layers.greater_equal, x, y)


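# Static-graph checks for paddle.equal on elementwise int32 inputs.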
class API_TestElementwise_Equal(unittest.TestCase):
    def test_api(self):
        paddle.enable_static()
        with fluid.program_guard(fluid.Program(), fluid.Program()):
            label = fluid.layers.assign(np.array([3, 3], dtype="int32"))
            limit = fluid.layers.assign(np.array([3, 2], dtype="int32"))
            out = paddle.equal(x=label, y=limit)
            place = fluid.CPUPlace()
            exe = fluid.Executor(place)
            (res,) = exe.run(fetch_list=[out])
        self.assertEqual((res == np.array([True, False])).all(), True)

        with fluid.program_guard(fluid.Program(), fluid.Program()):
            label = fluid.layers.assign(np.array([3, 3], dtype="int32"))
            limit = fluid.layers.assign(np.array([3, 3], dtype="int32"))
            out = paddle.equal(x=label, y=limit)
            place = fluid.CPUPlace()
            exe = fluid.Executor(place)
            (res,) = exe.run(fetch_list=[out])
        self.assertEqual((res == np.array([True, True])).all(), True)


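# Place handling: force_cpu for less_than in static mode, and a comparison on a
# tensor placed on CUDAPinnedPlace (when available) in dynamic mode.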
class TestCompareOpPlace(unittest.TestCase):
    def test_place_1(self):
        paddle.enable_static()
        place = paddle.CPUPlace()
        if core.is_compiled_with_cuda():
            place = paddle.CUDAPlace(0)
        label = fluid.layers.assign(np.array([3, 3], dtype="int32"))
        limit = fluid.layers.assign(np.array([3, 2], dtype="int32"))
        out = fluid.layers.less_than(label, limit, force_cpu=True)
        exe = fluid.Executor(place)
        (res,) = exe.run(fetch_list=[out])
        self.assertEqual((res == np.array([False, False])).all(), True)

    def test_place_2(self):
        place = paddle.CPUPlace()
        data_place = place
        if core.is_compiled_with_cuda():
            place = paddle.CUDAPlace(0)
            data_place = paddle.CUDAPinnedPlace()
        paddle.disable_static(place)
        data = np.array([9], dtype="int64")
        data_tensor = paddle.to_tensor(data, place=data_place)
        result = data_tensor == 0
        self.assertEqual((result.numpy() == np.array([False])).all(), True)


if __name__ == '__main__':
    paddle.enable_static()
    unittest.main()