#   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function

import unittest

import numpy
import numpy as np

import op_test
import paddle
import paddle.fluid as fluid
import paddle.fluid.core as core
from paddle.fluid import Program, program_guard


def create_test_class(op_type, typename, callback):
    """Generate and register an ``OpTest`` subclass for one compare op.

    Args:
        op_type (str): op name, e.g. ``less_than`` or ``equal``.
        typename (str): numpy dtype name used for the random inputs.
        callback (callable): numpy reference implementation of the op.

    The generated class is published in ``globals()`` under the name
    ``"{op_type}_{typename}"`` so unittest discovery picks it up.
    """

    class Cls(op_test.OpTest):

        def setUp(self):
            a = numpy.random.random(size=(10, 7)).astype(typename)
            b = numpy.random.random(size=(10, 7)).astype(typename)
            c = callback(a, b)
            # getattr is the idiomatic (and eval-free) way to resolve a
            # dotted attribute from a name.
            self.python_api = getattr(paddle, op_type)
            self.inputs = {'X': a, 'Y': b}
            self.outputs = {'Out': c}
            self.op_type = op_type

        def test_output(self):
            self.check_output(check_eager=False)

        def test_errors(self):
            paddle.enable_static()
            with program_guard(Program(), Program()):
                x = fluid.layers.data(name='x', shape=[2], dtype='int32')
                y = fluid.layers.data(name='y', shape=[2], dtype='int32')
                a = fluid.layers.data(name='a', shape=[2], dtype='int16')
                if self.op_type == "less_than":
                    # force_cpu must be a bool; an int should be rejected.
                    self.assertRaises(TypeError,
                                      fluid.layers.less_than,
                                      x=x,
                                      y=y,
                                      force_cpu=1)
                op = getattr(fluid.layers, self.op_type)
                # cond must be a Variable, and int16 operands are rejected.
                self.assertRaises(TypeError, op, x=x, y=y, cond=1)
                self.assertRaises(TypeError, op, x=x, y=a)
                self.assertRaises(TypeError, op, x=a, y=y)

    cls_name = "{0}_{1}".format(op_type, typename)
    Cls.__name__ = cls_name
    globals()[cls_name] = Cls


# One (op name, numpy reference) pair per compare op under test.
_COMPARE_CALLBACKS = (
    ('less_than', lambda _a, _b: _a < _b),
    ('less_equal', lambda _a, _b: _a <= _b),
    ('greater_than', lambda _a, _b: _a > _b),
    ('greater_equal', lambda _a, _b: _a >= _b),
    ('equal', lambda _a, _b: _a == _b),
    ('not_equal', lambda _a, _b: _a != _b),
)

for _type_name in {'float32', 'float64', 'int32', 'int64', 'float16'}:
    # ROCm builds downgrade float64 coverage to float32.
    if _type_name == 'float64' and core.is_compiled_with_rocm():
        _type_name = 'float32'
    # float16 only makes sense on CUDA builds.
    if _type_name == 'float16' and not core.is_compiled_with_cuda():
        continue

    for _op_name, _np_ref in _COMPARE_CALLBACKS:
        create_test_class(_op_name, _type_name, _np_ref)


def create_paddle_case(op_type, callback):
    """Generate and register a ``unittest.TestCase`` for one ``paddle.*``
    compare API.

    Args:
        op_type (str): op name, e.g. ``less_than`` or ``equal``.
        callback (callable): numpy reference implementation of the op.

    The generated class is published in ``globals()`` as
    ``"TestCase_{op_type}"``.
    """

    class PaddleCls(unittest.TestCase):

        def setUp(self):
            self.op_type = op_type
            self.input_x = np.array([1, 2, 3, 4]).astype(np.int64)
            self.input_y = np.array([1, 3, 2, 4]).astype(np.int64)
            self.real_result = callback(self.input_x, self.input_y)
            self.place = fluid.CPUPlace()
            if core.is_compiled_with_cuda():
                self.place = paddle.CUDAPlace(0)

        def test_api(self):
            # Static-graph path with two same-shape int64 tensors.
            paddle.enable_static()
            with program_guard(Program(), Program()):
                x = fluid.data(name='x', shape=[4], dtype='int64')
                y = fluid.data(name='y', shape=[4], dtype='int64')
                op = getattr(paddle, self.op_type)
                out = op(x, y)
                exe = fluid.Executor(self.place)
                res, = exe.run(feed={
                    "x": self.input_x,
                    "y": self.input_y
                },
                               fetch_list=[out])
            self.assertEqual((res == self.real_result).all(), True)

        def test_api_float(self):
            # Static-graph path feeding a python float against an int64
            # placeholder; only meaningful for `equal`.
            if self.op_type == "equal":
                paddle.enable_static()
                with program_guard(Program(), Program()):
                    x = fluid.data(name='x', shape=[4], dtype='int64')
                    y = fluid.data(name='y', shape=[1], dtype='int64')
                    op = getattr(paddle, self.op_type)
                    out = op(x, y)
                    exe = fluid.Executor(self.place)
                    res, = exe.run(feed={
                        "x": self.input_x,
                        "y": 1.0
                    },
                                   fetch_list=[out])
                self.real_result = np.array([1, 0, 0, 0]).astype(np.int64)
                self.assertEqual((res == self.real_result).all(), True)

        def test_dynamic_api(self):
            paddle.disable_static()
            x = paddle.to_tensor(self.input_x)
            y = paddle.to_tensor(self.input_y)
            op = getattr(paddle, self.op_type)
            out = op(x, y)
            self.assertEqual((out.numpy() == self.real_result).all(), True)
            paddle.enable_static()

        def test_dynamic_api_int(self):
            # Tensor vs python int scalar; only meaningful for `equal`.
            if self.op_type == "equal":
                paddle.disable_static()
                x = paddle.to_tensor(self.input_x)
                op = getattr(paddle, self.op_type)
                out = op(x, 1)
                self.real_result = np.array([1, 0, 0, 0]).astype(np.int64)
                self.assertEqual((out.numpy() == self.real_result).all(), True)
                paddle.enable_static()

        def test_dynamic_api_float(self):
            # Tensor vs python float scalar; only meaningful for `equal`.
            if self.op_type == "equal":
                paddle.disable_static()
                x = paddle.to_tensor(self.input_x)
                op = getattr(paddle, self.op_type)
                out = op(x, 1.0)
                self.real_result = np.array([1, 0, 0, 0]).astype(np.int64)
                self.assertEqual((out.numpy() == self.real_result).all(), True)
                paddle.enable_static()

        def test_not_equal(self):
            # Values differing by ~1e-9 in float32 compare equal, so
            # not_equal must report all-False here.
            if self.op_type == "not_equal":
                paddle.disable_static()
                x = paddle.to_tensor(np.array([1.2e-8, 2, 2, 1]),
                                     dtype="float32")
                y = paddle.to_tensor(np.array([1.1e-8, 2, 2, 1]),
                                     dtype="float32")
                op = getattr(paddle, self.op_type)
                out = op(x, y)
                self.real_result = np.array([0, 0, 0, 0]).astype(np.int64)
                self.assertEqual((out.numpy() == self.real_result).all(), True)
                paddle.enable_static()

        def test_assert(self):
            # BUGFIX: the nested callable used to take a `self` parameter
            # while assertRaises called it with no arguments, so the
            # TypeError came from the missing argument and op(x, "1.0")
            # was never exercised. Use a zero-argument closure and guard
            # the assertion on op_type instead.

            def dynamic_api_with_string():
                paddle.disable_static()
                x = paddle.to_tensor(self.input_x)
                op = getattr(paddle, self.op_type)
                op(x, "1.0")
                paddle.enable_static()

            if self.op_type == "equal":
                self.assertRaises(TypeError, dynamic_api_with_string)

        def test_dynamic_api_bool(self):
            # Tensor vs python bool scalar; only meaningful for `equal`.
            if self.op_type == "equal":
                paddle.disable_static()
                x = paddle.to_tensor(self.input_x)
                op = getattr(paddle, self.op_type)
                out = op(x, True)
                self.real_result = np.array([1, 0, 0, 0]).astype(np.int64)
                self.assertEqual((out.numpy() == self.real_result).all(), True)
                paddle.enable_static()

        def test_broadcast_api_1(self):
            # x rank 4 broadcast against y rank 3.
            paddle.enable_static()
            with program_guard(Program(), Program()):
                x = paddle.static.data(name='x',
                                       shape=[1, 2, 1, 3],
                                       dtype='int32')
                y = paddle.static.data(name='y', shape=[1, 2, 3], dtype='int32')
                op = getattr(paddle, self.op_type)
                out = op(x, y)
                exe = paddle.static.Executor(self.place)
                input_x = np.arange(1, 7).reshape((1, 2, 1, 3)).astype(np.int32)
                input_y = np.arange(0, 6).reshape((1, 2, 3)).astype(np.int32)
                real_result = callback(input_x, input_y)
                res, = exe.run(feed={
                    "x": input_x,
                    "y": input_y
                },
                               fetch_list=[out])
            self.assertEqual((res == real_result).all(), True)

        def test_broadcast_api_2(self):
            # Mirror of test_broadcast_api_1: rank 3 against rank 4.
            paddle.enable_static()
            with program_guard(Program(), Program()):
                x = paddle.static.data(name='x', shape=[1, 2, 3], dtype='int32')
                y = paddle.static.data(name='y',
                                       shape=[1, 2, 1, 3],
                                       dtype='int32')
                op = getattr(paddle, self.op_type)
                out = op(x, y)
                exe = paddle.static.Executor(self.place)
                input_x = np.arange(0, 6).reshape((1, 2, 3)).astype(np.int32)
                input_y = np.arange(1, 7).reshape((1, 2, 1, 3)).astype(np.int32)
                real_result = callback(input_x, input_y)
                res, = exe.run(feed={
                    "x": input_x,
                    "y": input_y
                },
                               fetch_list=[out])
            self.assertEqual((res == real_result).all(), True)

        def test_broadcast_api_3(self):
            # (5,) broadcast against (3, 1) -> (3, 5).
            paddle.enable_static()
            with program_guard(Program(), Program()):
                x = paddle.static.data(name='x', shape=[5], dtype='int32')
                y = paddle.static.data(name='y', shape=[3, 1], dtype='int32')
                op = getattr(paddle, self.op_type)
                out = op(x, y)
                exe = paddle.static.Executor(self.place)
                input_x = np.arange(0, 5).reshape((5)).astype(np.int32)
                input_y = np.array([5, 3, 2]).reshape((3, 1)).astype(np.int32)
                real_result = callback(input_x, input_y)
                res, = exe.run(feed={
                    "x": input_x,
                    "y": input_y
                },
                               fetch_list=[out])
            self.assertEqual((res == real_result).all(), True)

        def test_bool_api_4(self):
            # Same-shape bool operands.
            paddle.enable_static()
            with program_guard(Program(), Program()):
                x = paddle.static.data(name='x', shape=[3, 1], dtype='bool')
                y = paddle.static.data(name='y', shape=[3, 1], dtype='bool')
                op = getattr(paddle, self.op_type)
                out = op(x, y)
                exe = paddle.static.Executor(self.place)
                input_x = np.array([True, False, True]).astype(np.bool_)
                input_y = np.array([True, True, False]).astype(np.bool_)
                real_result = callback(input_x, input_y)
                res, = exe.run(feed={
                    "x": input_x,
                    "y": input_y
                },
                               fetch_list=[out])
            self.assertEqual((res == real_result).all(), True)

        def test_bool_broadcast_api_4(self):
            # Bool operands with broadcast: (3, 1) against (1,).
            paddle.enable_static()
            with program_guard(Program(), Program()):
                x = paddle.static.data(name='x', shape=[3, 1], dtype='bool')
                y = paddle.static.data(name='y', shape=[1], dtype='bool')
                op = getattr(paddle, self.op_type)
                out = op(x, y)
                exe = paddle.static.Executor(self.place)
                input_x = np.array([True, False, True]).astype(np.bool_)
                input_y = np.array([True]).astype(np.bool_)
                real_result = callback(input_x, input_y)
                res, = exe.run(feed={
                    "x": input_x,
                    "y": input_y
                },
                               fetch_list=[out])
            self.assertEqual((res == real_result).all(), True)

        def test_attr_name(self):
            # A user-supplied `name` must show up in the output var name.
            paddle.enable_static()
            with program_guard(Program(), Program()):
                x = fluid.layers.data(name='x', shape=[4], dtype='int32')
                y = fluid.layers.data(name='y', shape=[4], dtype='int32')
                op = getattr(paddle, self.op_type)
                out = op(x=x, y=y, name="name_%s" % (self.op_type))
            self.assertEqual("name_%s" % (self.op_type) in out.name, True)

    cls_name = "TestCase_{}".format(op_type)
    PaddleCls.__name__ = cls_name
    globals()[cls_name] = PaddleCls


# Register one dynamic/static TestCase per paddle compare API.
for _op_name, _np_ref in (
    ('less_than', lambda _a, _b: _a < _b),
    ('less_equal', lambda _a, _b: _a <= _b),
    ('greater_than', lambda _a, _b: _a > _b),
    ('greater_equal', lambda _a, _b: _a >= _b),
    ('equal', lambda _a, _b: _a == _b),
    ('not_equal', lambda _a, _b: _a != _b),
):
    create_paddle_case(_op_name, _np_ref)


class TestCompareOpError(unittest.TestCase):
    """Compare ops must reject inputs that are not Variables."""

    def test_errors(self):
        paddle.enable_static()
        with program_guard(Program(), Program()):
            var_x = fluid.layers.data(name='x', shape=[1], dtype="float32")
            # A raw LoDTensor is not a Variable, so the op should refuse it.
            lod_y = fluid.create_lod_tensor(numpy.array([[-1]]), [[1]],
                                            fluid.CPUPlace())
            with self.assertRaises(TypeError):
                fluid.layers.greater_equal(var_x, lod_y)


class API_TestElementwise_Equal(unittest.TestCase):
    """Exercise ``paddle.equal`` on int32 tensors in static-graph mode."""

    def _run_equal(self, label_values, limit_values):
        # Build a fresh static program, run paddle.equal on CPU, and
        # return the fetched boolean array.
        with fluid.program_guard(fluid.Program(), fluid.Program()):
            label = fluid.layers.assign(np.array(label_values, dtype="int32"))
            limit = fluid.layers.assign(np.array(limit_values, dtype="int32"))
            out = paddle.equal(x=label, y=limit)
            exe = fluid.Executor(fluid.CPUPlace())
            res, = exe.run(fetch_list=[out])
        return res

    def test_api(self):
        paddle.enable_static()
        partially_equal = self._run_equal([3, 3], [3, 2])
        self.assertEqual((partially_equal == np.array([True, False])).all(),
                         True)
        fully_equal = self._run_equal([3, 3], [3, 3])
        self.assertEqual((fully_equal == np.array([True, True])).all(), True)


class TestCompareOpPlace(unittest.TestCase):
    """Placement behaviour of compare ops (force_cpu / pinned memory)."""

    def test_place_1(self):
        # Static graph: less_than with force_cpu=True must still work
        # when the executor runs on GPU (if CUDA is available).
        paddle.enable_static()
        run_place = (paddle.CUDAPlace(0)
                     if core.is_compiled_with_cuda() else paddle.CPUPlace())
        lhs = fluid.layers.assign(np.array([3, 3], dtype="int32"))
        rhs = fluid.layers.assign(np.array([3, 2], dtype="int32"))
        out = fluid.layers.less_than(lhs, rhs, force_cpu=True)
        res, = fluid.Executor(run_place).run(fetch_list=[out])
        self.assertEqual((res == np.array([False, False])).all(), True)

    def test_place_2(self):
        # Dynamic graph: compare a tensor held in CUDA pinned memory
        # (plain CPU memory on non-CUDA builds) against a python scalar.
        if core.is_compiled_with_cuda():
            run_place = paddle.CUDAPlace(0)
            tensor_place = paddle.CUDAPinnedPlace()
        else:
            run_place = paddle.CPUPlace()
            tensor_place = run_place
        paddle.disable_static(run_place)
        value = paddle.to_tensor(np.array([9], dtype="int64"),
                                 place=tensor_place)
        comparison = value == 0
        self.assertEqual((comparison.numpy() == np.array([False])).all(), True)


if __name__ == '__main__':
    # The generated OpTest classes expect static-graph mode.
    paddle.enable_static()
    unittest.main()