# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest

import numpy as np
from op_test import OpTest

import paddle
import paddle.fluid as fluid
from paddle.fluid import Program, program_guard
from paddle.fluid.backward import append_backward


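# OpTest checks for the 'where' op: the forward result must match
# np.where(cond, x, y) and the gradients w.r.t. X and Y must pass the
# numeric gradient check.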
class TestWhereOp(OpTest):
    def setUp(self):
        self.op_type = 'where'
        self.python_api = paddle.where
        self.init_config()
        self.inputs = {'Condition': self.cond, 'X': self.x, 'Y': self.y}
        self.outputs = {'Out': np.where(self.cond, self.x, self.y)}

    def test_check_output(self):
        self.check_output(check_eager=False)

    def test_check_grad(self):
        self.check_grad(['X', 'Y'], 'Out', check_eager=False)

    def init_config(self):
        self.x = np.random.uniform(-3, 5, 100).astype('float64')
        self.y = np.random.uniform(-3, 5, 100).astype('float64')
        self.cond = np.zeros(100).astype('bool')


class TestWhereOp2(TestWhereOp):
    def init_config(self):
        self.x = np.random.uniform(-5, 5, (60, 2)).astype('float64')
        self.y = np.random.uniform(-5, 5, (60, 2)).astype('float64')
        self.cond = np.ones((60, 2)).astype('bool')


class TestWhereOp3(TestWhereOp):
    def init_config(self):
        self.x = np.random.uniform(-3, 5, (20, 2, 4)).astype('float64')
        self.y = np.random.uniform(-3, 5, (20, 2, 4)).astype('float64')
        self.cond = np.array(np.random.randint(2, size=(20, 2, 4)), dtype=bool)


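# Static-graph tests of the paddle.where API: forward output, gradients for
# every stop_gradient combination, broadcasting, and scalar x/y inputs.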
class TestWhereAPI(unittest.TestCase):
    def setUp(self):
        self.init_data()

    def init_data(self):
        self.shape = [10, 15]
        self.cond = np.array(np.random.randint(2, size=self.shape), dtype=bool)
        self.x = np.random.uniform(-2, 3, self.shape).astype(np.float32)
        self.y = np.random.uniform(-2, 3, self.shape).astype(np.float32)
        self.out = np.where(self.cond, self.x, self.y)

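    # Reference gradients of mean(where(cond, x, y)): the upstream gradient
    # dout flows to x where cond is True and to y where it is False.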
    def ref_x_backward(self, dout):
        return np.where(self.cond, dout, 0)

    def ref_y_backward(self, dout):
        return np.where(~self.cond, dout, 0)

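    # Build the program for every x/y stop_gradient combination, run it on
    # CPU and (when compiled with CUDA) GPU, and compare the output and the
    # gradients that were actually computed against the references above.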
    def test_api(self, use_cuda=False):
        for x_stop_gradient in [False, True]:
            for y_stop_gradient in [False, True]:
                with fluid.program_guard(Program(), Program()):
                    cond = fluid.layers.data(
                        name='cond', shape=self.shape, dtype='bool'
                    )
                    x = fluid.layers.data(
                        name='x', shape=self.shape, dtype='float32'
                    )
                    y = fluid.layers.data(
                        name='y', shape=self.shape, dtype='float32'
                    )
                    x.stop_gradient = x_stop_gradient
                    y.stop_gradient = y_stop_gradient
                    result = paddle.where(cond, x, y)
                    append_backward(paddle.mean(result))
                    for use_cuda in [False, True]:
                        if use_cuda and (
                            not fluid.core.is_compiled_with_cuda()
                        ):
                            break
                        place = (
                            fluid.CUDAPlace(0) if use_cuda else fluid.CPUPlace()
                        )
                        exe = fluid.Executor(place)
                        fetch_list = [result, result.grad_name]
                        if x_stop_gradient is False:
                            fetch_list.append(x.grad_name)
                        if y_stop_gradient is False:
                            fetch_list.append(y.grad_name)
                        out = exe.run(
                            fluid.default_main_program(),
                            feed={'cond': self.cond, 'x': self.x, 'y': self.y},
                            fetch_list=fetch_list,
                        )
                        assert np.array_equal(out[0], self.out)
                        if x_stop_gradient is False:
                            assert np.array_equal(
                                out[2], self.ref_x_backward(out[1])
                            )
                            if y.stop_gradient is False:
                                assert np.array_equal(
                                    out[3], self.ref_y_backward(out[1])
                                )
                        elif y.stop_gradient is False:
                            assert np.array_equal(
                                out[2], self.ref_y_backward(out[1])
                            )

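    # x and y with different but broadcastable shapes should produce the same
    # result as np.where.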
    def test_api_broadcast(self, use_cuda=False):
        main_program = Program()
        with fluid.program_guard(main_program):
            x = fluid.layers.data(name='x', shape=[4, 1], dtype='float32')
            y = fluid.layers.data(name='y', shape=[4, 2], dtype='float32')
            x_i = np.array([[0.9383, 0.1983, 3.2, 1.2]]).astype('float32')
            y_i = np.array([[1.0, 1.0, 1.0, 1.0], [1.0, 1.0, 1.0, 1.0]]).astype(
                'float32'
            )
            result = paddle.where((x > 1), x=x, y=y)
            for use_cuda in [False, True]:
                if use_cuda and (not fluid.core.is_compiled_with_cuda()):
                    return
                place = fluid.CUDAPlace(0) if use_cuda else fluid.CPUPlace()
                exe = fluid.Executor(place)
                out = exe.run(
                    fluid.default_main_program(),
                    feed={'x': x_i, 'y': y_i},
                    fetch_list=[result],
                )
                assert np.array_equal(out[0], np.where((x_i > 1), x_i, y_i))

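    # Python scalars for x and y are accepted and broadcast against the
    # condition tensor.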
    def test_scalar(self):
        paddle.enable_static()
        main_program = Program()
        with fluid.program_guard(main_program):
            cond_shape = [2, 4]
            cond = fluid.layers.data(
                name='cond', shape=cond_shape, dtype='bool'
            )
            x_data = 1.0
            y_data = 2.0
            cond_data = np.array([False, False, True, True]).astype('bool')
            result = paddle.where(condition=cond, x=x_data, y=y_data)
            for use_cuda in [False, True]:
                if use_cuda and (not fluid.core.is_compiled_with_cuda()):
                    return
                place = fluid.CUDAPlace(0) if use_cuda else fluid.CPUPlace()
                exe = fluid.Executor(place)
                out = exe.run(
                    fluid.default_main_program(),
                    feed={'cond': cond_data},
                    fetch_list=[result],
                )
                expect = np.where(cond_data, x_data, y_data)
                assert np.array_equal(out[0], expect)

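    # Helper for the broadcast cases below: builds a static program with the
    # given cond/x/y shapes and compares the result with np.where.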
    def __test_where_with_broadcast_static(self, cond_shape, x_shape, y_shape):
        paddle.enable_static()
        main_program = Program()
        with fluid.program_guard(main_program):
            cond = fluid.layers.data(
                name='cond', shape=cond_shape, dtype='bool'
            )
            x = fluid.layers.data(name='x', shape=x_shape, dtype='float32')
            y = fluid.layers.data(name='y', shape=y_shape, dtype='float32')
            cond_data_tmp = np.random.random(size=cond_shape).astype('float32')
            cond_data = cond_data_tmp < 0.3
            x_data = np.random.random(size=x_shape).astype('float32')
            y_data = np.random.random(size=y_shape).astype('float32')
            result = paddle.where(condition=cond, x=x, y=y)
            for use_cuda in [False, True]:
                if use_cuda and (not fluid.core.is_compiled_with_cuda()):
                    return
                place = fluid.CUDAPlace(0) if use_cuda else fluid.CPUPlace()
                exe = fluid.Executor(place)
                out = exe.run(
                    fluid.default_main_program(),
                    feed={'cond': cond_data, 'x': x_data, 'y': y_data},
                    fetch_list=[result],
                )
                expect = np.where(cond_data, x_data, y_data)
                assert np.array_equal(out[0], expect)

    def test_static_api_broadcast_1(self):
        cond_shape = [2, 4]
        a_shape = [2, 2, 4]
        b_shape = [2, 2, 4]
        self.__test_where_with_broadcast_static(cond_shape, a_shape, b_shape)

    def test_static_api_broadcast_2(self):
        cond_shape = [2, 1]
        a_shape = [2, 2, 4]
        b_shape = [2, 2, 4]
        self.__test_where_with_broadcast_static(cond_shape, a_shape, b_shape)

    def test_static_api_broadcast_3(self):
        cond_shape = [2, 2, 1]
        a_shape = [2, 2, 4]
        b_shape = [2, 2, 4]
        self.__test_where_with_broadcast_static(cond_shape, a_shape, b_shape)

    def test_static_api_broadcast_4(self):
        cond_shape = [2, 1, 4]
        a_shape = [2, 2, 4]
        b_shape = [2, 2, 4]
        self.__test_where_with_broadcast_static(cond_shape, a_shape, b_shape)

    def test_static_api_broadcast_5(self):
        cond_shape = [3, 2, 2, 4]
        a_shape = [2, 2, 4]
        b_shape = [2, 2, 4]
        self.__test_where_with_broadcast_static(cond_shape, a_shape, b_shape)

    def test_static_api_broadcast_6(self):
        cond_shape = [2, 2, 4]
        a_shape = [2, 2, 1]
        b_shape = [2, 2, 1]
        self.__test_where_with_broadcast_static(cond_shape, a_shape, b_shape)

    def test_static_api_broadcast_7(self):
        cond_shape = [2, 2, 4]
        a_shape = [2, 1, 4]
        b_shape = [2, 1, 4]
        self.__test_where_with_broadcast_static(cond_shape, a_shape, b_shape)

    def test_static_api_broadcast_8(self):
        cond_shape = [3, 2, 2, 4]
        a_shape = [2, 2, 1]
        b_shape = [2, 2, 1]
        self.__test_where_with_broadcast_static(cond_shape, a_shape, b_shape)


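# Dygraph counterparts of the API tests: tensor inputs, scalar x/y,
# broadcasting, and the single-argument form of paddle.where.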
class TestWhereDygraphAPI(unittest.TestCase):
    def test_api(self):
        with fluid.dygraph.guard():
            x_i = np.array([0.9383, 0.1983, 3.2, 1.2]).astype('float64')
            y_i = np.array([1.0, 1.0, 1.0, 1.0]).astype('float64')
            cond_i = np.array([False, False, True, True]).astype('bool')
            x = fluid.dygraph.to_variable(x_i)
            y = fluid.dygraph.to_variable(y_i)
            cond = fluid.dygraph.to_variable(cond_i)
            out = paddle.where(cond, x, y)
            assert np.array_equal(out.numpy(), np.where(cond_i, x_i, y_i))

    def test_scalar(self):
        with fluid.dygraph.guard():
            cond_i = np.array([False, False, True, True]).astype('bool')
            x = 1.0
            y = 2.0
            cond = fluid.dygraph.to_variable(cond_i)
            out = paddle.where(cond, x, y)
            assert np.array_equal(out.numpy(), np.where(cond_i, x, y))

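    # Helper for the broadcast cases below, mirroring the static-graph helper.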
    def __test_where_with_broadcast_dygraph(self, cond_shape, a_shape, b_shape):
        with fluid.dygraph.guard():
            cond_tmp = paddle.rand(cond_shape)
            cond = cond_tmp < 0.3
            a = paddle.rand(a_shape)
            b = paddle.rand(b_shape)
            result = paddle.where(cond, a, b)
            result = result.numpy()
            expect = np.where(cond, a, b)
            np.testing.assert_array_equal(expect, result)

    def test_dygraph_api_broadcast_1(self):
        cond_shape = [2, 4]
        a_shape = [2, 2, 4]
        b_shape = [2, 2, 4]
        self.__test_where_with_broadcast_dygraph(cond_shape, a_shape, b_shape)

    def test_dygraph_api_broadcast_2(self):
        cond_shape = [2, 1]
        a_shape = [2, 2, 4]
        b_shape = [2, 2, 4]
        self.__test_where_with_broadcast_dygraph(cond_shape, a_shape, b_shape)

    def test_dygraph_api_broadcast_3(self):
        cond_shape = [2, 2, 1]
        a_shape = [2, 2, 4]
        b_shape = [2, 2, 4]
        self.__test_where_with_broadcast_dygraph(cond_shape, a_shape, b_shape)

    def test_dygraph_api_broadcast_4(self):
        cond_shape = [2, 1, 4]
        a_shape = [2, 2, 4]
        b_shape = [2, 2, 4]
        self.__test_where_with_broadcast_dygraph(cond_shape, a_shape, b_shape)

    def test_dygraph_api_broadcast_5(self):
        cond_shape = [3, 2, 2, 4]
        a_shape = [2, 2, 4]
        b_shape = [2, 2, 4]
        self.__test_where_with_broadcast_dygraph(cond_shape, a_shape, b_shape)

    def test_dygraph_api_broadcast_6(self):
        cond_shape = [2, 2, 4]
        a_shape = [2, 2, 1]
        b_shape = [2, 2, 1]
        self.__test_where_with_broadcast_dygraph(cond_shape, a_shape, b_shape)

    def test_dygraph_api_broadcast_7(self):
        cond_shape = [2, 2, 4]
        a_shape = [2, 1, 4]
        b_shape = [2, 1, 4]
        self.__test_where_with_broadcast_dygraph(cond_shape, a_shape, b_shape)

    def test_dygraph_api_broadcast_8(self):
        cond_shape = [3, 2, 2, 4]
        a_shape = [2, 2, 1]
        b_shape = [2, 2, 1]
        self.__test_where_with_broadcast_dygraph(cond_shape, a_shape, b_shape)

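    # With only a condition, paddle.where returns a tuple containing one index
    # tensor per dimension (np.nonzero semantics).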
    def test_where_condition(self):
        data = np.array([[True, False], [False, True]])
        with program_guard(Program(), Program()):
            x = fluid.layers.data(name='x', shape=[-1, 2])
            y = paddle.where(x)
            self.assertEqual(type(y), tuple)
            self.assertEqual(len(y), 2)
            z = fluid.layers.concat(list(y), axis=1)
            exe = fluid.Executor(fluid.CPUPlace())
            (res,) = exe.run(
                feed={'x': data}, fetch_list=[z.name], return_numpy=False
            )
        expect_out = np.array([[0, 0], [1, 1]])
        np.testing.assert_allclose(expect_out, np.array(res), rtol=1e-05)
        data = np.array([True, True, False])
        with program_guard(Program(), Program()):
            x = fluid.layers.data(name='x', shape=[-1])
            y = paddle.where(x)
            self.assertEqual(type(y), tuple)
            self.assertEqual(len(y), 1)
            z = fluid.layers.concat(list(y), axis=1)
            exe = fluid.Executor(fluid.CPUPlace())
            (res,) = exe.run(
                feed={'x': data}, fetch_list=[z.name], return_numpy=False
            )
        expect_out = np.array([[0], [1]])
        np.testing.assert_allclose(expect_out, np.array(res), rtol=1e-05)


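# Invalid inputs should be rejected: non-Variable inputs and unsupported
# dtypes raise TypeError, and an incomplete x/y pair raises ValueError.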
class TestWhereOpError(unittest.TestCase):
    def test_errors(self):
        with program_guard(Program(), Program()):
            x_i = np.array([0.9383, 0.1983, 3.2, 1.2]).astype('float64')
            y_i = np.array([1.0, 1.0, 1.0, 1.0]).astype('float64')
            cond_i = np.array([False, False, True, True]).astype('bool')

            def test_Variable():
                paddle.where(cond_i, x_i, y_i)

            self.assertRaises(TypeError, test_Variable)

            def test_type():
                x = fluid.layers.data(name='x', shape=[4], dtype='bool')
                y = fluid.layers.data(name='y', shape=[4], dtype='float16')
                cond = fluid.layers.data(name='cond', shape=[4], dtype='int32')
                paddle.where(cond, x, y)

            self.assertRaises(TypeError, test_type)

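    # Supplying x without y should be rejected with a ValueError.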
    def test_value_error(self):
        with fluid.dygraph.guard():
            cond_shape = [2, 2, 4]
            cond_tmp = paddle.rand(cond_shape)
            cond = cond_tmp < 0.3
            a = paddle.rand(cond_shape)
            self.assertRaises(ValueError, paddle.where, cond, a)


if __name__ == "__main__":
    paddle.enable_static()
    unittest.main()