#  Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest

import numpy as np
from eager_op_test import OpTest, convert_float_to_uint16, skip_check_grad_ci

import paddle
from paddle.fluid import core


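# Base test case for elementwise_mul. Subclasses override the init_* hooks
# (dtype, inputs/outputs, kernel type, axis) to cover other dtypes and
# broadcast patterns; check_prim=True additionally exercises the composite
# (prim) decomposition of the op.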
class ElementwiseMulOp(OpTest):
    def init_kernel_type(self):
        self.use_mkldnn = False

    def setUp(self):
        self.op_type = "elementwise_mul"
        self.prim_op_type = "prim"
        self.python_api = paddle.multiply
        self.public_python_api = paddle.multiply
        self.dtype = np.float64
        self.axis = -1
        self.init_dtype()
        self.init_input_output()
        self.init_kernel_type()
        self.init_axis()
        self.if_enable_cinn()

        self.inputs = {
            'X': OpTest.np_dtype_to_fluid_dtype(self.x),
            'Y': OpTest.np_dtype_to_fluid_dtype(self.y),
        }
        self.outputs = {'Out': self.out}
        self.attrs = {'axis': self.axis, 'use_mkldnn': self.use_mkldnn}

    def test_check_output(self):
        # TODO(wangzhongpu): support mkldnn op in dygraph mode
        self.check_output(check_dygraph=(not self.use_mkldnn))

    def test_check_grad_normal(self):
        # TODO(wangzhongpu): support mkldnn op in dygraph mode
        self.check_grad(
            ['X', 'Y'],
            'Out',
            check_dygraph=(not self.use_mkldnn),
            check_prim=True,
        )

    def test_check_grad_ingore_x(self):
        # TODO(wangzhongpu): support mkldnn op in dygraph mode
        self.check_grad(
            ['Y'],
            'Out',
            no_grad_set=set("X"),
            check_dygraph=(not self.use_mkldnn),
            check_prim=True,
        )

    def test_check_grad_ingore_y(self):
        # TODO(wangzhongpu): support mkldnn op in dygraph mode
        self.check_grad(
            ['X'],
            'Out',
            no_grad_set=set('Y'),
            check_dygraph=(not self.use_mkldnn),
            check_prim=True,
        )

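    # Inputs are drawn from U(0.1, 1); keeping values bounded away from zero
    # makes the numeric gradient checks better conditioned.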
    def init_input_output(self):
        self.x = np.random.uniform(0.1, 1, [13, 17]).astype(self.dtype)
        self.y = np.random.uniform(0.1, 1, [13, 17]).astype(self.dtype)
        self.out = np.multiply(self.x, self.y)

    def init_dtype(self):
        pass

    def init_axis(self):
        pass

    def if_enable_cinn(self):
        pass


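# 0-d (scalar) tensor cases. CINN is disabled for these tests, presumably
# because the backend does not handle zero-dim shapes for this op.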
class TestElementwiseMulOp_ZeroDim1(ElementwiseMulOp):
    def init_input_output(self):
        self.x = np.random.uniform(0.1, 1, []).astype(self.dtype)
        self.y = np.random.uniform(0.1, 1, []).astype(self.dtype)
        self.out = np.multiply(self.x, self.y)

    def if_enable_cinn(self):
        self.enable_cinn = False


class TestElementwiseMulOp_ZeroDim2(ElementwiseMulOp):
    def init_input_output(self):
        self.x = np.random.uniform(0.1, 1, [13, 17]).astype(self.dtype)
        self.y = np.random.uniform(0.1, 1, []).astype(self.dtype)
        self.out = np.multiply(self.x, self.y)

    def if_enable_cinn(self):
        self.enable_cinn = False


class TestElementwiseMulOp_ZeroDim3(ElementwiseMulOp):
    def init_input_output(self):
        self.x = np.random.uniform(0.1, 1, []).astype(self.dtype)
        self.y = np.random.uniform(0.1, 1, [13, 17]).astype(self.dtype)
        self.out = np.multiply(self.x, self.y)

    def if_enable_cinn(self):
        self.enable_cinn = False


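# bfloat16 path: inputs are generated in float32 and converted with
# convert_float_to_uint16, since OpTest stores bfloat16 tensors as uint16
# bit patterns.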
class TestBF16ElementwiseMulOp(OpTest):
    def setUp(self):
        self.op_type = "elementwise_mul"
        self.prim_op_type = "prim"
        self.python_api = paddle.multiply
        self.public_python_api = paddle.multiply
        self.dtype = np.uint16

        self.x = np.random.uniform(0.1, 1, [13, 17]).astype(np.float32)
        self.y = np.random.uniform(0.1, 1, [13, 17]).astype(np.float32)
        self.out = np.multiply(self.x, self.y)

        self.axis = -1

        self.inputs = {
            'X': OpTest.np_dtype_to_fluid_dtype(
                convert_float_to_uint16(self.x)
            ),
            'Y': OpTest.np_dtype_to_fluid_dtype(
                convert_float_to_uint16(self.y)
            ),
        }
        self.outputs = {'Out': convert_float_to_uint16(self.out)}
        self.attrs = {'axis': self.axis, 'use_mkldnn': False}
        self.if_enable_cinn()

    def test_check_output(self):
        self.check_output()

    def test_check_grad_normal(self):
        self.check_grad(['X', 'Y'], 'Out', check_prim=True)

    def test_check_grad_ingore_x(self):
        self.check_grad(
            ['Y'],
            'Out',
            no_grad_set=set("X"),
            check_prim=True,
        )

    def test_check_grad_ingore_y(self):
        self.check_grad(
            ['X'],
            'Out',
            no_grad_set=set('Y'),
            check_prim=True,
        )

    def if_enable_cinn(self):
        self.enable_cinn = False


@skip_check_grad_ci(
    reason="[skip shape check] Use y_shape(1) to test broadcast."
)
class TestElementwiseMulOp_scalar(ElementwiseMulOp):
    def setUp(self):
        self.op_type = "elementwise_mul"
        self.prim_op_type = "prim"
        self.python_api = paddle.multiply
        self.public_python_api = paddle.multiply
        self.inputs = {
            'X': np.random.rand(10, 3, 4).astype(np.float64),
            'Y': np.random.rand(1).astype(np.float64),
        }
        self.outputs = {'Out': self.inputs['X'] * self.inputs['Y']}
        self.init_kernel_type()


class TestElementwiseMulOp_Vector(ElementwiseMulOp):
    def setUp(self):
        self.op_type = "elementwise_mul"
        self.prim_op_type = "prim"
        self.python_api = paddle.multiply
        self.public_python_api = paddle.multiply
        self.inputs = {
            'X': np.random.random((100,)).astype("float64"),
            'Y': np.random.random((100,)).astype("float64"),
        }
        self.outputs = {'Out': np.multiply(self.inputs['X'], self.inputs['Y'])}
        self.init_kernel_type()


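# Broadcast test base. `axis` selects where Y's dimensions line up against
# X's (axis == -1 means trailing alignment, i.e. plain NumPy broadcasting);
# the prim and dygraph checks are only enabled for the trailing-alignment case.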
class ElementwiseMulOp_broadcast(OpTest):
    def init_kernel_type(self):
        self.use_mkldnn = False

    def setUp(self):
        self.op_type = "elementwise_mul"
        self.prim_op_type = "prim"
        self.python_api = paddle.multiply
        self.public_python_api = paddle.multiply
        self.init_dtype()
        self.init_kernel_type()
        self.init_axis()
        self.init_input_attr_output()
        self.if_check_prim()
        self.if_check_dygraph()

    def test_check_output(self):
        self.check_output(
            check_dygraph=self.check_dygraph, check_prim=self.check_prim
        )

    def test_check_grad_normal(self):
        self.check_grad(
            ['X', 'Y'],
            'Out',
            check_dygraph=self.check_dygraph,
            check_prim=self.check_prim,
        )

    def test_check_grad_ingore_x(self):
        self.check_grad(
            ['Y'],
            'Out',
            no_grad_set=set("X"),
            check_dygraph=self.check_dygraph,
            check_prim=self.check_prim,
        )

    def test_check_grad_ingore_y(self):
        self.check_grad(
            ['X'],
            'Out',
            no_grad_set=set('Y'),
            check_dygraph=self.check_dygraph,
            check_prim=self.check_prim,
        )

    def init_input_attr_output(self):
        self.x = np.random.uniform(0.1, 1, [13, 17, 1]).astype(self.dtype)
        self.y = np.random.uniform(0.1, 1, [17, 17]).astype(self.dtype)
        self.out = np.multiply(self.x, self.y)
        self.inputs = {
            'X': OpTest.np_dtype_to_fluid_dtype(self.x),
            'Y': OpTest.np_dtype_to_fluid_dtype(self.y),
        }
        self.outputs = {'Out': self.out}
        self.attrs = {'axis': self.axis, 'use_mkldnn': self.use_mkldnn}

    def init_dtype(self):
        self.dtype = np.float64

    def init_axis(self):
        self.axis = -1

    def if_check_prim(self):
        self.check_prim = self.axis == -1

    def if_check_dygraph(self):
        self.check_dygraph = (not self.use_mkldnn) and (self.axis == -1)


class TestElementwiseMulOp_broadcast_0(ElementwiseMulOp_broadcast):
    def init_input_attr_output(self):
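        # Y has shape (100,); with axis=0 it aligns with X's first dimension,
        # so the reference output reshapes Y to (100, 1, 1).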
        self.x = np.random.rand(100, 2, 3).astype(self.dtype)
        self.y = np.random.rand(100).astype(self.dtype)
        self.out = self.x * self.y.reshape(100, 1, 1)
        self.inputs = {
            'X': OpTest.np_dtype_to_fluid_dtype(self.x),
            'Y': OpTest.np_dtype_to_fluid_dtype(self.y),
        }
        self.outputs = {'Out': self.out}
        self.attrs = {'axis': self.axis, 'use_mkldnn': self.use_mkldnn}

    def init_axis(self):
        self.axis = 0


class TestElementwiseMulOp_broadcast_1(ElementwiseMulOp_broadcast):
    def init_input_attr_output(self):
        self.inputs = {
            'X': np.random.rand(2, 100, 3).astype(np.float64),
            'Y': np.random.rand(100).astype(np.float64),
        }

        self.attrs = {'axis': self.axis}
        self.outputs = {
            'Out': self.inputs['X'] * self.inputs['Y'].reshape(1, 100, 1)
        }

    def init_axis(self):
        self.axis = 1


class TestElementwiseMulOp_broadcast_2(ElementwiseMulOp_broadcast):
    def init_input_attr_output(self):
        self.inputs = {
            'X': np.random.rand(2, 3, 100).astype(np.float64),
            'Y': np.random.rand(100).astype(np.float64),
        }
        self.attrs = {'axis': self.axis}
        self.outputs = {
            'Out': self.inputs['X'] * self.inputs['Y'].reshape(1, 1, 100)
        }


class TestElementwiseMulOp_broadcast_3(ElementwiseMulOp_broadcast):
    def init_input_attr_output(self):
        self.inputs = {
            'X': np.random.rand(2, 10, 12, 3).astype(np.float64),
            'Y': np.random.rand(10, 12).astype(np.float64),
        }

        self.attrs = {'axis': self.axis}
        self.outputs = {
            'Out': self.inputs['X'] * self.inputs['Y'].reshape(1, 10, 12, 1)
        }

    def init_axis(self):
        self.axis = 1


class TestElementwiseMulOp_broadcast_4(ElementwiseMulOp_broadcast):
    def init_input_attr_output(self):
        self.inputs = {
            'X': np.random.rand(10, 2, 11).astype(np.float64),
            'Y': np.random.rand(10, 1, 11).astype(np.float64),
        }
        self.attrs = {'axis': self.axis}
        self.outputs = {'Out': self.inputs['X'] * self.inputs['Y']}


class TestElementwiseMulOp_broadcast_5(ElementwiseMulOp_broadcast):
    def init_input_attr_output(self):
        self.inputs = {
            'X': np.random.rand(10, 4, 2, 3).astype(np.float64),
            'Y': np.random.rand(10, 4, 1, 3).astype(np.float64),
        }
        self.attrs = {'axis': self.axis}
        self.outputs = {'Out': self.inputs['X'] * self.inputs['Y']}


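# float16 has only a CUDA kernel for this op, hence the skip below on
# non-CUDA builds.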
@unittest.skipIf(
    not core.is_compiled_with_cuda(), "core is not compiled with CUDA"
)
class TestElementwiseMulOpFp16(ElementwiseMulOp):
    def init_dtype(self):
        self.dtype = np.float16

    def if_enable_cinn(self):
        pass

    def test_check_output(self):
        # TODO(wangzhongpu): support mkldnn op in dygraph mode
        self.check_output(check_dygraph=(not self.use_mkldnn))

    def test_check_grad_normal(self):
        # TODO(wangzhongpu): support mkldnn op in dygraph mode
        self.check_grad(
            ['X', 'Y'],
            'Out',
            check_dygraph=(not self.use_mkldnn),
            check_prim=True,
        )

    def test_check_grad_ingore_x(self):
        # TODO(wangzhongpu): support mkldnn op in dygraph mode
        self.check_grad(
            ['Y'],
            'Out',
            no_grad_set=set("X"),
            check_dygraph=(not self.use_mkldnn),
            check_prim=True,
        )

    def test_check_grad_ingore_y(self):
        # TODO(wangzhongpu): support mkldnn op in dygraph mode
        self.check_grad(
            ['X'],
            'Out',
            no_grad_set=set('Y'),
            check_dygraph=(not self.use_mkldnn),
            check_prim=True,
        )


class TestElementwiseMulOp_commonuse_1(ElementwiseMulOp):
    def setUp(self):
        self.op_type = "elementwise_mul"
        self.prim_op_type = "prim"
        self.python_api = paddle.multiply
        self.public_python_api = paddle.multiply
        self.inputs = {
            'X': np.random.rand(2, 3, 100).astype(np.float64),
            'Y': np.random.rand(1, 1, 100).astype(np.float64),
        }
        self.outputs = {'Out': self.inputs['X'] * self.inputs['Y']}
        self.init_kernel_type()


class TestElementwiseMulOp_commonuse_2(ElementwiseMulOp):
    def setUp(self):
        self.op_type = "elementwise_mul"
        self.prim_op_type = "prim"
        self.python_api = paddle.multiply
        self.public_python_api = paddle.multiply
        self.inputs = {
            'X': np.random.rand(30, 3, 1, 5).astype(np.float64),
            'Y': np.random.rand(30, 1, 4, 1).astype(np.float64),
        }
        self.outputs = {'Out': self.inputs['X'] * self.inputs['Y']}
        self.init_kernel_type()


class TestElementwiseMulOp_xsize_lessthan_ysize(ElementwiseMulOp):
    def setUp(self):
        self.op_type = "elementwise_mul"
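        # X has fewer dims than Y; axis=2 aligns X's (10, 10) with dims 2-3 of
        # Y's (2, 2, 10, 10), matching the reshape in the reference output.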
        self.prim_op_type = "prim"
        self.python_api = paddle.multiply
        self.public_python_api = paddle.multiply
        self.inputs = {
            'X': np.random.rand(10, 10).astype(np.float64),
            'Y': np.random.rand(2, 2, 10, 10).astype(np.float64),
        }

        self.attrs = {'axis': 2}

        self.outputs = {
            'Out': self.inputs['X'].reshape(1, 1, 10, 10) * self.inputs['Y']
        }
        self.init_kernel_type()


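# Complex-valued case. For Out = X * Y the Wirtinger gradients are
# dX = dOut * conj(Y) and dY = dOut * conj(X); init_grad_input_output
# supplies these as the user-defined reference gradients.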
class TestComplexElementwiseMulOp(OpTest):
    def setUp(self):
        self.op_type = "elementwise_mul"
        self.python_api = paddle.multiply
        self.init_base_dtype()
        self.init_input_output()
        self.init_grad_input_output()

        self.inputs = {
            'X': OpTest.np_dtype_to_fluid_dtype(self.x),
            'Y': OpTest.np_dtype_to_fluid_dtype(self.y),
        }
        self.attrs = {'axis': -1, 'use_mkldnn': False}
        self.outputs = {'Out': self.out}

    def init_base_dtype(self):
        self.dtype = np.float64

    def init_input_output(self):
        self.x = np.random.random((2, 3, 4, 5)).astype(
            self.dtype
        ) + 1j * np.random.random((2, 3, 4, 5)).astype(self.dtype)
        self.y = np.random.random((2, 3, 4, 5)).astype(
            self.dtype
        ) + 1j * np.random.random((2, 3, 4, 5)).astype(self.dtype)
        self.out = self.x * self.y

    def init_grad_input_output(self):
        self.grad_out = np.ones((2, 3, 4, 5), self.dtype) + 1j * np.ones(
            (2, 3, 4, 5), self.dtype
        )
        self.grad_x = self.grad_out * np.conj(self.y)
        self.grad_y = self.grad_out * np.conj(self.x)

    def test_check_output(self):
        self.check_output()

    def test_check_grad_normal(self):
        self.check_grad(
            ['X', 'Y'],
            'Out',
            user_defined_grads=[self.grad_x, self.grad_y],
            user_defined_grad_outputs=[self.grad_out],
        )

    def test_check_grad_ingore_x(self):
        self.check_grad(
            ['Y'],
            'Out',
            no_grad_set=set("X"),
            user_defined_grads=[self.grad_y],
            user_defined_grad_outputs=[self.grad_out],
        )

    def test_check_grad_ingore_y(self):
        self.check_grad(
            ['X'],
            'Out',
            no_grad_set=set('Y'),
            user_defined_grads=[self.grad_x],
            user_defined_grad_outputs=[self.grad_out],
        )


class TestRealComplexElementwiseMulOp(TestComplexElementwiseMulOp):
    def init_input_output(self):
        self.x = np.random.random((2, 3, 4, 5)).astype(self.dtype)
        self.y = np.random.random((2, 3, 4, 5)).astype(
            self.dtype
        ) + 1j * np.random.random((2, 3, 4, 5)).astype(self.dtype)
        self.out = self.x * self.y

    def init_grad_input_output(self):
        self.grad_out = np.ones((2, 3, 4, 5), self.dtype) + 1j * np.ones(
            (2, 3, 4, 5), self.dtype
        )
        self.grad_x = np.real(self.grad_out * np.conj(self.y))
        self.grad_y = self.grad_out * np.conj(self.x)


class TestElementwiseMulop(unittest.TestCase):
    def test_dygraph_mul(self):
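        # Mixing ndarray and Tensor operands exercises Tensor.__mul__ and
        # __rmul__; both orders should match the pure-NumPy product.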
        paddle.disable_static()

        np_a = np.random.random((2, 3, 4)).astype(np.float32)
        np_b = np.random.random((2, 3, 4)).astype(np.float32)

        tensor_a = paddle.to_tensor(np_a, dtype="float32")
        tensor_b = paddle.to_tensor(np_b, dtype="float32")

        # normal case: ndarray * tensor
        expect_out = np_a * np_b
        actual_out = np_a * tensor_b
        np.testing.assert_allclose(actual_out, expect_out)

        # normal case: tensor * ndarray
        actual_out = tensor_a * np_b
        np.testing.assert_allclose(actual_out, expect_out)

        paddle.enable_static()


if __name__ == '__main__':
    paddle.enable_static()
    unittest.main()