#   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest

import numpy as np
from op_test import OpTest, convert_float_to_uint16, skip_check_grad_ci

import paddle
import paddle.fluid.core as core


class TestElementwiseOp(OpTest):
    def setUp(self):
        self.op_type = "elementwise_max"
        self.python_api = paddle.maximum
        self.prim_op_type = "prim"
        self.enable_cinn = False
        # If x and y have the same value at some position, max() is not
        # differentiable there, so we generate the test data as follows to
        # keep x and y from being too close to each other.
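        # (the offset below has magnitude in [0.1, 1), so |x - y| >= 0.1
        # holds elementwise and the gradient of max is well defined)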
        x = np.random.uniform(0.1, 1, [13, 17]).astype("float64")
        sgn = np.random.choice([-1, 1], [13, 17]).astype("float64")
        y = x + sgn * np.random.uniform(0.1, 1, [13, 17]).astype("float64")
        self.inputs = {'X': x, 'Y': y}
        self.outputs = {'Out': np.maximum(self.inputs['X'], self.inputs['Y'])}

    def test_check_output(self):
        if hasattr(self, 'attrs'):
            self.check_output(check_eager=False)
        else:
            self.check_output(check_eager=True)

    def test_check_grad_normal(self):
        if hasattr(self, 'attrs'):
            if self.attrs['axis'] == -1:
                self.check_grad(
                    ['X', 'Y'], 'Out', check_eager=False, check_prim=True
                )
            else:
                self.check_grad(['X', 'Y'], 'Out', check_eager=False)
        else:
            self.check_grad(
                ['X', 'Y'], 'Out', check_eager=True, check_prim=True
            )

    def test_check_grad_ignore_x(self):
        if hasattr(self, 'attrs') and self.attrs['axis'] != -1:
            self.check_grad(
                ['Y'],
                'Out',
                max_relative_error=0.005,
                no_grad_set=set("X"),
            )
        else:
            self.check_grad(
                ['Y'],
                'Out',
                max_relative_error=0.005,
                no_grad_set=set("X"),
                check_prim=True,
            )

    def test_check_grad_ignore_y(self):
        if hasattr(self, 'attrs') and self.attrs['axis'] != -1:
            self.check_grad(
                ['X'],
                'Out',
                max_relative_error=0.005,
                no_grad_set=set('Y'),
            )
        else:
            self.check_grad(
                ['X'],
                'Out',
                max_relative_error=0.005,
                no_grad_set=set('Y'),
                check_prim=True,
            )


class TestElementwiseMaxOp_ZeroDim1(TestElementwiseOp):
    def setUp(self):
        self.op_type = "elementwise_max"
        self.python_api = paddle.maximum
        self.prim_op_type = "prim"
        self.enable_cinn = False
        x = np.random.uniform(0.1, 1, []).astype("float64")
        y = np.random.uniform(0.1, 1, []).astype("float64")
        self.inputs = {'X': x, 'Y': y}
        self.outputs = {'Out': np.maximum(self.inputs['X'], self.inputs['Y'])}


class TestElementwiseMaxOp_ZeroDim2(TestElementwiseOp):
    def setUp(self):
        self.op_type = "elementwise_max"
        self.python_api = paddle.maximum
        self.prim_op_type = "prim"
        self.enable_cinn = False
        x = np.random.uniform(0.1, 1, [13, 17]).astype("float64")
        y = np.random.uniform(0.1, 1, []).astype("float64")
        self.inputs = {'X': x, 'Y': y}
        self.outputs = {'Out': np.maximum(self.inputs['X'], self.inputs['Y'])}


class TestElementwiseMaxOp_ZeroDim3(TestElementwiseOp):
    def setUp(self):
        self.op_type = "elementwise_max"
        self.python_api = paddle.maximum
        self.prim_op_type = "prim"
        self.enable_cinn = False
        x = np.random.uniform(0.1, 1, []).astype("float64")
        y = np.random.uniform(0.1, 1, [13, 17]).astype("float64")
        self.inputs = {'X': x, 'Y': y}
        self.outputs = {'Out': np.maximum(self.inputs['X'], self.inputs['Y'])}


@unittest.skipIf(
    core.is_compiled_with_cuda()
    and (
        core.cudnn_version() < 8100
        or paddle.device.cuda.get_device_capability()[0] < 8
    ),
    "run test when gpu is availble and the minimum cudnn version is 8.1.0 and gpu's compute capability is at least 8.0.",
)
class TestElementwiseBF16Op(OpTest):
    def setUp(self):
        self.op_type = "elementwise_max"
        self.python_api = paddle.maximum
        self.prim_op_type = "prim"
        self.enable_cinn = False
        self.dtype = np.uint16
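        # bfloat16 values are carried as uint16 bit patterns in OpTest;
        # convert_float_to_uint16 keeps each float32 value's upper 16 bits.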
        # If x and y have the same value at some position, max() is not
        # differentiable there, so we generate the test data as follows to
        # keep x and y from being too close to each other.
        x = np.random.uniform(0.1, 1, [13, 17]).astype(np.float32)
        sgn = np.random.choice([-1, 1], [13, 17]).astype(np.float32)
        y = x + sgn * np.random.uniform(0.1, 1, [13, 17]).astype(np.float32)
        self.inputs = {
            'X': convert_float_to_uint16(x),
            'Y': convert_float_to_uint16(y),
        }
        self.outputs = {'Out': convert_float_to_uint16(np.maximum(x, y))}

    def test_check_output(self):
        if hasattr(self, 'attrs'):
            self.check_output(check_eager=False)
        else:
            self.check_output(check_eager=True)

    def test_check_grad_normal(self):
        if hasattr(self, 'attrs'):
            # check_prim=False because bfloat16 is not supported in `less_equal`
            self.check_grad(['X', 'Y'], 'Out', check_eager=False)
        else:
            self.check_grad(['X', 'Y'], 'Out', check_eager=True)

    def test_check_grad_ignore_x(self):
        self.check_grad(['Y'], 'Out', no_grad_set=set("X"))

    def test_check_grad_ignore_y(self):
        self.check_grad(['X'], 'Out', no_grad_set=set('Y'))


@skip_check_grad_ci(
    reason="[skip shape check] Use y_shape(1) to test broadcast."
)
class TestElementwiseMaxOp_scalar(TestElementwiseOp):
    def setUp(self):
        self.op_type = "elementwise_max"
        self.python_api = paddle.maximum
        self.prim_op_type = "prim"
        self.enable_cinn = False
        # np.random.random_integers is deprecated; randint's upper bound is
        # exclusive, so use 6 to keep the original inclusive range [-5, 5].
        x = np.random.randint(-5, 6, [2, 3, 20]).astype("float64")
        y = np.array([0.5]).astype("float64")
        self.inputs = {'X': x, 'Y': y}
        self.outputs = {'Out': np.maximum(self.inputs['X'], self.inputs['Y'])}


class TestElementwiseMaxOp_Vector(TestElementwiseOp):
    def setUp(self):
        self.op_type = "elementwise_max"
        self.python_api = paddle.maximum
        self.prim_op_type = "prim"
        self.enable_cinn = False
        x = np.random.random((100,)).astype("float64")
        sgn = np.random.choice([-1, 1], (100,)).astype("float64")
        y = x + sgn * np.random.uniform(0.1, 1, (100,)).astype("float64")
        self.inputs = {'X': x, 'Y': y}
        self.outputs = {'Out': np.maximum(self.inputs['X'], self.inputs['Y'])}


class TestElementwiseMaxOp_broadcast_0(TestElementwiseOp):
    def setUp(self):
        self.op_type = "elementwise_max"
        self.python_api = paddle.maximum
        self.prim_op_type = "prim"
        x = np.random.uniform(0.5, 1, (100, 5, 2)).astype(np.float64)
        sgn = np.random.choice([-1, 1], (100,)).astype(np.float64)
        y = x[:, 0, 0] + sgn * np.random.uniform(1, 2, (100,)).astype(
            np.float64
        )
        self.inputs = {'X': x, 'Y': y}

        self.attrs = {'axis': 0}
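        # axis=0 aligns Y's single dimension with X's 0th dimension, so Y
        # broadcasts as Y.reshape(100, 1, 1), as mirrored in the expected
        # output below.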
        self.outputs = {
            'Out': np.maximum(
                self.inputs['X'], self.inputs['Y'].reshape(100, 1, 1)
            )
        }


class TestElementwiseMaxOp_broadcast_1(TestElementwiseOp):
    def setUp(self):
        self.op_type = "elementwise_max"
        self.python_api = paddle.maximum
        self.prim_op_type = "prim"
        x = np.random.uniform(0.5, 1, (2, 100, 3)).astype(np.float64)
        sgn = np.random.choice([-1, 1], (100,)).astype(np.float64)
        y = x[0, :, 0] + sgn * np.random.uniform(1, 2, (100,)).astype(
            np.float64
        )
        self.inputs = {'X': x, 'Y': y}

        self.attrs = {'axis': 1}
        self.outputs = {
            'Out': np.maximum(
                self.inputs['X'], self.inputs['Y'].reshape(1, 100, 1)
            )
        }


class TestElementwiseMaxOp_broadcast_2(TestElementwiseOp):
    def setUp(self):
        self.op_type = "elementwise_max"
        self.python_api = paddle.maximum
        self.prim_op_type = "prim"
        x = np.random.uniform(0.5, 1, (1, 3, 100)).astype(np.float64)
        sgn = np.random.choice([-1, 1], (100,)).astype(np.float64)
        y = x[0, 0, :] + sgn * np.random.uniform(1, 2, (100,)).astype(
            np.float64
        )
        self.inputs = {'X': x, 'Y': y}

        self.outputs = {
            'Out': np.maximum(
                self.inputs['X'], self.inputs['Y'].reshape(1, 1, 100)
            )
        }


class TestElementwiseMaxOp_broadcast_3(TestElementwiseOp):
    def setUp(self):
        self.op_type = "elementwise_max"
        self.python_api = paddle.maximum
        self.prim_op_type = "prim"
        x = np.random.uniform(0.5, 1, (2, 50, 2, 1)).astype(np.float64)
        sgn = np.random.choice([-1, 1], (50, 2)).astype(np.float64)
        y = x[0, :, :, 0] + sgn * np.random.uniform(1, 2, (50, 2)).astype(
            np.float64
        )
        self.inputs = {'X': x, 'Y': y}

        self.attrs = {'axis': 1}
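        # axis=1 with a 2-D Y of shape (50, 2): Y aligns with X's dims 1..2
        # and broadcasts as Y.reshape(1, 50, 2, 1).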
        self.outputs = {
            'Out': np.maximum(
                self.inputs['X'], self.inputs['Y'].reshape(1, 50, 2, 1)
            )
        }


class TestElementwiseMaxOp_broadcast_4(TestElementwiseOp):
    def setUp(self):
        self.op_type = "elementwise_max"
        self.python_api = paddle.maximum
        self.prim_op_type = "prim"
        x = np.random.uniform(0.5, 1, (2, 3, 4, 5)).astype(np.float64)
        sgn = np.random.choice([-1, 1], (2, 3, 1, 5)).astype(np.float64)
        y = x + sgn * np.random.uniform(1, 2, (2, 3, 1, 5)).astype(np.float64)
        self.inputs = {'X': x, 'Y': y}

        self.outputs = {'Out': np.maximum(self.inputs['X'], self.inputs['Y'])}
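

# A minimal dygraph sanity check that paddle.maximum matches NumPy's
# broadcasting semantics; an illustrative sketch (the class name and data
# shapes here are hypothetical, not part of the OpTest cases above).
class TestMaximumAPISanity(unittest.TestCase):
    def test_broadcast_matches_numpy(self):
        paddle.disable_static()
        x_np = np.random.uniform(0.1, 1, [2, 3, 100]).astype("float64")
        y_np = np.random.uniform(0.1, 1, [100]).astype("float64")
        out = paddle.maximum(paddle.to_tensor(x_np), paddle.to_tensor(y_np))
        np.testing.assert_allclose(out.numpy(), np.maximum(x_np, y_np))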


if __name__ == '__main__':
    unittest.main()