#   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest

import numpy as np

import paddle
import paddle.fluid as fluid
import paddle.fluid.core as core
from paddle import _legacy_C_ops

from op_test import OpTest, skip_check_grad_ci

paddle.enable_static()


class TestElementwiseOp(OpTest):
    """Base test for elementwise_min with same-shape float64 inputs.

    Subclasses override ``setUp`` to vary shapes/broadcasting; the
    check methods below are inherited unchanged.
    """

    def setUp(self):
        self.op_type = "elementwise_min"
        self.python_api = paddle.minimum
        # If x and y share a value, min() is not differentiable there.
        # Push y at least 0.1 away from x (in either direction) so the
        # gradient check never lands on a tie.
        x = np.random.uniform(0.1, 1, [13, 17]).astype("float64")
        sgn = np.random.choice([-1, 1], [13, 17]).astype("float64")
        y = x + sgn * np.random.uniform(0.1, 1, [13, 17]).astype("float64")
        self.inputs = {'X': x, 'Y': y}
        self.outputs = {'Out': np.minimum(x, y)}

    def test_check_output(self):
        # Eager mode is only exercised when no custom attrs are set.
        eager = not hasattr(self, 'attrs')
        self.check_output(check_eager=eager)

    def test_check_grad_normal(self):
        # Same eager-mode policy as test_check_output.
        eager = not hasattr(self, 'attrs')
        self.check_grad(['X', 'Y'], 'Out', check_eager=eager)

    def test_check_grad_ingore_x(self):
        # NOTE(review): "ingore" is a long-standing typo for "ignore";
        # the name is kept so the historical test id is preserved.
        self.check_grad(
            ['Y'], 'Out', max_relative_error=0.005, no_grad_set={'X'}
        )

    def test_check_grad_ingore_y(self):
        self.check_grad(
            ['X'], 'Out', max_relative_error=0.005, no_grad_set={'Y'}
        )


class TestElementwiseMinOp_ZeroDim1(TestElementwiseOp):
    """Both operands are 0-D (scalar) tensors."""

    def setUp(self):
        self.op_type = "elementwise_min"
        self.python_api = paddle.minimum
        x = np.random.uniform(0.1, 1, []).astype("float64")
        y = np.random.uniform(0.1, 1, []).astype("float64")
        self.inputs = {'X': x, 'Y': y}
        self.outputs = {'Out': np.minimum(x, y)}


class TestElementwiseMinOp_ZeroDim2(TestElementwiseOp):
    """2-D x against a 0-D y (scalar broadcast)."""

    def setUp(self):
        self.op_type = "elementwise_min"
        self.python_api = paddle.minimum
        x = np.random.uniform(0.1, 1, [13, 17]).astype("float64")
        y = np.random.uniform(0.1, 1, []).astype("float64")
        self.inputs = {'X': x, 'Y': y}
        self.outputs = {'Out': np.minimum(x, y)}


class TestElementwiseMinOp_ZeroDim3(TestElementwiseOp):
    """0-D x against a 2-D y (scalar broadcast, reversed)."""

    def setUp(self):
        self.op_type = "elementwise_min"
        self.python_api = paddle.minimum
        x = np.random.uniform(0.1, 1, []).astype("float64")
        y = np.random.uniform(0.1, 1, [13, 17]).astype("float64")
        self.inputs = {'X': x, 'Y': y}
        self.outputs = {'Out': np.minimum(x, y)}


@skip_check_grad_ci(
    reason="[skip shape check] Use y_shape(1) to test broadcast."
)
class TestElementwiseMinOp_scalar(TestElementwiseOp):
    """Broadcast a shape-(1,) y against a 3-D x."""

    def setUp(self):
        self.op_type = "elementwise_min"
        self.python_api = paddle.minimum
        # np.random.random_integers is deprecated and removed in modern
        # NumPy; randint with an exclusive upper bound of 6 draws the
        # same inclusive [-5, 5] integer range.
        x = np.random.randint(-5, 6, [10, 3, 4]).astype("float64")
        y = np.array([0.5]).astype("float64")
        self.inputs = {'X': x, 'Y': y}
        self.outputs = {'Out': np.minimum(x, y)}


class TestElementwiseMinOp_Vector(TestElementwiseOp):
    """elementwise_min on rank-1 inputs of length 100."""

    def setUp(self):
        self.op_type = "elementwise_min"
        self.python_api = paddle.minimum
        # Keep y at least 0.1 away from x so min() stays differentiable.
        x = np.random.random((100,)).astype("float64")
        sgn = np.random.choice([-1, 1], (100,)).astype("float64")
        y = x + sgn * np.random.uniform(0.1, 1, (100,)).astype("float64")
        self.inputs = {'X': x, 'Y': y}
        self.outputs = {'Out': np.minimum(x, y)}


class TestElementwiseMinOp_broadcast_0(TestElementwiseOp):
    """Broadcast y of shape (100,) along axis 0 of x (100, 3, 2)."""

    def setUp(self):
        self.op_type = "elementwise_min"
        self.python_api = paddle.minimum
        x = np.random.uniform(0.5, 1, (100, 3, 2)).astype(np.float64)
        # y is offset from x's leading slice by at least 1 so no tie
        # occurs anywhere after broadcasting.
        sgn = np.random.choice([-1, 1], (100,)).astype(np.float64)
        y = x[:, 0, 0] + sgn * np.random.uniform(1, 2, (100,)).astype(
            np.float64
        )
        self.inputs = {'X': x, 'Y': y}
        self.attrs = {'axis': 0}
        self.outputs = {'Out': np.minimum(x, y.reshape(100, 1, 1))}


class TestElementwiseMinOp_broadcast_1(TestElementwiseOp):
    """Broadcast y of shape (100,) along axis 1 of x (2, 100, 3)."""

    def setUp(self):
        self.op_type = "elementwise_min"
        self.python_api = paddle.minimum
        x = np.random.uniform(0.5, 1, (2, 100, 3)).astype(np.float64)
        # Offset y from x's slice by at least 1 to avoid min() ties.
        sgn = np.random.choice([-1, 1], (100,)).astype(np.float64)
        y = x[0, :, 0] + sgn * np.random.uniform(1, 2, (100,)).astype(
            np.float64
        )
        self.inputs = {'X': x, 'Y': y}
        self.attrs = {'axis': 1}
        self.outputs = {'Out': np.minimum(x, y.reshape(1, 100, 1))}


class TestElementwiseMinOp_broadcast_2(TestElementwiseOp):
    """Broadcast y of shape (100,) along the trailing axis of x (2, 3, 100).

    No 'axis' attr: trailing-dimension broadcast is the default.
    """

    def setUp(self):
        self.op_type = "elementwise_min"
        self.python_api = paddle.minimum
        x = np.random.uniform(0.5, 1, (2, 3, 100)).astype(np.float64)
        # Offset y from x's slice by at least 1 to avoid min() ties.
        sgn = np.random.choice([-1, 1], (100,)).astype(np.float64)
        y = x[0, 0, :] + sgn * np.random.uniform(1, 2, (100,)).astype(
            np.float64
        )
        self.inputs = {'X': x, 'Y': y}
        self.outputs = {'Out': np.minimum(x, y.reshape(1, 1, 100))}


class TestElementwiseMinOp_broadcast_3(TestElementwiseOp):
    """Broadcast y of shape (25, 4) into the middle axes of x (2, 25, 4, 1)."""

    def setUp(self):
        self.op_type = "elementwise_min"
        self.python_api = paddle.minimum
        x = np.random.uniform(0.5, 1, (2, 25, 4, 1)).astype(np.float64)
        # Offset y from x's slice by at least 1 to avoid min() ties.
        sgn = np.random.choice([-1, 1], (25, 4)).astype(np.float64)
        y = x[0, :, :, 0] + sgn * np.random.uniform(1, 2, (25, 4)).astype(
            np.float64
        )
        self.inputs = {'X': x, 'Y': y}
        self.attrs = {'axis': 1}
        self.outputs = {'Out': np.minimum(x, y.reshape(1, 25, 4, 1))}


class TestElementwiseMinOp_broadcast_4(TestElementwiseOp):
    """Broadcast y (2, 10, 1, 5) against x (2, 10, 2, 5) on axis 2."""

    def setUp(self):
        self.op_type = "elementwise_min"
        self.python_api = paddle.minimum
        x = np.random.uniform(0.5, 1, (2, 10, 2, 5)).astype(np.float64)
        # Offset y from x by at least 1 to avoid min() ties.
        sgn = np.random.choice([-1, 1], (2, 10, 1, 5)).astype(np.float64)
        y = x + sgn * np.random.uniform(1, 2, (2, 10, 1, 5)).astype(np.float64)
        self.inputs = {'X': x, 'Y': y}
        self.outputs = {'Out': np.minimum(x, y)}


class TestElementwiseMinOpFP16(unittest.TestCase):
    """Check that the fp16 kernel matches an fp32 reference in dygraph mode."""

    def get_out_and_grad(self, x_np, y_np, axis, place, use_fp32=False):
        """Run elementwise_min forward + backward and return fp16 arrays.

        When ``use_fp32`` is True the inputs are up-cast so the op runs
        in fp32; results are cast back to fp16 so both runs compare
        bit-for-bit via assert_array_equal.
        """
        assert x_np.dtype == np.float16
        assert y_np.dtype == np.float16
        if use_fp32:
            x_np = x_np.astype(np.float32)
            y_np = y_np.astype(np.float32)
        out_dtype = np.float16

        with fluid.dygraph.guard(place):
            x = paddle.to_tensor(x_np)
            y = paddle.to_tensor(y_np)
            x.stop_gradient = False
            y.stop_gradient = False
            z = _legacy_C_ops.elementwise_min(x, y, 'axis', axis)
            x_g, y_g = paddle.grad([z], [x, y])
            return (
                z.numpy().astype(out_dtype),
                x_g.numpy().astype(out_dtype),
                y_g.numpy().astype(out_dtype),
            )

    def check_main(self, x_shape, y_shape, axis=-1):
        """Compare fp16 vs fp32 output and grads for one shape pair.

        Silently skips on non-CUDA builds or devices without fp16 support.
        """
        if not paddle.is_compiled_with_cuda():
            return
        place = paddle.CUDAPlace(0)
        if not core.is_float16_supported(place):
            return

        x_np = np.random.random(size=x_shape).astype(np.float16)
        y_np = np.random.random(size=y_shape).astype(np.float16)

        z_1, x_g_1, y_g_1 = self.get_out_and_grad(
            x_np, y_np, axis, place, False
        )
        z_2, x_g_2, y_g_2 = self.get_out_and_grad(x_np, y_np, axis, place, True)
        np.testing.assert_array_equal(z_1, z_2)
        np.testing.assert_array_equal(x_g_1, x_g_2)
        np.testing.assert_array_equal(y_g_1, y_g_2)

    def test_main(self):
        # Shapes mirror the static-graph cases above: same-shape,
        # scalar, vector, and the axis-specific broadcasts.
        self.check_main((13, 17), (13, 17))
        self.check_main((10, 3, 4), (1,))
        self.check_main((100,), (100,))
        self.check_main((100, 3, 2), (100,), 0)
        self.check_main((2, 100, 3), (100,), 1)
        self.check_main((2, 3, 100), (100,))
        self.check_main((2, 25, 4, 1), (25, 4), 1)
        self.check_main((2, 10, 2, 5), (2, 10, 1, 5))


# Run the full suite when executed as a script.
if __name__ == '__main__':
    unittest.main()