#   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest
import numpy as np
from op_test import OpTest, skip_check_grad_ci
import paddle
import paddle.fluid as fluid
import paddle.fluid.core as core

paddle.enable_static()


class TestElementwiseOp(OpTest):
    """Base test case for the elementwise_min op with same-shape inputs.

    Subclasses override setUp to vary shapes/broadcasting; the grad checks
    below are inherited and reused.
    """

    def setUp(self):
        self.op_type = "elementwise_min"
        self.python_api = paddle.minimum
        # If x and y have the same value, the min() is not differentiable.
        # So we generate test data by the following method
        # to avoid them being too close to each other.
        x = np.random.uniform(0.1, 1, [13, 17]).astype("float64")
        sgn = np.random.choice([-1, 1], [13, 17]).astype("float64")
        y = x + sgn * np.random.uniform(0.1, 1, [13, 17]).astype("float64")
        self.inputs = {'X': x, 'Y': y}
        self.outputs = {'Out': np.minimum(self.inputs['X'], self.inputs['Y'])}

    def test_check_output(self):
        # Eager checking is disabled when attrs (e.g. 'axis') are present,
        # since the eager python_api does not take them.
        if hasattr(self, 'attrs'):
            self.check_output(check_eager=False)
        else:
            self.check_output(check_eager=True)

    def test_check_grad_normal(self):
        if hasattr(self, 'attrs'):
            self.check_grad(['X', 'Y'], 'Out', check_eager=False)
        else:
            self.check_grad(['X', 'Y'], 'Out', check_eager=True)

    def test_check_grad_ingore_x(self):
        # NOTE: the "ingore" typo is kept — test-method names may be
        # referenced by external CI skip lists; renaming would break them.
        self.check_grad(
            ['Y'], 'Out', max_relative_error=0.005, no_grad_set=set("X")
        )

    def test_check_grad_ingore_y(self):
        self.check_grad(
            ['X'], 'Out', max_relative_error=0.005, no_grad_set=set('Y')
        )

class TestElementwiseMinOp_ZeroDim1(TestElementwiseOp):
    """Both inputs are 0-D (scalar) tensors."""

    def setUp(self):
        self.op_type = "elementwise_min"
        self.python_api = paddle.minimum
        x_data = np.random.uniform(0.1, 1, []).astype("float64")
        y_data = np.random.uniform(0.1, 1, []).astype("float64")
        self.inputs = {'X': x_data, 'Y': y_data}
        self.outputs = {'Out': np.minimum(x_data, y_data)}


class TestElementwiseMinOp_ZeroDim2(TestElementwiseOp):
    """2-D x against a 0-D (scalar) y."""

    def setUp(self):
        self.op_type = "elementwise_min"
        self.python_api = paddle.minimum
        x_data = np.random.uniform(0.1, 1, [13, 17]).astype("float64")
        y_data = np.random.uniform(0.1, 1, []).astype("float64")
        self.inputs = {'X': x_data, 'Y': y_data}
        self.outputs = {'Out': np.minimum(x_data, y_data)}


class TestElementwiseMinOp_ZeroDim3(TestElementwiseOp):
    """0-D (scalar) x against a 2-D y."""

    def setUp(self):
        self.op_type = "elementwise_min"
        self.python_api = paddle.minimum
        x_data = np.random.uniform(0.1, 1, []).astype("float64")
        y_data = np.random.uniform(0.1, 1, [13, 17]).astype("float64")
        self.inputs = {'X': x_data, 'Y': y_data}
        self.outputs = {'Out': np.minimum(x_data, y_data)}


91
@skip_check_grad_ci(
92 93
    reason="[skip shape check] Use y_shape(1) to test broadcast."
)
94 95 96
class TestElementwiseMinOp_scalar(TestElementwiseOp):
    def setUp(self):
        self.op_type = "elementwise_min"
97
        self.python_api = paddle.minimum
98 99
        x = np.random.random_integers(-5, 5, [10, 3, 4]).astype("float64")
        y = np.array([0.5]).astype("float64")
100 101 102 103
        self.inputs = {'X': x, 'Y': y}
        self.outputs = {'Out': np.minimum(self.inputs['X'], self.inputs['Y'])}


class TestElementwiseMinOp_Vector(TestElementwiseOp):
    """Same-shape 1-D (vector) inputs."""

    def setUp(self):
        self.op_type = "elementwise_min"
        self.python_api = paddle.minimum
        # Offset y away from x by at least 0.1 so min() stays differentiable
        # at every element (same trick as the base class).
        x = np.random.random((100,)).astype("float64")
        sgn = np.random.choice([-1, 1], (100,)).astype("float64")
        y = x + sgn * np.random.uniform(0.1, 1, (100,)).astype("float64")
        self.inputs = {'X': x, 'Y': y}
        self.outputs = {'Out': np.minimum(self.inputs['X'], self.inputs['Y'])}


class TestElementwiseMinOp_broadcast_0(TestElementwiseOp):
    """Broadcast y (100,) along axis 0 of x (100, 3, 2)."""

    def setUp(self):
        self.op_type = "elementwise_min"
        self.python_api = paddle.minimum
        x = np.random.uniform(0.5, 1, (100, 3, 2)).astype(np.float64)
        # Keep y at least 1.0 away from the corresponding x slice so the
        # elementwise min is differentiable everywhere.
        sgn = np.random.choice([-1, 1], (100,)).astype(np.float64)
        y = x[:, 0, 0] + sgn * np.random.uniform(1, 2, (100,)).astype(
            np.float64
        )
        self.inputs = {'X': x, 'Y': y}

        self.attrs = {'axis': 0}
        self.outputs = {
            'Out': np.minimum(
                self.inputs['X'], self.inputs['Y'].reshape(100, 1, 1)
            )
        }


class TestElementwiseMinOp_broadcast_1(TestElementwiseOp):
    """Broadcast y (100,) along axis 1 of x (2, 100, 3)."""

    def setUp(self):
        self.op_type = "elementwise_min"
        self.python_api = paddle.minimum
        x = np.random.uniform(0.5, 1, (2, 100, 3)).astype(np.float64)
        # Keep y at least 1.0 away from the corresponding x slice so the
        # elementwise min is differentiable everywhere.
        sgn = np.random.choice([-1, 1], (100,)).astype(np.float64)
        y = x[0, :, 0] + sgn * np.random.uniform(1, 2, (100,)).astype(
            np.float64
        )
        self.inputs = {'X': x, 'Y': y}

        self.attrs = {'axis': 1}
        self.outputs = {
            'Out': np.minimum(
                self.inputs['X'], self.inputs['Y'].reshape(1, 100, 1)
            )
        }


class TestElementwiseMinOp_broadcast_2(TestElementwiseOp):
    """Broadcast y (100,) along the trailing axis of x (2, 3, 100).

    No 'axis' attr: trailing-dimension broadcast is the default.
    """

    def setUp(self):
        self.op_type = "elementwise_min"
        self.python_api = paddle.minimum
        x = np.random.uniform(0.5, 1, (2, 3, 100)).astype(np.float64)
        # Keep y at least 1.0 away from the corresponding x slice so the
        # elementwise min is differentiable everywhere.
        sgn = np.random.choice([-1, 1], (100,)).astype(np.float64)
        y = x[0, 0, :] + sgn * np.random.uniform(1, 2, (100,)).astype(
            np.float64
        )
        self.inputs = {'X': x, 'Y': y}

        self.outputs = {
            'Out': np.minimum(
                self.inputs['X'], self.inputs['Y'].reshape(1, 1, 100)
            )
        }


class TestElementwiseMinOp_broadcast_3(TestElementwiseOp):
    """Broadcast y (25, 4) into the middle axes of x (2, 25, 4, 1)."""

    def setUp(self):
        self.op_type = "elementwise_min"
        self.python_api = paddle.minimum
        x = np.random.uniform(0.5, 1, (2, 25, 4, 1)).astype(np.float64)
        # Keep y at least 1.0 away from the corresponding x slice so the
        # elementwise min is differentiable everywhere.
        sgn = np.random.choice([-1, 1], (25, 4)).astype(np.float64)
        y = x[0, :, :, 0] + sgn * np.random.uniform(1, 2, (25, 4)).astype(
            np.float64
        )
        self.inputs = {'X': x, 'Y': y}

        self.attrs = {'axis': 1}
        self.outputs = {
            'Out': np.minimum(
                self.inputs['X'], self.inputs['Y'].reshape(1, 25, 4, 1)
            )
        }


class TestElementwiseMinOp_broadcast_4(TestElementwiseOp):
    """Broadcast y (2, 10, 1, 5) against x (2, 10, 2, 5) on axis 2."""

    def setUp(self):
        self.op_type = "elementwise_min"
        self.python_api = paddle.minimum
        x = np.random.uniform(0.5, 1, (2, 10, 2, 5)).astype(np.float64)
        # Keep y at least 1.0 away from x so min() stays differentiable.
        sgn = np.random.choice([-1, 1], (2, 10, 1, 5)).astype(np.float64)
        y = x + sgn * np.random.uniform(1, 2, (2, 10, 1, 5)).astype(np.float64)
        self.inputs = {'X': x, 'Y': y}

        self.outputs = {'Out': np.minimum(self.inputs['X'], self.inputs['Y'])}


class TestElementwiseMinOpFP16(unittest.TestCase):
    """Check that FP16 elementwise_min matches an FP32 reference.

    Runs only when compiled with CUDA and the device supports float16;
    otherwise check_main returns without testing anything (best-effort skip).
    """

    def get_out_and_grad(self, x_np, y_np, axis, place, use_fp32=False):
        """Run elementwise_min in dygraph mode and return (out, dx, dy).

        When use_fp32 is True the inputs are upcast to float32 before the
        op runs; results are always cast back to float16 for comparison.
        """
        assert x_np.dtype == np.float16
        assert y_np.dtype == np.float16
        if use_fp32:
            x_np = x_np.astype(np.float32)
            y_np = y_np.astype(np.float32)
        dtype = np.float16

        with fluid.dygraph.guard(place):
            x = paddle.to_tensor(x_np)
            y = paddle.to_tensor(y_np)
            x.stop_gradient = False
            y.stop_gradient = False
            z = fluid.layers.elementwise_min(x, y, axis)
            x_g, y_g = paddle.grad([z], [x, y])
            return (
                z.numpy().astype(dtype),
                x_g.numpy().astype(dtype),
                y_g.numpy().astype(dtype),
            )

    def check_main(self, x_shape, y_shape, axis=-1):
        """Compare FP16 vs FP32 forward and gradients for one shape pair."""
        if not paddle.is_compiled_with_cuda():
            return
        place = paddle.CUDAPlace(0)
        if not core.is_float16_supported(place):
            return

        x_np = np.random.random(size=x_shape).astype(np.float16)
        y_np = np.random.random(size=y_shape).astype(np.float16)

        z_1, x_g_1, y_g_1 = self.get_out_and_grad(
            x_np, y_np, axis, place, False
        )
        z_2, x_g_2, y_g_2 = self.get_out_and_grad(x_np, y_np, axis, place, True)
        np.testing.assert_array_equal(z_1, z_2)
        np.testing.assert_array_equal(x_g_1, x_g_2)
        np.testing.assert_array_equal(y_g_1, y_g_2)

    def test_main(self):
        # Shape pairs mirror the OpTest cases above: same-shape, scalar,
        # vector, and each broadcast configuration.
        self.check_main((13, 17), (13, 17))
        self.check_main((10, 3, 4), (1,))
        self.check_main((100,), (100,))
        self.check_main((100, 3, 2), (100,), 0)
        self.check_main((2, 100, 3), (100,), 1)
        self.check_main((2, 3, 100), (100,))
        self.check_main((2, 25, 4, 1), (25, 4), 1)
        self.check_main((2, 10, 2, 5), (2, 10, 1, 5))


if __name__ == '__main__':
    unittest.main()