test_elementwise_min_op.py 7.9 KB
Newer Older
1
#   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
2
#
D
dzhwinter 已提交
3 4 5
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
6
#
D
dzhwinter 已提交
7
#     http://www.apache.org/licenses/LICENSE-2.0
8
#
D
dzhwinter 已提交
9 10 11 12 13 14
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

F
fengjiayi 已提交
15 16
import unittest
import numpy as np
17
from op_test import OpTest, skip_check_grad_ci
S
sneaxiy 已提交
18 19 20 21 22
import paddle
import paddle.fluid as fluid
import paddle.fluid.core as core

paddle.enable_static()
F
fengjiayi 已提交
23 24 25


class TestElementwiseOp(OpTest):
    """Base case: elementwise_min on two dense [13, 17] float64 tensors."""

    def setUp(self):
        self.op_type = "elementwise_min"
        self.python_api = paddle.minimum
        # min() is not differentiable where the operands are equal, so y is
        # built from x plus a random offset of magnitude >= 0.1 to keep the
        # two tensors apart everywhere.
        base = np.random.uniform(0.1, 1, [13, 17]).astype("float64")
        sign = np.random.choice([-1, 1], [13, 17]).astype("float64")
        shifted = base + sign * np.random.uniform(0.1, 1,
                                                  [13, 17]).astype("float64")
        self.inputs = {'X': base, 'Y': shifted}
        self.outputs = {'Out': np.minimum(base, shifted)}

    def test_check_output(self):
        # Subclasses that set custom attrs skip the eager-mode check.
        use_eager = not hasattr(self, 'attrs')
        self.check_output(check_eager=use_eager)

    def test_check_grad_normal(self):
        # Gradient check w.r.t. both inputs.
        use_eager = not hasattr(self, 'attrs')
        self.check_grad(['X', 'Y'], 'Out', check_eager=use_eager)

    def test_check_grad_ingore_x(self):
        # Gradient check with X removed from the grad set.
        self.check_grad(['Y'],
                        'Out',
                        max_relative_error=0.005,
                        no_grad_set=set("X"))

    def test_check_grad_ingore_y(self):
        # Gradient check with Y removed from the grad set.
        self.check_grad(['X'],
                        'Out',
                        max_relative_error=0.005,
                        no_grad_set=set('Y'))
F
fengjiayi 已提交
62 63


64 65
@skip_check_grad_ci(
    reason="[skip shape check] Use y_shape(1) to test broadcast.")
class TestElementwiseMinOp_scalar(TestElementwiseOp):
    """Broadcast a shape-[1] y against a 3-D x."""

    def setUp(self):
        self.op_type = "elementwise_min"
        self.python_api = paddle.minimum
        # np.random.random_integers is deprecated and removed in modern
        # NumPy; np.random.randint with an exclusive upper bound of 6
        # draws from the same inclusive range [-5, 5].
        x = np.random.randint(-5, 6, [10, 3, 4]).astype("float64")
        y = np.array([0.5]).astype("float64")
        self.inputs = {'X': x, 'Y': y}
        self.outputs = {'Out': np.minimum(self.inputs['X'], self.inputs['Y'])}


77
class TestElementwiseMinOp_Vector(TestElementwiseOp):
    """elementwise_min on two 1-D vectors of length 100."""

    def setUp(self):
        self.op_type = "elementwise_min"
        self.python_api = paddle.minimum
        # Offset y away from x by at least 0.1 so min() stays
        # differentiable at every element.
        vec = np.random.random((100, )).astype("float64")
        direction = np.random.choice([-1, 1], (100, )).astype("float64")
        shifted = vec + direction * np.random.uniform(
            0.1, 1, (100, )).astype("float64")
        self.inputs = {'X': vec, 'Y': shifted}
        self.outputs = {'Out': np.minimum(vec, shifted)}


89
class TestElementwiseMinOp_broadcast_0(TestElementwiseOp):
    """Broadcast a length-100 y along axis 0 of a (100, 3, 2) x."""

    def setUp(self):
        self.op_type = "elementwise_min"
        self.python_api = paddle.minimum
        lhs = np.random.uniform(0.5, 1, (100, 3, 2)).astype(np.float64)
        direction = np.random.choice([-1, 1], (100, )).astype(np.float64)
        # Shift y by at least 1 so no element ties with x (min() is not
        # differentiable at a tie).
        rhs = lhs[:, 0, 0] + direction * np.random.uniform(
            1, 2, (100, )).astype(np.float64)
        self.inputs = {'X': lhs, 'Y': rhs}
        self.attrs = {'axis': 0}
        self.outputs = {'Out': np.minimum(lhs, rhs.reshape(100, 1, 1))}


107
class TestElementwiseMinOp_broadcast_1(TestElementwiseOp):
    """Broadcast a length-100 y along axis 1 of a (2, 100, 3) x."""

    def setUp(self):
        self.op_type = "elementwise_min"
        self.python_api = paddle.minimum
        lhs = np.random.uniform(0.5, 1, (2, 100, 3)).astype(np.float64)
        direction = np.random.choice([-1, 1], (100, )).astype(np.float64)
        # Offset of at least 1 keeps every operand pair distinct.
        rhs = lhs[0, :, 0] + direction * np.random.uniform(
            1, 2, (100, )).astype(np.float64)
        self.inputs = {'X': lhs, 'Y': rhs}
        self.attrs = {'axis': 1}
        self.outputs = {'Out': np.minimum(lhs, rhs.reshape(1, 100, 1))}


125
class TestElementwiseMinOp_broadcast_2(TestElementwiseOp):
    """Broadcast a length-100 y along the trailing axis of a (2, 3, 100) x."""

    def setUp(self):
        self.op_type = "elementwise_min"
        self.python_api = paddle.minimum
        lhs = np.random.uniform(0.5, 1, (2, 3, 100)).astype(np.float64)
        direction = np.random.choice([-1, 1], (100, )).astype(np.float64)
        # Offset of at least 1 keeps every operand pair distinct.
        rhs = lhs[0, 0, :] + direction * np.random.uniform(
            1, 2, (100, )).astype(np.float64)
        self.inputs = {'X': lhs, 'Y': rhs}
        # No 'axis' attr: trailing-dimension alignment is the default.
        self.outputs = {'Out': np.minimum(lhs, rhs.reshape(1, 1, 100))}


142
class TestElementwiseMinOp_broadcast_3(TestElementwiseOp):
    """Broadcast a (25, 4) y over the middle axes of a (2, 25, 4, 1) x."""

    def setUp(self):
        self.op_type = "elementwise_min"
        self.python_api = paddle.minimum
        lhs = np.random.uniform(0.5, 1, (2, 25, 4, 1)).astype(np.float64)
        direction = np.random.choice([-1, 1], (25, 4)).astype(np.float64)
        # Offset of at least 1 keeps every operand pair distinct.
        rhs = lhs[0, :, :, 0] + direction * np.random.uniform(
            1, 2, (25, 4)).astype(np.float64)
        self.inputs = {'X': lhs, 'Y': rhs}
        self.attrs = {'axis': 1}
        self.outputs = {'Out': np.minimum(lhs, rhs.reshape(1, 25, 4, 1))}


160
class TestElementwiseMinOp_broadcast_4(TestElementwiseOp):
    """Broadcast with a (2, 10, 1, 5) random offset against a (2, 10, 2, 5) x."""

    def setUp(self):
        self.op_type = "elementwise_min"
        self.python_api = paddle.minimum
        lhs = np.random.uniform(0.5, 1, (2, 10, 2, 5)).astype(np.float64)
        direction = np.random.choice([-1, 1], (2, 10, 1, 5)).astype(np.float64)
        # NOTE: adding the (2, 10, 1, 5) offset to lhs broadcasts, so rhs
        # ends up the same full shape as lhs — this matches the original.
        rhs = lhs + direction * np.random.uniform(
            1, 2, (2, 10, 1, 5)).astype(np.float64)
        self.inputs = {'X': lhs, 'Y': rhs}
        self.outputs = {'Out': np.minimum(lhs, rhs)}


S
sneaxiy 已提交
174
class TestElementwiseMinOpFP16(unittest.TestCase):
    """Compare fp16 forward/backward against an fp32 reference in dygraph."""

    def get_out_and_grad(self, x_np, y_np, axis, place, use_fp32=False):
        # Inputs must arrive as fp16; when use_fp32 is set they are upcast
        # to run the fp32 reference path.  Results are always cast back to
        # fp16 before being returned, so the two paths are comparable.
        assert x_np.dtype == np.float16
        assert y_np.dtype == np.float16
        if use_fp32:
            x_np = x_np.astype(np.float32)
            y_np = y_np.astype(np.float32)

        with fluid.dygraph.guard(place):
            a = paddle.to_tensor(x_np)
            b = paddle.to_tensor(y_np)
            a.stop_gradient = False
            b.stop_gradient = False
            out = fluid.layers.elementwise_min(a, b, axis)
            grad_a, grad_b = paddle.grad([out], [a, b])
            triple = (out.numpy(), grad_a.numpy(), grad_b.numpy())
            return tuple(arr.astype(np.float16) for arr in triple)

    def check_main(self, x_shape, y_shape, axis=-1):
        # fp16 is only exercised on CUDA devices that support it.
        if not paddle.is_compiled_with_cuda():
            return
        place = paddle.CUDAPlace(0)
        if not core.is_float16_supported(place):
            return

        x_np = np.random.random(size=x_shape).astype(np.float16)
        y_np = np.random.random(size=y_shape).astype(np.float16)

        fp16_res = self.get_out_and_grad(x_np, y_np, axis, place, False)
        fp32_res = self.get_out_and_grad(x_np, y_np, axis, place, True)
        # Output and both gradients must match the fp32 reference exactly
        # after the cast back to fp16.
        for got, want in zip(fp16_res, fp32_res):
            np.testing.assert_array_equal(got, want)

    def test_main(self):
        # (x_shape, y_shape, axis) cases mirroring the OpTest classes above.
        cases = [
            ((13, 17), (13, 17), -1),
            ((10, 3, 4), (1, ), -1),
            ((100, ), (100, ), -1),
            ((100, 3, 2), (100, ), 0),
            ((2, 100, 3), (100, ), 1),
            ((2, 3, 100), (100, ), -1),
            ((2, 25, 4, 1), (25, 4), 1),
            ((2, 10, 2, 5), (2, 10, 1, 5), -1),
        ]
        for x_shape, y_shape, axis in cases:
            self.check_main(x_shape, y_shape, axis)


F
fengjiayi 已提交
222 223
# Run the full test suite when this file is executed directly.
if __name__ == '__main__':
    unittest.main()