#   Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest
import numpy as np
import paddle
import paddle.fluid as fluid
import paddle.fluid.core as core
from op_test import OpTest
import gradient_checker
from decorator_helper import prog_scope
import paddle.fluid.layers as layers


class TestFlipOp_API(unittest.TestCase):
    """Test flip api."""

    def test_static_graph(self):
        startup_program = fluid.Program()
        train_program = fluid.Program()
        with fluid.program_guard(train_program, startup_program):
            axis = [0]
            input = fluid.data(name='input', dtype='float32', shape=[2, 3])
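            # flip rows, then the last axis, then rows again; the two row
            # flips cancel, so the net effect is one flip along the last axis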
            output = paddle.flip(input, axis)
            output = paddle.flip(output, -1)
            output = output.flip(0)
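            # prefer GPU when the build supports CUDA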
            place = fluid.CPUPlace()
            if fluid.core.is_compiled_with_cuda():
                place = fluid.CUDAPlace(0)
            exe = fluid.Executor(place)
            exe.run(startup_program)
            img = np.array([[1, 2, 3], [4, 5, 6]]).astype(np.float32)
            res = exe.run(
                train_program, feed={'input': img}, fetch_list=[output]
            )
            out_np = np.array(res[0])
            out_ref = np.array([[3, 2, 1], [6, 5, 4]]).astype(np.float32)
            self.assertTrue(
                (out_np == out_ref).all(),
                msg='flip output is wrong, out =' + str(out_np),
            )

    def test_dygraph(self):
        img = np.array([[1, 2, 3], [4, 5, 6]]).astype(np.float32)
        with fluid.dygraph.guard():
            inputs = fluid.dygraph.to_variable(img)
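            # flip axis 0 twice (which cancels out) and axis 1 once,
            # so the result reverses each row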
            ret = paddle.flip(inputs, [0])
            ret = ret.flip(0)
            ret = paddle.flip(ret, 1)
            out_ref = np.array([[3, 2, 1], [6, 5, 4]]).astype(np.float32)

            self.assertTrue(
                (ret.numpy() == out_ref).all(),
                msg='flip output is wrong, out =' + str(ret.numpy()),
            )


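# OpTest case: checks flip's forward output and gradients against a NumPy reference.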
class TestFlipOp(OpTest):
    def setUp(self):
        self.op_type = 'flip'
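        # Python-level API used for eager-mode verification (check_eager=True)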
        self.python_api = paddle.tensor.flip
        self.init_test_case()
        self.inputs = {'X': np.random.random(self.in_shape).astype('float64')}
        self.init_attrs()
        self.outputs = {'Out': self.calc_ref_res()}

    def init_attrs(self):
        self.attrs = {"axis": self.axis}

    def test_check_output(self):
        self.check_output(check_eager=True)

    def test_check_grad(self):
        self.check_grad(["X"], "Out", check_eager=True)

    def init_test_case(self):
        self.in_shape = (6, 4, 2, 3)
        self.axis = [0, 1]

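    # reference result: apply np.flip once per requested axis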
    def calc_ref_res(self):
        res = self.inputs['X']
        if isinstance(self.axis, int):
            return np.flip(res, self.axis)
        for axis in self.axis:
            res = np.flip(res, axis)
        return res


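# Variants of TestFlipOp covering different input shapes and axis lists,
# including multiple axes, an empty axis list, and a negative axis.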
class TestFlipOpAxis1(TestFlipOp):
    def init_test_case(self):
        self.in_shape = (2, 4, 4)
        self.axis = [0]


class TestFlipOpAxis2(TestFlipOp):
    def init_test_case(self):
        self.in_shape = (4, 4, 6, 3)
        self.axis = [0, 2]


class TestFlipOpAxis3(TestFlipOp):
    def init_test_case(self):
        self.in_shape = (4, 3, 1)
        self.axis = [0, 1, 2]


class TestFlipOpAxis4(TestFlipOp):
    def init_test_case(self):
        self.in_shape = (6, 4, 2, 2)
        self.axis = [0, 1, 2, 3]


class TestFlipOpEmptyAxis(TestFlipOp):
    def init_test_case(self):
        self.in_shape = (6, 4, 2, 2)
        self.axis = []


class TestFlipOpNegAxis(TestFlipOp):
    def init_test_case(self):
        self.in_shape = (6, 4, 2, 2)
        self.axis = [-1]


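# Numeric higher-order gradient checks for paddle.flip.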
class TestFlipDoubleGradCheck(unittest.TestCase):
    def flip_wrapper(self, x):
        return paddle.flip(x[0], [0, 1])

    @prog_scope()
    def func(self, place):
        # the shape of the input variable must be fully specified and must not include -1
        eps = 0.005
        dtype = np.float32

        data = layers.data('data', [3, 2, 2], False, dtype)
        data.persistable = True
        out = paddle.flip(data, [0, 1])
        data_arr = np.random.uniform(-1, 1, data.shape).astype(dtype)

        gradient_checker.double_grad_check(
            [data], out, x_init=[data_arr], place=place, eps=eps
        )
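        # retain intermediate gradients so the dygraph checker can read them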
        fluid.set_flags({"FLAGS_retain_grad_for_all_tensor": True})
        gradient_checker.double_grad_check_for_dygraph(
            self.flip_wrapper, [data], out, x_init=[data_arr], place=place
        )

    def test_grad(self):
        paddle.enable_static()
        places = [fluid.CPUPlace()]
        if core.is_compiled_with_cuda():
            places.append(fluid.CUDAPlace(0))
        for p in places:
            self.func(p)


class TestFlipTripleGradCheck(unittest.TestCase):
    def flip_wrapper(self, x):
        return paddle.flip(x[0], [0, 1])

    @prog_scope()
    def func(self, place):
        # the shape of the input variable must be fully specified and must not include -1
        eps = 0.005
        dtype = np.float32

        data = layers.data('data', [3, 2, 2], False, dtype)
        data.persistable = True
        out = paddle.flip(data, [0, 1])
        data_arr = np.random.uniform(-1, 1, data.shape).astype(dtype)

        gradient_checker.triple_grad_check(
            [data], out, x_init=[data_arr], place=place, eps=eps
        )
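        # retain intermediate gradients so the dygraph checker can read them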
        fluid.set_flags({"FLAGS_retain_grad_for_all_tensor": True})
        gradient_checker.triple_grad_check_for_dygraph(
            self.flip_wrapper, [data], out, x_init=[data_arr], place=place
        )

    def test_grad(self):
        paddle.enable_static()
        places = [fluid.CPUPlace()]
        if core.is_compiled_with_cuda():
            places.append(fluid.CUDAPlace(0))
        for p in places:
            self.func(p)


if __name__ == "__main__":
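    # run the OpTest-style cases under static graph mode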
    paddle.enable_static()
    unittest.main()