#   Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest

import gradient_checker
import numpy as np
from decorator_helper import prog_scope
from op_test import OpTest

import paddle
import paddle.fluid as fluid
import paddle.fluid.core as core


class TestFlipOp_API(unittest.TestCase):
    """Test flip api."""

    def test_static_graph(self):
        startup_program = fluid.Program()
        train_program = fluid.Program()
        with fluid.program_guard(train_program, startup_program):
            axis = [0]
            input = fluid.data(name='input', dtype='float32', shape=[2, 3])
            output = paddle.flip(input, axis)
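            # axis 0, then -1 (the last axis), then 0 again: the two flips
            # along axis 0 cancel, leaving a single flip along axis 1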
            output = paddle.flip(output, -1)
            output = output.flip(0)
            place = fluid.CPUPlace()
            if fluid.core.is_compiled_with_cuda():
                place = fluid.CUDAPlace(0)
            exe = fluid.Executor(place)
            exe.run(startup_program)
            img = np.array([[1, 2, 3], [4, 5, 6]]).astype(np.float32)
            res = exe.run(
                train_program, feed={'input': img}, fetch_list=[output]
            )
            out_np = np.array(res[0])
            out_ref = np.array([[3, 2, 1], [6, 5, 4]]).astype(np.float32)
            self.assertTrue(
                (out_np == out_ref).all(),
                msg='flip output is wrong, out =' + str(out_np),
            )

    def test_dygraph(self):
        img = np.array([[1, 2, 3], [4, 5, 6]]).astype(np.float32)
        with fluid.dygraph.guard():
            inputs = fluid.dygraph.to_variable(img)
            ret = paddle.flip(inputs, [0])
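            # the two flips along axis 0 cancel, so the chain reduces to
            # a single flip along axis 1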
            ret = ret.flip(0)
            ret = paddle.flip(ret, 1)
            out_ref = np.array([[3, 2, 1], [6, 5, 4]]).astype(np.float32)

            self.assertTrue(
                (ret.numpy() == out_ref).all(),
                msg='flip output is wrong, out =' + str(ret.numpy()),
            )


class TestFlipOp(OpTest):
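    # OpTest-based check: compares the flip kernel against the NumPy
    # reference from calc_ref_res and verifies the gradient w.r.t. X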
    def setUp(self):
        self.op_type = 'flip'
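        # python_api is what check_eager uses to run the op in eager mode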
        self.python_api = paddle.tensor.flip
        self.init_test_case()
        self.inputs = {'X': np.random.random(self.in_shape).astype('float64')}
        self.init_attrs()
        self.outputs = {'Out': self.calc_ref_res()}

    def init_attrs(self):
        self.attrs = {"axis": self.axis}

    def test_check_output(self):
        self.check_output(check_eager=True)

    def test_check_grad(self):
        self.check_grad(["X"], "Out", check_eager=True)

    def init_test_case(self):
        self.in_shape = (6, 4, 2, 3)
        self.axis = [0, 1]

    def calc_ref_res(self):
        res = self.inputs['X']
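        # NumPy reference: an int axis flips once, a list flips along
        # each axis in turn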
        if isinstance(self.axis, int):
            return np.flip(res, self.axis)
        for axis in self.axis:
            res = np.flip(res, axis)
        return res


class TestFlipOpAxis1(TestFlipOp):
    def init_test_case(self):
        self.in_shape = (2, 4, 4)
        self.axis = [0]


class TestFlipOpAxis2(TestFlipOp):
    def init_test_case(self):
        self.in_shape = (4, 4, 6, 3)
        self.axis = [0, 2]


class TestFlipOpAxis3(TestFlipOp):
    def init_test_case(self):
        self.in_shape = (4, 3, 1)
        self.axis = [0, 1, 2]


class TestFlipOpAxis4(TestFlipOp):
    def init_test_case(self):
        self.in_shape = (6, 4, 2, 2)
        self.axis = [0, 1, 2, 3]


class TestFlipOpEmptyAxis(TestFlipOp):
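    # an empty axis list is expected to be a no-op: the reference loop
    # in calc_ref_res never flips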
    def init_test_case(self):
        self.in_shape = (6, 4, 2, 2)
        self.axis = []


class TestFlipOpNegAxis(TestFlipOp):
    def init_test_case(self):
        self.in_shape = (6, 4, 2, 2)
        self.axis = [-1]


class TestFlipDoubleGradCheck(unittest.TestCase):
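    # functional wrapper with the signature the gradient checker expects
    # for its dygraph double-grad check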
    def flip_wrapper(self, x):
        return paddle.flip(x[0], [0, 1])

    @prog_scope()
    def func(self, place):
        # the shape of the input variable should be specified explicitly and must not include -1.
        eps = 0.005
        dtype = np.float32

        data = paddle.static.data('data', [3, 2, 2], dtype)
        data.persistable = True
        out = paddle.flip(data, [0, 1])
        data_arr = np.random.uniform(-1, 1, data.shape).astype(dtype)

        gradient_checker.double_grad_check(
            [data], out, x_init=[data_arr], place=place, eps=eps
        )
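        # retain gradients for intermediate tensors so the dygraph
        # checker can read them back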
        fluid.set_flags({"FLAGS_retain_grad_for_all_tensor": True})
        gradient_checker.double_grad_check_for_dygraph(
            self.flip_wrapper, [data], out, x_init=[data_arr], place=place
        )

    def test_grad(self):
        paddle.enable_static()
        places = [fluid.CPUPlace()]
        if core.is_compiled_with_cuda():
            places.append(fluid.CUDAPlace(0))
        for p in places:
            self.func(p)


class TestFlipTripleGradCheck(unittest.TestCase):
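    # same structure as the double-grad check, one derivative order higher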
    def flip_wrapper(self, x):
        return paddle.flip(x[0], [0, 1])

    @prog_scope()
    def func(self, place):
        # the shape of the input variable should be specified explicitly and must not include -1.
        eps = 0.005
        dtype = np.float32

        data = paddle.static.data('data', [3, 2, 2], dtype)
        data.persistable = True
        out = paddle.flip(data, [0, 1])
        data_arr = np.random.uniform(-1, 1, data.shape).astype(dtype)

        gradient_checker.triple_grad_check(
            [data], out, x_init=[data_arr], place=place, eps=eps
        )
        fluid.set_flags({"FLAGS_retain_grad_for_all_tensor": True})
        gradient_checker.triple_grad_check_for_dygraph(
            self.flip_wrapper, [data], out, x_init=[data_arr], place=place
        )

    def test_grad(self):
        paddle.enable_static()
        places = [fluid.CPUPlace()]
        if core.is_compiled_with_cuda():
            places.append(fluid.CUDAPlace(0))
        for p in places:
            self.func(p)


if __name__ == "__main__":
    paddle.enable_static()
    unittest.main()