#   Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest

import gradient_checker
import numpy as np
from decorator_helper import prog_scope
from op_test import OpTest

import paddle
import paddle.fluid as fluid
import paddle.fluid.core as core
import paddle.fluid.layers as layers


class TestFlipOp_API(unittest.TestCase):
    """Test flip api."""

    def test_static_graph(self):
        startup_program = fluid.Program()
        train_program = fluid.Program()
        with fluid.program_guard(train_program, startup_program):
            axis = [0]
            input = fluid.data(name='input', dtype='float32', shape=[2, 3])
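            # Chain three flips: axis 0, then the last axis, then axis 0
            # again via the Tensor method. The two axis-0 flips cancel, so
            # the net effect is a single flip along axis 1.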
            output = paddle.flip(input, axis)
            output = paddle.flip(output, -1)
            output = output.flip(0)
            place = fluid.CPUPlace()
            if fluid.core.is_compiled_with_cuda():
                place = fluid.CUDAPlace(0)
            exe = fluid.Executor(place)
            exe.run(startup_program)
            img = np.array([[1, 2, 3], [4, 5, 6]]).astype(np.float32)
            res = exe.run(
                train_program, feed={'input': img}, fetch_list=[output]
            )
            out_np = np.array(res[0])
            out_ref = np.array([[3, 2, 1], [6, 5, 4]]).astype(np.float32)
            self.assertTrue(
                (out_np == out_ref).all(),
                msg='flip output is wrong, out =' + str(out_np),
            )

    def test_dygraph(self):
        img = np.array([[1, 2, 3], [4, 5, 6]]).astype(np.float32)
        with fluid.dygraph.guard():
            inputs = fluid.dygraph.to_variable(img)
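            # As in the static test, the two axis-0 flips cancel and only
            # the axis-1 flip remains.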
            ret = paddle.flip(inputs, [0])
            ret = ret.flip(0)
            ret = paddle.flip(ret, 1)
            out_ref = np.array([[3, 2, 1], [6, 5, 4]]).astype(np.float32)

            self.assertTrue(
                (ret.numpy() == out_ref).all(),
                msg='flip output is wrong, out =' + str(ret.numpy()),
            )


class TestFlipOp(OpTest):
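    """Test flip op against a NumPy reference, including gradient checks."""
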
    def setUp(self):
        self.op_type = 'flip'
        self.python_api = paddle.tensor.flip
        self.init_test_case()
        self.inputs = {'X': np.random.random(self.in_shape).astype('float64')}
        self.init_attrs()
        self.outputs = {'Out': self.calc_ref_res()}

    def init_attrs(self):
        self.attrs = {"axis": self.axis}

    def test_check_output(self):
        self.check_output(check_eager=True)

    def test_check_grad(self):
        self.check_grad(["X"], "Out", check_eager=True)

    def init_test_case(self):
        self.in_shape = (6, 4, 2, 3)
        self.axis = [0, 1]

    def calc_ref_res(self):
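        # Reference result: flip one axis at a time; for a list of axes this
        # is equivalent to np.flip(res, tuple(self.axis)).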
        res = self.inputs['X']
        if isinstance(self.axis, int):
            return np.flip(res, self.axis)
        for axis in self.axis:
            res = np.flip(res, axis)
        return res


class TestFlipOpAxis1(TestFlipOp):
    def init_test_case(self):
        self.in_shape = (2, 4, 4)
        self.axis = [0]


class TestFlipOpAxis2(TestFlipOp):
    def init_test_case(self):
        self.in_shape = (4, 4, 6, 3)
        self.axis = [0, 2]


class TestFlipOpAxis3(TestFlipOp):
    def init_test_case(self):
        self.in_shape = (4, 3, 1)
        self.axis = [0, 1, 2]


class TestFlipOpAxis4(TestFlipOp):
    def init_test_case(self):
        self.in_shape = (6, 4, 2, 2)
        self.axis = [0, 1, 2, 3]


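# With an empty axis list the reference loop performs no flips, so the op
# must return the input unchanged.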
class TestFlipOpEmptyAxis(TestFlipOp):
    def init_test_case(self):
        self.in_shape = (6, 4, 2, 2)
        self.axis = []


class TestFlipOpNegAxis(TestFlipOp):
    def init_test_case(self):
        self.in_shape = (6, 4, 2, 2)
        self.axis = [-1]


class TestFlipDoubleGradCheck(unittest.TestCase):
    def flip_wrapper(self, x):
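        # The dygraph gradient checker passes the inputs as a list; apply
        # the same flip as the static program under test.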
        return paddle.flip(x[0], [0, 1])

    @prog_scope()
    def func(self, place):
        # The shape of the input variable must be fully specified and must not include -1.
        eps = 0.005
        dtype = np.float32

        data = layers.data('data', [3, 2, 2], False, dtype)
        data.persistable = True
        out = paddle.flip(data, [0, 1])
        data_arr = np.random.uniform(-1, 1, data.shape).astype(dtype)

        gradient_checker.double_grad_check(
            [data], out, x_init=[data_arr], place=place, eps=eps
        )
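        # Retain gradients of intermediate tensors so the dygraph-based
        # check below can read them back.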
        fluid.set_flags({"FLAGS_retain_grad_for_all_tensor": True})
        gradient_checker.double_grad_check_for_dygraph(
            self.flip_wrapper, [data], out, x_init=[data_arr], place=place
        )

    def test_grad(self):
        paddle.enable_static()
        places = [fluid.CPUPlace()]
        if core.is_compiled_with_cuda():
            places.append(fluid.CUDAPlace(0))
        for p in places:
            self.func(p)


class TestFlipTripleGradCheck(unittest.TestCase):
    def flip_wrapper(self, x):
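        # Same wrapper as in the double-grad test, reused for the dygraph
        # triple-grad check.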
        return paddle.flip(x[0], [0, 1])

    @prog_scope()
    def func(self, place):
        # The shape of the input variable must be fully specified and must not include -1.
        eps = 0.005
        dtype = np.float32

        data = layers.data('data', [3, 2, 2], False, dtype)
        data.persistable = True
        out = paddle.flip(data, [0, 1])
        data_arr = np.random.uniform(-1, 1, data.shape).astype(dtype)

        gradient_checker.triple_grad_check(
            [data], out, x_init=[data_arr], place=place, eps=eps
        )
        fluid.set_flags({"FLAGS_retain_grad_for_all_tensor": True})
        gradient_checker.triple_grad_check_for_dygraph(
            self.flip_wrapper, [data], out, x_init=[data_arr], place=place
        )

    def test_grad(self):
        paddle.enable_static()
        places = [fluid.CPUPlace()]
        if core.is_compiled_with_cuda():
            places.append(fluid.CUDAPlace(0))
        for p in places:
            self.func(p)


if __name__ == "__main__":
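    # OpTest-style cases expect static graph mode to be enabled.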
    paddle.enable_static()
    unittest.main()