#   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
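
"""Unit tests for the sign op: forward output, type/dtype error handling, and
second- and third-order gradient checks."""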

import unittest

import gradient_checker
import numpy as np
from decorator_helper import prog_scope
from op_test import OpTest

import paddle
import paddle.fluid as fluid
import paddle.fluid.core as core
import paddle.fluid.layers as layers
from paddle.fluid import Program, program_guard


class TestSignOp(OpTest):
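    """Check the sign op's forward output against np.sign and its gradient
    via OpTest; sign is piecewise constant, so its gradient is zero wherever
    it is defined."""
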
    def setUp(self):
        self.op_type = "sign"
        self.inputs = {
            'X': np.random.uniform(-10, 10, (10, 10)).astype("float64")
        }
        self.outputs = {'Out': np.sign(self.inputs['X'])}

    def test_check_output(self):
        self.check_output()

    def test_check_grad(self):
        self.check_grad(['X'], 'Out')


class TestSignOpError(unittest.TestCase):
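    """Verify that paddle.sign rejects unsupported input types and dtypes."""
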
    def test_errors(self):
        with program_guard(Program(), Program()):
            # The input type of sign_op must be Variable or numpy.ndarray.
            input1 = 12
            self.assertRaises(TypeError, paddle.sign, input1)
            # The input dtype of sign_op must be float16, float32, or float64.
            input2 = fluid.layers.data(
                name='input2', shape=[12, 10], dtype="int32"
            )
            input3 = fluid.layers.data(
                name='input3', shape=[12, 10], dtype="int64"
            )
            self.assertRaises(TypeError, paddle.sign, input2)
            self.assertRaises(TypeError, paddle.sign, input3)
            input4 = fluid.layers.data(
                name='input4', shape=[4], dtype="float16"
            )
            paddle.sign(input4)


class TestSignAPI(unittest.TestCase):
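    """Exercise paddle.sign through the public dygraph and static-graph APIs."""
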
    def test_dygraph(self):
        with fluid.dygraph.guard():
            np_x = np.array([-1.0, 0.0, -0.0, 1.2, 1.5], dtype='float64')
            x = paddle.to_tensor(np_x)
            z = paddle.sign(x)
            np_z = z.numpy()
            z_expected = np.sign(np_x)
            self.assertEqual((np_z == z_expected).all(), True)

    def test_static(self):
        with program_guard(Program(), Program()):
            # The input type of sign_op must be Variable or numpy.ndarray.
            input1 = 12
            self.assertRaises(TypeError, paddle.tensor.math.sign, input1)
            # The input dtype of sign_op must be float16, float32, or float64.
            input2 = fluid.layers.data(
                name='input2', shape=[12, 10], dtype="int32"
            )
            input3 = fluid.layers.data(
                name='input3', shape=[12, 10], dtype="int64"
            )
            self.assertRaises(TypeError, paddle.tensor.math.sign, input2)
            self.assertRaises(TypeError, paddle.tensor.math.sign, input3)
            input4 = fluid.layers.data(
                name='input4', shape=[4], dtype="float16"
            )
            paddle.sign(input4)


class TestSignDoubleGradCheck(unittest.TestCase):
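    """Second-order gradient check for the sign op in static and dygraph modes."""
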
    def sign_wrapper(self, x):
        return paddle.sign(x[0])

    @prog_scope()
    def func(self, place):
        # The shape of the input variable must be specified explicitly and must not include -1.
        eps = 0.005
        dtype = np.float32

        data = layers.data('data', [1, 4], False, dtype)
        data.persistable = True
        out = paddle.sign(data)
        data_arr = np.random.uniform(-1, 1, data.shape).astype(dtype)

        gradient_checker.double_grad_check(
            [data], out, x_init=[data_arr], place=place, eps=eps
        )
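        # Dygraph double-grad checking reads gradients of intermediate
        # tensors, so ask the framework to retain them.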
        fluid.set_flags({"FLAGS_retain_grad_for_all_tensor": True})
        gradient_checker.double_grad_check_for_dygraph(
            self.sign_wrapper, [data], out, x_init=[data_arr], place=place
        )

    def test_grad(self):
        paddle.enable_static()
        places = [fluid.CPUPlace()]
        if core.is_compiled_with_cuda():
            places.append(fluid.CUDAPlace(0))
        for p in places:
            self.func(p)


class TestSignTripleGradCheck(unittest.TestCase):
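    """Third-order gradient check for the sign op in static and dygraph modes."""
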
    def sign_wrapper(self, x):
        return paddle.sign(x[0])

    @prog_scope()
    def func(self, place):
        # The shape of the input variable must be specified explicitly and must not include -1.
        eps = 0.005
        dtype = np.float32

        data = layers.data('data', [1, 4], False, dtype)
        data.persistable = True
        out = paddle.sign(data)
        data_arr = np.random.uniform(-1, 1, data.shape).astype(dtype)

        gradient_checker.triple_grad_check(
            [data], out, x_init=[data_arr], place=place, eps=eps
        )
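        # As in the double-grad test, retain intermediate gradients for
        # the dygraph checker.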
        fluid.set_flags({"FLAGS_retain_grad_for_all_tensor": True})
        gradient_checker.triple_grad_check_for_dygraph(
            self.sign_wrapper, [data], out, x_init=[data_arr], place=place
        )

    def test_grad(self):
        paddle.enable_static()
        places = [fluid.CPUPlace()]
        if core.is_compiled_with_cuda():
            places.append(fluid.CUDAPlace(0))
        for p in places:
            self.func(p)


if __name__ == "__main__":
    paddle.enable_static()
    unittest.main()