#   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest

import gradient_checker
import numpy as np
from decorator_helper import prog_scope
from op_test import OpTest

import paddle
import paddle.fluid as fluid
import paddle.fluid.core as core
from paddle.fluid import Program, program_guard


class TestSignOp(OpTest):
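    """Check sign's forward output and gradients against numpy.sign."""
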
    def setUp(self):
        self.op_type = "sign"
        self.python_api = paddle.sign
        self.inputs = {
            'X': np.random.uniform(-10, 10, (10, 10)).astype("float64")
        }
        self.outputs = {'Out': np.sign(self.inputs['X'])}

    def test_check_output(self):
        self.check_output()

    def test_check_grad(self):
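        # d/dx sign(x) is zero wherever it is defined; check_grad compares
        # the op's gradient against a numeric estimate.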
        self.check_grad(['X'], 'Out')


class TestSignOpError(unittest.TestCase):
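    """sign must reject non-Variable inputs and unsupported integer dtypes."""
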
    def test_errors(self):
        with program_guard(Program(), Program()):
            # The input type of sign_op must be Variable or numpy.ndarray.
            input1 = 12
            self.assertRaises(TypeError, paddle.sign, input1)
            # The input dtype of sign_op must be float16, float32, float64.
            input2 = paddle.static.data(
                name='input2', shape=[-1, 12, 10], dtype="int32"
            )
            input3 = paddle.static.data(
                name='input3', shape=[-1, 12, 10], dtype="int64"
            )
            self.assertRaises(TypeError, paddle.sign, input2)
            self.assertRaises(TypeError, paddle.sign, input3)
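            # float16 is among the supported dtypes, so this should not raise.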
            input4 = paddle.static.data(
                name='input4', shape=[-1, 4], dtype="float16"
            )
            paddle.sign(input4)


class TestSignAPI(unittest.TestCase):
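    """Exercise paddle.sign through the dygraph and static-graph APIs."""
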
    def test_dygraph(self):
        with fluid.dygraph.guard():
            np_x = np.array([-1.0, 0.0, -0.0, 1.2, 1.5], dtype='float64')
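            # -0.0 compares equal to 0.0, so the check below accepts either sign of zero.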
            x = paddle.to_tensor(np_x)
            z = paddle.sign(x)
            np_z = z.numpy()
            z_expected = np.sign(np_x)
            self.assertEqual((np_z == z_expected).all(), True)

    def test_static(self):
        with program_guard(Program(), Program()):
            # The input type of sign_op must be Variable or numpy.ndarray.
            input1 = 12
            self.assertRaises(TypeError, paddle.tensor.math.sign, input1)
            # The input dtype of sign_op must be float16, float32, float64.
            input2 = paddle.static.data(
                name='input2', shape=[-1, 12, 10], dtype="int32"
            )
            input3 = paddle.static.data(
                name='input3', shape=[-1, 12, 10], dtype="int64"
            )
            self.assertRaises(TypeError, paddle.tensor.math.sign, input2)
            self.assertRaises(TypeError, paddle.tensor.math.sign, input3)
            input4 = paddle.static.data(
                name='input4', shape=[-1, 4], dtype="float16"
            )
            paddle.sign(input4)


class TestSignDoubleGradCheck(unittest.TestCase):
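    """Second-order gradient check for sign on CPU and, when available, GPU."""
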
    def sign_wrapper(self, x):
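        # gradient_checker passes the inputs as a list; unwrap the single tensor.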
        return paddle.sign(x[0])

    @prog_scope()
    def func(self, place):
        # The shape of the input variable must be fully specified and must not include -1.
        eps = 0.005
        dtype = np.float32

        data = paddle.static.data('data', [1, 4], dtype)
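        # Keep the variable persistable so gradient_checker can fetch it.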
        data.persistable = True
        out = paddle.sign(data)
        data_arr = np.random.uniform(-1, 1, data.shape).astype(dtype)

        gradient_checker.double_grad_check(
            [data], out, x_init=[data_arr], place=place, eps=eps
        )
        gradient_checker.double_grad_check_for_dygraph(
            self.sign_wrapper, [data], out, x_init=[data_arr], place=place
        )

    def test_grad(self):
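        # gradient_checker builds static-graph programs, so enable static mode.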
        paddle.enable_static()
        places = [fluid.CPUPlace()]
        if core.is_compiled_with_cuda():
            places.append(fluid.CUDAPlace(0))
        for p in places:
            self.func(p)


class TestSignTripleGradCheck(unittest.TestCase):
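    """Third-order gradient check for sign, mirroring the double-grad test."""
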
    def sign_wrapper(self, x):
        return paddle.sign(x[0])

    @prog_scope()
    def func(self, place):
        # The shape of the input variable must be fully specified and must not include -1.
        eps = 0.005
        dtype = np.float32

        data = paddle.static.data('data', [1, 4], dtype)
        data.persistable = True
        out = paddle.sign(data)
        data_arr = np.random.uniform(-1, 1, data.shape).astype(dtype)

        gradient_checker.triple_grad_check(
            [data], out, x_init=[data_arr], place=place, eps=eps
        )
        gradient_checker.triple_grad_check_for_dygraph(
            self.sign_wrapper, [data], out, x_init=[data_arr], place=place
        )

    def test_grad(self):
        paddle.enable_static()
        places = [fluid.CPUPlace()]
        if core.is_compiled_with_cuda():
            places.append(fluid.CUDAPlace(0))
        for p in places:
            self.func(p)


if __name__ == "__main__":
    paddle.enable_static()
    unittest.main()