#   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function

import unittest
import numpy as np
import paddle.fluid as fluid
import six
from paddle.fluid import Program, program_guard
from op_test import OpTest, skip_check_grad_ci


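# Error-path checks for the imperative fluid.PRelu layer: non-Variable
# inputs and unsupported dtypes are expected to raise TypeError.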
class TestPReluAPIError(unittest.TestCase):
    def test_errors(self):
        with fluid.program_guard(fluid.Program(), fluid.Program()):
            layer = fluid.PRelu(
                mode='all',
                param_attr=fluid.ParamAttr(
                    initializer=fluid.initializer.Constant(1.0)))
            # The input must be a Variable.
            x0 = fluid.create_lod_tensor(
                np.array([-1, 3, 5, 5]), [[1, 1, 1, 1]], fluid.CPUPlace())
            self.assertRaises(TypeError, layer, x0)
            # The input dtype must be float32; float64 should raise TypeError.
            data_t = fluid.data(
                name="input", shape=[5, 200, 100, 100], dtype="float64")
            self.assertRaises(TypeError, layer, data_t)


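# Base operator test: setUp builds random inputs, computes a NumPy
# reference output, and registers both for OpTest's forward/backward
# comparison against the registered "prelu" kernel.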
class PReluTest(OpTest):
    def setUp(self):
        self.init_input_shape()
        self.init_attr()
        self.op_type = "prelu"

        x_np = np.random.uniform(-1, 1, self.x_shape)
        # PReLU is not differentiable at zero, so keep the random inputs
        # away from zero to make the numeric gradient check stable.
        x_np[np.abs(x_np) < 0.005] = 0.02

        if self.attrs == {'mode': "all"}:
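            # 'all' mode shares a single scalar alpha across the whole input.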
            alpha_np = np.random.uniform(-1, -0.5, (1, ))
        elif self.attrs == {'mode': "channel"}:
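            # 'channel' mode learns one alpha per channel, broadcast over H, W.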
            alpha_np = np.random.uniform(-1, -0.5, (1, x_np.shape[1], 1, 1))
        else:
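            # 'element' mode learns one alpha per element, shared across the batch.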
            alpha_np = np.random.uniform(
                -1, -0.5, (1, x_np.shape[1], x_np.shape[2], x_np.shape[3]))
        self.inputs = {'X': x_np, 'Alpha': alpha_np}

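        # NumPy reference: prelu(x) = max(x, 0) + alpha * min(x, 0).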
        out_np = np.maximum(self.inputs['X'], 0.)
        out_np = out_np + np.minimum(self.inputs['X'],
                                     0.) * self.inputs['Alpha']
        assert out_np is not self.inputs['X']
        self.outputs = {'Out': out_np}

    def init_input_shape(self):
        self.x_shape = (2, 100, 3, 4)

    def init_attr(self):
        self.attrs = {'mode': "channel"}

    def test_check_output(self):
        self.check_output()

    def test_check_grad(self):
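        # Gradients are checked w.r.t. both the input X and the weight Alpha.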
        self.check_grad(['X', 'Alpha'], 'Out')


# TODO(minqiyang): Resume these test cases after fixing Python3 CI job issues
if six.PY2:

    @skip_check_grad_ci(
        reason="[skip shape check] Input(Alpha) must be 1-D and only has one data in 'all' mode"
    )
    class TestModeAll(PReluTest):
        def init_input_shape(self):
            self.x_shape = (2, 3, 4, 5)

        def init_attr(self):
            self.attrs = {'mode': "all"}

    class TestModeElt(PReluTest):
        def init_input_shape(self):
            self.x_shape = (3, 2, 5, 10)

        def init_attr(self):
            self.attrs = {'mode': "element"}


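# Error-path checks for the declarative fluid.layers.prelu API.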
class TestPReluOpError(unittest.TestCase):
    def test_errors(self):
        with program_guard(Program()):
            # The input type must be Variable.
            self.assertRaises(TypeError, fluid.layers.prelu, 1, 'all')
            # The input dtype must be float16, float32, float64.
            x_int32 = fluid.data(name='x_int32', shape=[12, 10], dtype='int32')
            self.assertRaises(TypeError, fluid.layers.prelu, x_int32, 'all')
            # A float16 input is supported and should not raise.
            x_fp16 = fluid.layers.data(
                name='x_fp16', shape=[12, 10], dtype='float16')
            fluid.layers.prelu(x_fp16, 'all')


if __name__ == "__main__":
    unittest.main()