#   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function

import unittest
import numpy as np
import paddle.fluid.core as core
from op_test import OpTest


class TestDropoutOp(OpTest):
    """Training-mode dropout with prob=0: identity output, all-ones mask."""

    def setUp(self):
        self.op_type = "dropout"
        x = np.random.random((32, 64)).astype("float32")
        self.inputs = {'X': x}
        self.attrs = {'dropout_prob': 0.0, 'fix_seed': True, 'is_test': False}
        # With zero drop probability every element is kept unchanged.
        self.outputs = {
            'Out': x,
            'Mask': np.ones((32, 64)).astype('float32'),
        }

    def test_check_output(self):
        self.check_output()

    def test_check_grad_normal(self):
        self.check_grad(['X'], 'Out', max_relative_error=0.05)


class TestDropoutOp2(TestDropoutOp):
    """Training-mode dropout with prob=1: everything is dropped."""

    def setUp(self):
        self.op_type = "dropout"
        self.inputs = {'X': np.random.random((32, 64)).astype("float32")}
        self.attrs = {'dropout_prob': 1.0, 'fix_seed': True, 'is_test': False}
        # All elements are dropped, so both output and mask are zero.
        zeros = np.zeros((32, 64)).astype('float32')
        self.outputs = {'Out': zeros, 'Mask': zeros.copy()}


class TestDropoutOp3(TestDropoutOp):
    """Same as the base case (prob=0, training) but with a 3-D input."""

    def setUp(self):
        self.op_type = "dropout"
        x = np.random.random((32, 64, 2)).astype("float32")
        self.inputs = {'X': x}
        self.attrs = {'dropout_prob': 0.0, 'fix_seed': True, 'is_test': False}
        # prob=0 keeps everything: identity output, all-ones mask.
        self.outputs = {
            'Out': x,
            'Mask': np.ones((32, 64, 2)).astype('float32'),
        }


class TestDropoutOp4(OpTest):
    """Inference mode: expected output is input scaled by (1 - dropout_prob)."""

    def setUp(self):
        self.op_type = "dropout"
        self.inputs = {'X': np.random.random((32, 64)).astype("float32")}
        self.attrs = {'dropout_prob': 0.35, 'fix_seed': True, 'is_test': True}
        keep_prob = 1.0 - self.attrs['dropout_prob']
        self.outputs = {'Out': self.inputs['X'] * keep_prob}

    # No gradient check: is_test=True produces no Mask output.
    def test_check_output(self):
        self.check_output()


class TestDropoutOp5(OpTest):
    """Inference mode on a 3-D input without a fixed seed."""

    def setUp(self):
        self.op_type = "dropout"
        self.inputs = {'X': np.random.random((32, 64, 3)).astype("float32")}
        self.attrs = {'dropout_prob': 0.75, 'is_test': True}
        # At inference the expected output is input * (1 - dropout_prob).
        keep_prob = 1.0 - self.attrs['dropout_prob']
        self.outputs = {'Out': self.inputs['X'] * keep_prob}

    def test_check_output(self):
        self.check_output()


class TestDropoutOp6(TestDropoutOp):
    """upscale_in_train with prob=1: all dropped, output and mask zero."""

    def setUp(self):
        self.op_type = "dropout"
        self.inputs = {'X': np.random.random((32, 64)).astype("float32")}
        self.attrs = {
            'dropout_prob': 1.0,
            'fix_seed': True,
            'is_test': False,
            'dropout_implementation': 'upscale_in_train',
        }
        # Dropping every element leaves nothing to upscale.
        zeros = np.zeros((32, 64)).astype('float32')
        self.outputs = {'Out': zeros, 'Mask': zeros.copy()}


class TestDropoutOp7(TestDropoutOp):
    """upscale_in_train with prob=0 on a 3-D input: identity in training."""

    def setUp(self):
        self.op_type = "dropout"
        x = np.random.random((32, 64, 2)).astype("float32")
        self.inputs = {'X': x}
        self.attrs = {
            'dropout_prob': 0.0,
            'fix_seed': True,
            'is_test': False,
            'dropout_implementation': 'upscale_in_train',
        }
        # Keep probability is 1, so upscaling is a no-op: output == input.
        self.outputs = {
            'Out': x,
            'Mask': np.ones((32, 64, 2)).astype('float32'),
        }


class TestDropoutOp8(OpTest):
    """Inference with upscale_in_train: output passes through unscaled."""

    def setUp(self):
        self.op_type = "dropout"
        x = np.random.random((32, 64)).astype("float32")
        self.inputs = {'X': x}
        self.attrs = {
            'dropout_prob': 0.35,
            'fix_seed': True,
            'is_test': True,
            'dropout_implementation': 'upscale_in_train',
        }
        # Scaling was applied at training time, so inference is identity.
        self.outputs = {'Out': x}

    def test_check_output(self):
        self.check_output()


class TestDropoutOp9(OpTest):
    """Inference, upscale_in_train, 3-D input, no fixed seed: identity."""

    def setUp(self):
        self.op_type = "dropout"
        x = np.random.random((32, 64, 3)).astype("float32")
        self.inputs = {'X': x}
        self.attrs = {
            'dropout_prob': 0.75,
            'is_test': True,
            'dropout_implementation': 'upscale_in_train',
        }
        # With upscale_in_train the inference pass returns the input as-is.
        self.outputs = {'Out': x}

    def test_check_output(self):
        self.check_output()


K
Kexin Zhao 已提交
151
class TestFP16DropoutOp(OpTest):
    """float16 dropout in inference mode, checked only on CUDA devices."""

    def setUp(self):
        self.op_type = "dropout"
        self.init_test_case()

        data = np.random.random(self.input_size).astype("float16")
        # Inference-mode expectation: input scaled by the keep probability.
        expected = data * (1.0 - self.prob)
        self.inputs = {'X': OpTest.np_dtype_to_fluid_dtype(data)}
        self.attrs = {
            'dropout_prob': self.prob,
            'fix_seed': self.fix_seed,
            'is_test': True
        }
        self.outputs = {'Out': expected}

    def init_test_case(self):
        # Subclasses override these to vary shape, probability and seeding.
        self.input_size = [32, 64]
        self.prob = 0.35
        self.fix_seed = True

    def test_check_output(self):
        # fp16 is only exercised on GPU; loosen tolerance for half precision.
        if core.is_compiled_with_cuda() and core.op_support_gpu("dropout"):
            self.check_output_with_place(core.CUDAPlace(0), atol=1e-3)


K
Kexin Zhao 已提交
176 177 178 179 180
class TestFP16DropoutOp2(TestFP16DropoutOp):
    """fp16 variant: 3-D input, higher drop probability, no fixed seed."""

    def init_test_case(self):
        self.input_size = [32, 64, 3]
        self.prob = 0.75
        self.fix_seed = False


# Run the full test suite when this file is executed directly.
if __name__ == '__main__':
    unittest.main()