#   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest
import numpy as np
import paddle.fluid.core as core
from op_test import OpTest


class TestDropoutOp(OpTest):
    """Dropout forward/backward with dropout_prob=0: nothing is dropped."""

    def setUp(self):
        self.op_type = "dropout"
        x = np.random.random((32, 64)).astype("float32")
        self.inputs = {'X': x}
        self.attrs = {'dropout_prob': 0.0, 'fix_seed': True, 'is_test': False}
        # With a zero drop probability every element is kept, so the
        # output equals the input and the mask is all ones.
        self.outputs = {'Out': x, 'Mask': np.ones((32, 64)).astype('float32')}

    def test_check_output(self):
        self.check_output()

    def test_check_grad_normal(self):
        self.check_grad(['X'], 'Out', max_relative_error=0.05)


class TestDropoutOp2(TestDropoutOp):
    """Dropout with dropout_prob=1: every element is dropped."""

    def setUp(self):
        self.op_type = "dropout"
        self.inputs = {'X': np.random.random((32, 64)).astype("float32")}
        self.attrs = {'dropout_prob': 1.0, 'fix_seed': True, 'is_test': False}
        # Full drop: both the output and the mask are all zeros.
        self.outputs = {
            'Out': np.zeros((32, 64)).astype('float32'),
            'Mask': np.zeros((32, 64)).astype('float32'),
        }


class TestDropoutOp3(TestDropoutOp):
    """Same as TestDropoutOp (prob=0) but with a 3-D input tensor."""

    def setUp(self):
        self.op_type = "dropout"
        x = np.random.random((32, 64, 2)).astype("float32")
        self.inputs = {'X': x}
        self.attrs = {'dropout_prob': 0.0, 'fix_seed': True, 'is_test': False}
        # prob=0 keeps everything: identity output, all-ones mask.
        self.outputs = {
            'Out': x,
            'Mask': np.ones((32, 64, 2)).astype('float32'),
        }


class TestDropoutOp4(OpTest):
    """Inference mode (is_test=True): output is X scaled by (1 - prob)."""

    def setUp(self):
        self.op_type = "dropout"
        keep_rate = 1.0 - 0.35
        self.inputs = {'X': np.random.random((32, 64)).astype("float32")}
        self.attrs = {'dropout_prob': 0.35, 'fix_seed': True, 'is_test': True}
        # No mask is produced at inference; the op just downscales.
        self.outputs = {'Out': self.inputs['X'] * keep_rate}

    def test_check_output(self):
        self.check_output()


class TestDropoutOp5(OpTest):
    """Inference mode with a 3-D input, high drop prob, and no fixed seed."""

    def setUp(self):
        self.op_type = "dropout"
        x = np.random.random((32, 64, 3)).astype("float32")
        self.inputs = {'X': x}
        self.attrs = {'dropout_prob': 0.75, 'is_test': True}
        # Inference path: deterministic rescale by the keep probability.
        self.outputs = {'Out': x * (1.0 - self.attrs['dropout_prob'])}

    def test_check_output(self):
        self.check_output()


K
Kexin Zhao 已提交
86
class TestFP16DropoutOp(OpTest):
    """FP16 dropout in inference mode; only checked on a CUDA place."""

    def setUp(self):
        self.op_type = "dropout"
        self.init_test_case()

        data = np.random.random(self.input_size).astype("float16")
        # Expected value is computed from the raw fp16 array before the
        # input is converted to the framework's dtype representation.
        expected = data * (1.0 - self.prob)

        self.inputs = {'X': OpTest.np_dtype_to_fluid_dtype(data)}
        self.attrs = {
            'dropout_prob': self.prob,
            'fix_seed': self.fix_seed,
            'is_test': True
        }
        self.outputs = {'Out': expected}

    def init_test_case(self):
        # Overridden by subclasses to vary shape / prob / seed fixing.
        self.input_size = [32, 64]
        self.prob = 0.35
        self.fix_seed = True

    def test_check_output(self):
        # FP16 dropout kernels are GPU-only; skip when CUDA is unavailable.
        if core.is_compiled_with_cuda() and core.op_support_gpu("dropout"):
            self.check_output_with_place(core.CUDAPlace(0), atol=1e-3)


K
Kexin Zhao 已提交
111 112 113 114 115
class TestFP16DropoutOp2(TestFP16DropoutOp):
    """FP16 variant: 3-D input, higher drop probability, unfixed seed."""

    def init_test_case(self):
        self.input_size = [32, 64, 3]
        self.prob = 0.75
        self.fix_seed = False


# Run the dropout operator test suite when executed as a script.
if __name__ == '__main__':
    unittest.main()