#   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest

import numpy as np
from op_test import OpTest
from test_conv2d_op import conv2d_forward_naive

import paddle.fluid.core as core


def create_test_padding_SAME_class(parent):
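    # Derive a subclass of ``parent`` that uses the "SAME" padding
    # algorithm and register it in globals() under a distinct name so
    # that unittest discovery picks it up.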
    class TestPaddingSAMECase(parent):
        def init_paddings(self):
            self.pad = [0, 0]
            self.padding_algorithm = "SAME"

    cls_name = "{0}_{1}".format(parent.__name__, "PaddingSAMEOp")
    TestPaddingSAMECase.__name__ = cls_name
    globals()[cls_name] = TestPaddingSAMECase


def create_test_padding_VALID_class(parent):
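    # Same pattern as above, but with the "VALID" padding algorithm.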
    class TestPaddingVALIDCase(parent):
        def init_paddings(self):
            self.pad = [1, 1]
            self.padding_algorithm = "VALID"

    cls_name = "{0}_{1}".format(parent.__name__, "PaddingVALIDOp")
    TestPaddingVALIDCase.__name__ = cls_name
    globals()[cls_name] = TestPaddingVALIDCase


class TestConv2DFusionOp(OpTest):
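    """Check conv2d_fusion against a reference built from the naive
    conv2d forward pass plus residual add, bias add and activation."""
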
    def setUp(self):
        self.op_type = "conv2d_fusion"
        self.exhaustive_search = False
        self.data_format = "NCHW"
        self.dtype = np.float32
        self.activation = 'relu'
        self.add_residual_data = True
        self.split_channels = None
        self.outputs = None
        self.padding_algorithm = "EXIPLICIT"

        self.init_group()
        self.init_dilation()
        self.init_test_case()
        self.init_residual()
        self.init_activation()
        self.init_paddings()
        self.set_search_method()

        conv2d_param = {
            'stride': self.stride,
            'pad': self.pad,
            'dilation': self.dilations,
        }

        input = np.random.random(self.input_size).astype(self.dtype)
        filter = np.random.random(self.filter_size).astype(self.dtype)
        bias = np.random.random(self.filter_size[0]).astype(self.dtype)

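        # Build the reference output: a plain convolution from the naive
        # helper; the fused residual add, bias add and activation are
        # applied to it below.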
        self.output, _, _, _, _ = conv2d_forward_naive(
            input,
            filter,
            self.groups,
            conv2d_param,
            self.padding_algorithm,
            self.data_format,
        )

        self.output = self.output.astype(self.dtype)

        self.inputs = {
            'Input': OpTest.np_dtype_to_fluid_dtype(input),
            'Filter': OpTest.np_dtype_to_fluid_dtype(filter),
            'Bias': OpTest.np_dtype_to_fluid_dtype(bias),
        }

        if self.add_residual_data:
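            # conv2d_fusion fuses an elementwise add of ResidualData, so
            # apply the same add to the reference output.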
            residual_data = np.random.random(self.output.shape).astype(
                self.dtype
            )
            self.inputs['ResidualData'] = OpTest.np_dtype_to_fluid_dtype(
                residual_data
            )
            self.output += residual_data

        # Add the per-output-channel bias, broadcast over N, H and W
        self.output = self.output + bias.reshape((1, bias.size, 1, 1))

        assert self.activation in ['relu', 'identity']
        if self.activation == 'relu':
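            # Fused ReLU activation: clamp negatives in the reference too.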
            self.output = np.maximum(self.output, 0)

        self.attrs = {
            'strides': self.stride,
            'paddings': self.pad,
            'groups': self.groups,
            'dilations': self.dilations,
            'data_format': self.data_format,
            'exhaustive_search': self.exhaustive_search,
            'activation': self.activation,
            'padding_algorithm': self.padding_algorithm,
        }
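        # split_channels makes the fused op slice its output along the
        # channel axis into multiple tensors (see TestMultipleOutputs).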
        if self.split_channels is not None:
            self.attrs['split_channels'] = self.split_channels

        self.outputs = {'Output': self.output}

        self.set_outputs()

    def has_cuda(self):
        return core.is_compiled_with_cuda()

    def test_check_output(self):
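        # The fused op is only exercised on CUDA devices; on CPU-only
        # builds this check is skipped.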
        if self.has_cuda():
            place = core.CUDAPlace(0)
            self.check_output_with_place(place, atol=1e-5)

    def init_test_case(self):
        self.pad = [0, 0]
        self.stride = [1, 1]
        self.input_size = [2, 3, 5, 5]  # NCHW
        assert np.mod(self.input_size[1], self.groups) == 0
        f_c = self.input_size[1] // self.groups
        self.filter_size = [6, f_c, 3, 3]

    def init_dilation(self):
        self.dilations = [1, 1]

    def init_group(self):
        self.groups = 1

    def init_residual(self):
        self.add_residual_data = True

    def init_activation(self):
        self.activation = 'relu'

    def set_search_method(self):
        self.exhaustive_search = False

    def set_outputs(self):
        pass

    def init_paddings(self):
        self.pad = [0, 0]
        self.padding_algorithm = "EXPLICIT"


class TestWithoutResidual(TestConv2DFusionOp):
    def init_residual(self):
        self.add_residual_data = False


class TestIdentityActivation(TestConv2DFusionOp):
    def init_activation(self):
        self.activation = 'identity'


class TestIdentityActivation1(TestConv2DFusionOp):
    def init_activation(self):
        self.activation = 'identity'
        self.add_residual_data = False


class TestWithGroup(TestConv2DFusionOp):
    def init_group(self):
        self.groups = 3


class TestWithDilation(TestConv2DFusionOp):
    def init_test_case(self):
        self.pad = [0, 0]
        self.stride = [1, 1]
        self.input_size = [2, 3, 10, 10]  # NCHW
        assert np.mod(self.input_size[1], self.groups) == 0
        f_c = self.input_size[1] // self.groups
        self.filter_size = [6, f_c, 3, 3]

    def init_dilation(self):
        self.dilations = [2, 2]

    def init_group(self):
        self.groups = 3


class TestCUDNNExhaustiveSearch(TestConv2DFusionOp):
    def set_search_method(self):
        self.exhaustive_search = True


class TestMultipleOutputs(TestConv2DFusionOp):
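    # The 126 output channels are split into 84 + 42 slices through the
    # split_channels attribute; set_outputs supplies the expected tensors
    # for each slice.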
    def init_test_case(self):
        self.pad = [1, 1]
        self.stride = [1, 1]
        self.input_size = [1, 32, 17, 17]  # NCHW
        assert np.mod(self.input_size[1], self.groups) == 0
        f_c = self.input_size[1] // self.groups
        self.filter_size = [126, f_c, 3, 3]
        self.split_channels = [84, 42]

    def set_outputs(self):
        out1 = self.output[:, 0:84, :, :]
        out2 = self.output[:, 84:126, :, :]
        self.outputs['Outputs'] = [('out1', out1), ('out2', out2)]


class TestAsyPadding(TestConv2DFusionOp):
    def init_paddings(self):
        self.pad = [0, 0, 1, 2]
        self.padding_algorithm = "EXPLICIT"


class TestWithPad_AsyPadding(TestConv2DFusionOp):
    def init_test_case(self):
        self.stride = [1, 1]
        self.input_size = [2, 3, 10, 10]  # NCHW
        assert np.mod(self.input_size[1], self.groups) == 0
        f_c = self.input_size[1] // self.groups
        self.filter_size = [6, f_c, 3, 3]

    def init_paddings(self):
        self.pad = [2, 1, 3, 2]
        self.padding_algorithm = "EXPLICIT"


class TestWithStride_AsyPadding(TestConv2DFusionOp):
    def init_test_case(self):
        self.stride = [2, 2]
        self.input_size = [2, 3, 6, 6]  # NCHW
        assert np.mod(self.input_size[1], self.groups) == 0
        f_c = self.input_size[1] // self.groups
        self.filter_size = [6, f_c, 3, 3]

    def init_paddings(self):
        self.pad = [2, 1, 3, 2]
        self.padding_algorithm = "EXPLICIT"


class TestWith1x1_AsyPadding(TestConv2DFusionOp):
    def init_test_case(self):
        self.stride = [1, 1]
        self.input_size = [2, 3, 5, 5]  # NCHW
        assert np.mod(self.input_size[1], self.groups) == 0
        f_c = self.input_size[1] // self.groups
        self.filter_size = [6, f_c, 1, 1]

    def init_group(self):
        self.groups = 3

    def init_paddings(self):
        self.pad = [2, 2, 4, 0]
        self.padding_algorithm = "EXPLICIT"


class TestWithGroup_AsyPadding(TestConv2DFusionOp):
    def init_group(self):
        self.groups = 3


class TestWithDepthWise3x3_AsyPadding(TestConv2DFusionOp):
    def init_test_case(self):
        self.stride = [1, 1]
        self.input_size = [3, 4, 10, 10]  # NCHW
        assert np.mod(self.input_size[1], self.groups) == 0
        f_c = self.input_size[1] // self.groups
        self.filter_size = [8, f_c, 3, 3]

    def init_dilation(self):
        self.dilations = [2, 2]

    def init_group(self):
        self.groups = 4

    def init_paddings(self):
        self.pad = [1, 3, 2, 1]
        self.padding_algorithm = "EXPLICIT"


class TestWithDepthWise5x5_AsyPadding(TestConv2DFusionOp):
    def init_test_case(self):
        self.stride = [1, 1]
        self.input_size = [2, 4, 10, 10]  # NCHW
        assert np.mod(self.input_size[1], self.groups) == 0
        f_c = self.input_size[1] // self.groups
        self.filter_size = [8, f_c, 5, 5]

    def init_group(self):
        self.groups = 4

    def init_paddings(self):
        self.pad = [0, 1, 1, 0]
        self.padding_algorithm = "EXPLICIT"


class TestWithDepthWise7x7_AsyPadding(TestConv2DFusionOp):
    def init_test_case(self):
        self.stride = [2, 2]
        self.input_size = [2, 8, 10, 10]  # NCHW
        assert np.mod(self.input_size[1], self.groups) == 0
        f_c = self.input_size[1] // self.groups
        self.filter_size = [16, f_c, 7, 7]

    def init_group(self):
        self.groups = 8

    def init_paddings(self):
        self.pad = [1, 3, 4, 1]
        self.padding_algorithm = "EXPLICIT"


class TestWithDilation_AsyPadding(TestConv2DFusionOp):
    def init_test_case(self):
        self.stride = [1, 1]
        self.input_size = [2, 3, 10, 10]  # NCHW
        assert np.mod(self.input_size[1], self.groups) == 0
        f_c = self.input_size[1] // self.groups
        self.filter_size = [6, f_c, 3, 3]

    def init_dilation(self):
        self.dilations = [2, 2]

    def init_group(self):
        self.groups = 3

    def init_paddings(self):
        self.pad = [0, 1, 3, 0]
        self.padding_algorithm = "EXPLICIT"


class TestWithInput1x1Filter1x1_AsyPadding(TestConv2DFusionOp):
    def init_test_case(self):
        self.stride = [1, 1]
        self.input_size = [2, 3, 1, 1]  # NCHW
        assert np.mod(self.input_size[1], self.groups) == 0
        f_c = self.input_size[1] // self.groups
        self.filter_size = [6, f_c, 1, 1]

    def init_group(self):
        self.groups = 3

    def init_paddings(self):
        self.pad = [0, 3, 4, 0]
        self.padding_algorithm = "EXPLICIT"


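# Register SAME and VALID padding variants of the asymmetric-padding
# cases defined above.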
create_test_padding_SAME_class(TestAsyPadding)
create_test_padding_SAME_class(TestWithPad_AsyPadding)
create_test_padding_SAME_class(TestWithStride_AsyPadding)
create_test_padding_SAME_class(TestWithGroup_AsyPadding)
create_test_padding_SAME_class(TestWithInput1x1Filter1x1_AsyPadding)

create_test_padding_VALID_class(TestAsyPadding)
create_test_padding_VALID_class(TestWithPad_AsyPadding)
create_test_padding_VALID_class(TestWithStride_AsyPadding)
create_test_padding_VALID_class(TestWithGroup_AsyPadding)
create_test_padding_VALID_class(TestWithInput1x1Filter1x1_AsyPadding)

if __name__ == '__main__':
    unittest.main()