#   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Unit testing for affine_channel_op
"""

import unittest
import numpy as np
from op_test import OpTest


def affine_channel(x, scale, bias, layout):
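    """NumPy reference for affine_channel: per-channel scale and bias.

    Scale and bias are reshaped to broadcast along the channel axis of x,
    which is axis 1 for 'NCHW' and the last axis for 'NHWC'; 2-D inputs
    use shape (1, C).
    """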
    C = x.shape[1] if layout == 'NCHW' else x.shape[-1]
    if len(x.shape) == 4:
        new_shape = (1, C, 1, 1) if layout == 'NCHW' else (1, 1, 1, C)
    else:
        new_shape = (1, C)
    scale = scale.reshape(new_shape)
    bias = bias.reshape(new_shape)
    return x * scale + bias


class TestAffineChannelOp(OpTest):
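    """Check the op's output against the NumPy reference and run gradient
    checks; the default case is a 4-D NCHW input."""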
    def setUp(self):
        self.op_type = "affine_channel"
        self.init_test_case()

        x = np.random.random(self.shape).astype("float64")
        scale = np.random.random(self.C).astype("float64")
        bias = np.random.random(self.C).astype("float64")

        y = affine_channel(x, scale, bias, self.layout)

        self.inputs = {'X': x, 'Scale': scale, 'Bias': bias}
        self.attrs = {'data_layout': self.layout}
        self.outputs = {'Out': y}

    def test_check_output(self):
        self.check_output()

    def test_check_grad(self):
        self.check_grad(['X', 'Scale', 'Bias'], 'Out')

    def test_check_grad_stopgrad_dx(self):
        self.check_grad(['Scale', 'Bias'], 'Out', no_grad_set=set('X'))

    def test_check_grad_stopgrad_dscale_dbias(self):
        self.check_grad(['X'], 'Out', no_grad_set=set(['Scale', 'Bias']))

    def init_test_case(self):
        self.shape = [2, 100, 3, 3]
        self.C = 100
        self.layout = 'NCHW'


class TestAffineChannelNHWC(TestAffineChannelOp):
    def init_test_case(self):
        self.shape = [2, 3, 3, 100]
        self.C = 100
        self.layout = 'NHWC'

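    # Skip the stop-gradient checks for NHWC; the inherited output and full
    # gradient checks still run.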
    def test_check_grad_stopgrad_dx(self):
        return

    def test_check_grad_stopgrad_dscale_dbias(self):
        return


class TestAffineChannel2D(TestAffineChannelOp):
    def init_test_case(self):
        self.shape = [2, 100]
        self.C = 100
        self.layout = 'NCHW'

    def test_check_grad_stopgrad_dx(self):
        return

    def test_check_grad_stopgrad_dscale_dbias(self):
        return


# TODO(qingqing): disable unit testing for large shape
# class TestAffineChannelNCHWLargeShape(TestAffineChannelOp):
#    def init_test_case(self):
#        self.shape = [4, 128, 112, 112]
#        self.C = 128
#        self.layout = 'NCHW'
#
#    # The gradient check is very slow for large shapes, so skip check_grad.
#    def test_check_grad(self):
#        pass
#
#    def test_check_grad_stopgrad_dx(self):
#        pass
#
#    def test_check_grad_stopgrad_dscale_dbias(self):
#        pass

# class TestAffineChannelNHWCLargeShape(TestAffineChannelNCHWLargeShape):
#    def init_test_case(self):
#        self.shape = [64, 32, 32, 128]
#        self.C = 128
#        self.layout = 'NHWC'

if __name__ == '__main__':
    unittest.main()