#   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import tempfile
import unittest

import numpy as np
from imperative_test_utils import fix_model_dict, train_lenet

import paddle
from paddle import fluid
from paddle.framework import core, set_flags
from paddle.nn import (
    BatchNorm2D,
    Conv2D,
    Linear,
    MaxPool2D,
    Sequential,
    Softmax,
)
from paddle.nn.layer import LeakyReLU, PReLU, ReLU, Sigmoid
from paddle.quantization import ImperativeQuantAware

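# The module defaults to static mode; the test below switches back to
# dynamic graph mode explicitly with fluid.dygraph.guard().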
paddle.enable_static()

os.environ["CPU_NUM"] = "1"
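# cuDNN may pick non-deterministic kernels by default; force determinism
# so the loss-decrease assertions below are reproducible on GPU.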
if core.is_compiled_with_cuda():
    set_flags({"FLAGS_cudnn_deterministic": True})


def get_valid_warning_num(warning, w):
    """Count the caught warnings whose message contains ``warning``."""
    return sum(1 for item in w if warning in str(item.message))


class ImperativeLenet(paddle.nn.Layer):
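    """LeNet-style network: Conv2D/BatchNorm2D/MaxPool2D features plus a
    Linear head, exercising several activation types (ReLU, PReLU,
    LeakyReLU, Sigmoid, Softmax) under quantization."""
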
    def __init__(self, num_classes=10):
        super().__init__()
        conv2d_w1_attr = paddle.ParamAttr(name="conv2d_w_1")
        conv2d_w2_attr = paddle.ParamAttr(name="conv2d_w_2")
        fc_w1_attr = paddle.ParamAttr(name="fc_w_1")
        fc_w2_attr = paddle.ParamAttr(name="fc_w_2")
        fc_w3_attr = paddle.ParamAttr(name="fc_w_3")
        conv2d_b2_attr = paddle.ParamAttr(name="conv2d_b_2")
        fc_b1_attr = paddle.ParamAttr(name="fc_b_1")
        fc_b2_attr = paddle.ParamAttr(name="fc_b_2")
        fc_b3_attr = paddle.ParamAttr(name="fc_b_3")
        self.features = Sequential(
            Conv2D(
                in_channels=1,
                out_channels=6,
                kernel_size=3,
                stride=1,
                padding=1,
                weight_attr=conv2d_w1_attr,
                bias_attr=False,
            ),
            BatchNorm2D(6),
            ReLU(),
            MaxPool2D(kernel_size=2, stride=2),
            Conv2D(
                in_channels=6,
                out_channels=16,
                kernel_size=5,
                stride=1,
                padding=0,
                weight_attr=conv2d_w2_attr,
                bias_attr=conv2d_b2_attr,
            ),
            BatchNorm2D(16),
            PReLU(),
            MaxPool2D(kernel_size=2, stride=2),
        )

        self.fc = Sequential(
            Linear(
                in_features=400,
                out_features=120,
                weight_attr=fc_w1_attr,
                bias_attr=fc_b1_attr,
            ),
            LeakyReLU(),
            Linear(
                in_features=120,
                out_features=84,
                weight_attr=fc_w2_attr,
                bias_attr=fc_b2_attr,
            ),
            Sigmoid(),
            Linear(
                in_features=84,
                out_features=num_classes,
                weight_attr=fc_w3_attr,
                bias_attr=fc_b3_attr,
            ),
            Softmax(),
        )

    def forward(self, inputs):
        x = self.features(inputs)
        x = paddle.flatten(x, 1)
        x = self.fc(x)
        return x


class TestImperativeOutScale(unittest.TestCase):
    def setUp(self):
        self.root_path = tempfile.TemporaryDirectory()
        self.param_save_path = os.path.join(
            self.root_path.name, "lenet.pdparams"
        )
        self.save_path = os.path.join(
            self.root_path.name, "lenet_dynamic_outscale_infer_model"
        )

    def tearDown(self):
        self.root_path.cleanup()

    def test_out_scale_acc(self):
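        """Train a quantized LeNet, reload its weights into a fresh model,
        retrain, and export an inference model with the output scales."""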
        seed = 1000
        lr = 0.001

        weight_quantize_type = 'abs_max'
        activation_quantize_type = 'moving_average_abs_max'
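
        # ImperativeQuantAware rewrites the model in place: quantize()
        # inserts fake quant/dequant ops using the configured scale types.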
        imperative_out_scale = ImperativeQuantAware(
            weight_quantize_type=weight_quantize_type,
            activation_quantize_type=activation_quantize_type,
        )

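        # First pass: quantize a freshly initialized LeNet and train it.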
        with fluid.dygraph.guard():
            np.random.seed(seed)
            paddle.static.default_main_program().random_seed = seed
            paddle.static.default_startup_program().random_seed = seed

            lenet = ImperativeLenet()
            lenet = fix_model_dict(lenet)
            imperative_out_scale.quantize(lenet)

            reader = paddle.batch(
                paddle.dataset.mnist.test(), batch_size=32, drop_last=True
            )
            adam = paddle.optimizer.Adam(
                learning_rate=lr, parameters=lenet.parameters()
            )
            loss_list = train_lenet(lenet, reader, adam)
            lenet.eval()

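        # Snapshot the trained, quantized parameters for the reload pass.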
        save_dict = lenet.state_dict()
        paddle.save(save_dict, self.param_save_path)

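        # The recorded loss should decrease monotonically across the logged
        # steps; otherwise the imperative QAT training is considered broken.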
        for i in range(len(loss_list) - 1):
            self.assertTrue(
                loss_list[i] > loss_list[i + 1],
                msg='Failed to do the imperative qat.',
            )

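        # Second pass: rebuild the model, re-apply quantization, restore
        # the saved weights, and fine-tune again.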
        with fluid.dygraph.guard():
            lenet = ImperativeLenet()
            load_dict = paddle.load(self.param_save_path)
            imperative_out_scale.quantize(lenet)
            lenet.set_dict(load_dict)

            reader = paddle.batch(
                paddle.dataset.mnist.test(), batch_size=32, drop_last=True
            )
            adam = paddle.optimizer.Adam(
                learning_rate=lr, parameters=lenet.parameters()
            )
            loss_list = train_lenet(lenet, reader, adam)
            lenet.eval()

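        # Export an inference model; the output scales gathered during
        # training are embedded in the saved program.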
        imperative_out_scale.save_quantized_model(
            layer=lenet,
            path=self.save_path,
            input_spec=[
                paddle.static.InputSpec(
                    shape=[None, 1, 28, 28], dtype='float32'
                )
            ],
        )

        for i in range(len(loss_list) - 1):
            self.assertTrue(
                loss_list[i] > loss_list[i + 1],
                msg='Failed to do the imperative qat.',
            )


if __name__ == '__main__':
    unittest.main()