# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import math
import os
import tempfile
import time
import unittest

import numpy as np

from predictor_utils import PredictorTools

import paddle
from paddle.fluid import core

# Global training hyper-parameters shared by all test cases below.
SEED = 2020
IMAGENET1000 = 1281167
base_lr = 0.001
momentum_rate = 0.9
l2_decay = 1e-4
# NOTE: Reduce batch_size from 8 to 2 to avoid unittest timeout.
batch_size = 2
epoch_num = 1
place = (
    paddle.CUDAPlace(0) if paddle.is_compiled_with_cuda() else paddle.CPUPlace()
)


if paddle.is_compiled_with_cuda():
    # Deterministic cudnn kernels so static/dygraph losses are comparable.
    paddle.fluid.set_flags({'FLAGS_cudnn_deterministic': True})


def optimizer_setting(parameter_list=None):
    """Build the Momentum optimizer used for training.

    Args:
        parameter_list: Parameters to optimize; forwarded to
            ``paddle.optimizer.Momentum`` as ``parameters``.

    Returns:
        A ``paddle.optimizer.Momentum`` configured from the module-level
        hyper-parameters ``base_lr``, ``momentum_rate`` and ``l2_decay``
        (L2 weight decay).
    """
    optimizer = paddle.optimizer.Momentum(
        learning_rate=base_lr,
        momentum=momentum_rate,
        weight_decay=paddle.regularizer.L2Decay(l2_decay),
        parameters=parameter_list,
    )

    return optimizer


class ConvBNLayer(paddle.nn.Layer):
    """Conv2D followed by BatchNorm, with an optional activation.

    The convolution uses ``(filter_size - 1) // 2`` padding and no bias;
    the activation (if any) is applied by the BatchNorm layer itself.
    """

    def __init__(
        self,
        num_channels,
        num_filters,
        filter_size,
        stride=1,
        groups=1,
        act=None,
    ):
        super().__init__()

        self._conv = paddle.nn.Conv2D(
            in_channels=num_channels,
            out_channels=num_filters,
            kernel_size=filter_size,
            stride=stride,
            padding=(filter_size - 1) // 2,
            groups=groups,
            bias_attr=False,
        )

        # BatchNorm carries the activation so conv output stays linear.
        self._batch_norm = paddle.nn.BatchNorm(num_filters, act=act)

    def forward(self, inputs):
        y = self._conv(inputs)
        y = self._batch_norm(y)

        return y


class BottleneckBlock(paddle.nn.Layer):
    """ResNet bottleneck block: 1x1 -> 3x3 -> 1x1 convs plus a shortcut.

    When ``shortcut`` is False, the identity path is replaced by a 1x1
    ConvBNLayer that matches the output channel count (num_filters * 4).
    """

    def __init__(self, num_channels, num_filters, stride, shortcut=True):
        super().__init__()

        self.conv0 = ConvBNLayer(
            num_channels=num_channels,
            num_filters=num_filters,
            filter_size=1,
            act='relu',
        )
        self.conv1 = ConvBNLayer(
            num_channels=num_filters,
            num_filters=num_filters,
            filter_size=3,
            stride=stride,
            act='relu',
        )
        self.conv2 = ConvBNLayer(
            num_channels=num_filters,
            num_filters=num_filters * 4,
            filter_size=1,
            act=None,
        )

        if not shortcut:
            # Projection shortcut to match the expanded channel count.
            self.short = ConvBNLayer(
                num_channels=num_channels,
                num_filters=num_filters * 4,
                filter_size=1,
                stride=stride,
            )

        self.shortcut = shortcut

        self._num_channels_out = num_filters * 4

    def forward(self, inputs):
        y = self.conv0(inputs)
        conv1 = self.conv1(y)
        conv2 = self.conv2(conv1)

        if self.shortcut:
            short = inputs
        else:
            short = self.short(inputs)

        y = paddle.add(x=short, y=conv2)

        # Apply the final ReLU through a LayerHelper (legacy fluid-style
        # activation append) rather than a functional call.
        layer_helper = paddle.fluid.layer_helper.LayerHelper(
            self.full_name(), act='relu'
        )
        return layer_helper.append_activation(y)


class ResNet(paddle.nn.Layer):
    """ResNet (50/101/152) with a softmax classification head.

    Args:
        layers: Network depth; must be one of 50, 101 or 152.
        class_dim: Number of output classes.
    """

    def __init__(self, layers=50, class_dim=102):
        super().__init__()

        self.layers = layers
        supported_layers = [50, 101, 152]
        assert (
            layers in supported_layers
        ), "supported layers are {} but input layer is {}".format(
            supported_layers, layers
        )

        # Number of bottleneck blocks per stage for each supported depth.
        if layers == 50:
            depth = [3, 4, 6, 3]
        elif layers == 101:
            depth = [3, 4, 23, 3]
        elif layers == 152:
            depth = [3, 8, 36, 3]
        num_channels = [64, 256, 512, 1024]
        num_filters = [64, 128, 256, 512]

        self.conv = ConvBNLayer(
            num_channels=3, num_filters=64, filter_size=7, stride=2, act='relu'
        )
        self.pool2d_max = paddle.nn.MaxPool2D(
            kernel_size=3, stride=2, padding=1
        )

        self.bottleneck_block_list = []
        for block in range(len(depth)):
            shortcut = False
            for i in range(depth[block]):
                bottleneck_block = self.add_sublayer(
                    'bb_%d_%d' % (block, i),
                    BottleneckBlock(
                        # First block of a stage takes the stage's input
                        # channels; later blocks take the expanded width.
                        num_channels=num_channels[block]
                        if i == 0
                        else num_filters[block] * 4,
                        num_filters=num_filters[block],
                        # Downsample at the start of every stage but the first.
                        stride=2 if i == 0 and block != 0 else 1,
                        shortcut=shortcut,
                    ),
                )
                self.bottleneck_block_list.append(bottleneck_block)
                shortcut = True
        self.pool2d_avg = paddle.nn.AdaptiveAvgPool2D(1)

        # Flattened feature size after global average pooling (C * 1 * 1).
        self.pool2d_avg_output = num_filters[len(num_filters) - 1] * 4 * 1 * 1

        stdv = 1.0 / math.sqrt(2048 * 1.0)

        self.out = paddle.nn.Linear(
            in_features=self.pool2d_avg_output,
            out_features=class_dim,
            weight_attr=paddle.ParamAttr(
                initializer=paddle.nn.initializer.Uniform(-stdv, stdv)
            ),
        )

    @paddle.jit.to_static
    def forward(self, inputs):
        y = self.conv(inputs)
        y = self.pool2d_max(y)
        for bottleneck_block in self.bottleneck_block_list:
            y = bottleneck_block(y)
        y = self.pool2d_avg(y)
        y = paddle.reshape(y, shape=[-1, self.pool2d_avg_output])
        pred = self.out(y)
        pred = paddle.nn.functional.softmax(pred)

        return pred


def reader_decorator(reader):
    """Wrap *reader* so each sample becomes a (float32 image, int64 label) pair.

    Images are reshaped to (3, 224, 224); labels to a length-1 array.
    """

    def __reader__():
        for sample in reader():
            image = np.reshape(
                np.asarray(sample[0], dtype='float32'), (3, 224, 224)
            )
            target = np.reshape(np.asarray(sample[1], dtype='int64'), (1,))
            yield image, target

    return __reader__


class TestResnet(unittest.TestCase):
    """Dygraph-to-static tests for the ResNet v2 model.

    Trains a few mini-batches in both dygraph and to_static mode, asserts the
    losses match, then cross-checks inference results across the dygraph
    state-dict, saved static model, jit-loaded model and analysis predictor.
    """

    def setUp(self):
        # Per-test temp dir holding every saved model artifact.
        self.temp_dir = tempfile.TemporaryDirectory()

        self.model_save_dir = os.path.join(self.temp_dir.name, "./inference")
        self.model_save_prefix = os.path.join(
            self.temp_dir.name, "./inference/resnet_v2"
        )
        self.model_filename = (
            "resnet_v2" + paddle.jit.translated_layer.INFER_MODEL_SUFFIX
        )
        self.params_filename = (
            "resnet_v2" + paddle.jit.translated_layer.INFER_PARAMS_SUFFIX
        )
        self.dy_state_dict_save_path = os.path.join(
            self.temp_dir.name, "./resnet_v2.dygraph"
        )

    def tearDown(self):
        self.temp_dir.cleanup()

    def do_train(self, to_static):
        """
        Tests model decorated by `dygraph_to_static_output` in static graph mode. For users, the model is defined in dygraph mode and trained in static graph mode.
        """
        paddle.disable_static(place)
        np.random.seed(SEED)
        paddle.seed(SEED)
        paddle.framework.random._manual_program_seed(SEED)

        train_reader = paddle.batch(
            reader_decorator(paddle.dataset.flowers.train(use_xmap=False)),
            batch_size=batch_size,
            drop_last=True,
        )
        data_loader = paddle.io.DataLoader.from_generator(
            capacity=5, iterable=True
        )
        data_loader.set_sample_list_generator(train_reader)

        resnet = ResNet()
        optimizer = optimizer_setting(parameter_list=resnet.parameters())

        for epoch in range(epoch_num):
            total_loss = 0.0
            total_acc1 = 0.0
            total_acc5 = 0.0
            total_sample = 0

            for batch_id, data in enumerate(data_loader()):
                start_time = time.time()
                img, label = data

                pred = resnet(img)
                loss = paddle.nn.functional.cross_entropy(
                    input=pred, label=label
                )
                avg_loss = paddle.mean(x=loss)
                acc_top1 = paddle.metric.accuracy(input=pred, label=label, k=1)
                acc_top5 = paddle.metric.accuracy(input=pred, label=label, k=5)

                avg_loss.backward()
                optimizer.minimize(avg_loss)
                resnet.clear_gradients()

                total_loss += avg_loss
                total_acc1 += acc_top1
                total_acc5 += acc_top5
                total_sample += 1

                end_time = time.time()
                if batch_id % 2 == 0:
                    print(
                        "epoch %d | batch step %d, loss %0.3f, acc1 %0.3f, acc5 %0.3f, time %f"
                        % (
                            epoch,
                            batch_id,
                            total_loss.numpy() / total_sample,
                            total_acc1.numpy() / total_sample,
                            total_acc5.numpy() / total_sample,
                            end_time - start_time,
                        )
                    )
                # Stop after 11 batches: save the model, then break.
                if batch_id == 10:
                    if to_static:
                        paddle.jit.save(resnet, self.model_save_prefix)
                    else:
                        paddle.save(
                            resnet.state_dict(),
                            self.dy_state_dict_save_path + '.pdparams',
                        )
                    # avoid dataloader throw abort signal
                    data_loader._reset()
                    break
        paddle.enable_static()

        return total_loss.numpy()

    def predict_dygraph(self, data):
        """Predict in plain dygraph mode from the saved state dict."""
        paddle.jit.enable_to_static(False)
        paddle.disable_static(place)
        resnet = ResNet()

        model_dict = paddle.load(self.dy_state_dict_save_path + '.pdparams')
        resnet.set_dict(model_dict)
        resnet.eval()

        pred_res = resnet(
            paddle.to_tensor(
                data=data, dtype=None, place=None, stop_gradient=True
            )
        )

        ret = pred_res.numpy()
        paddle.enable_static()
        return ret

    def predict_static(self, data):
        """Predict with the static-graph executor from the saved inference model."""
        exe = paddle.static.Executor(place)
        [
            inference_program,
            feed_target_names,
            fetch_targets,
        ] = paddle.static.load_inference_model(
            self.model_save_dir,
            executor=exe,
            model_filename=self.model_filename,
            params_filename=self.params_filename,
        )

        pred_res = exe.run(
            inference_program,
            feed={feed_target_names[0]: data},
            fetch_list=fetch_targets,
        )

        return pred_res[0]

    def predict_dygraph_jit(self, data):
        """Predict with the jit-loaded translated layer in dygraph mode."""
        paddle.disable_static(place)
        resnet = paddle.jit.load(self.model_save_prefix)
        resnet.eval()

        pred_res = resnet(data)

        ret = pred_res.numpy()
        paddle.enable_static()
        return ret

    def predict_analysis_inference(self, data):
        """Predict through the analysis (inference C++) predictor."""
        output = PredictorTools(
            self.model_save_dir,
            self.model_filename,
            self.params_filename,
            [data],
        )
        (out,) = output()
        return out

    def train(self, to_static):
        """Toggle to_static translation and run training."""
        paddle.jit.enable_to_static(to_static)
        return self.do_train(to_static)

    def verify_predict(self):
        """Check all four predict paths produce matching outputs."""
        image = np.random.random([1, 3, 224, 224]).astype('float32')
        dy_pre = self.predict_dygraph(image)
        st_pre = self.predict_static(image)
        dy_jit_pre = self.predict_dygraph_jit(image)
        predictor_pre = self.predict_analysis_inference(image)
        np.testing.assert_allclose(
            dy_pre,
            st_pre,
            rtol=1e-05,
            err_msg=f'dy_pre:\n {dy_pre}\n, st_pre: \n{st_pre}.',
        )
        np.testing.assert_allclose(
            dy_jit_pre,
            st_pre,
            rtol=1e-05,
            err_msg='dy_jit_pre:\n {}\n, st_pre: \n{}.'.format(
                dy_jit_pre, st_pre
            ),
        )
        np.testing.assert_allclose(
            predictor_pre,
            st_pre,
            rtol=1e-05,
            err_msg='predictor_pre:\n {}\n, st_pre: \n{}.'.format(
                predictor_pre, st_pre
            ),
        )

    def test_resnet(self):
        static_loss = self.train(to_static=True)
        dygraph_loss = self.train(to_static=False)
        np.testing.assert_allclose(
            static_loss,
            dygraph_loss,
            rtol=1e-05,
            err_msg='static_loss: {} \n dygraph_loss: {}'.format(
                static_loss, dygraph_loss
            ),
        )
        self.verify_predict()

    def test_resnet_composite(self):
        # Train once with composite (prim) backward enabled, skipping
        # batch_norm decomposition, and compare against plain dygraph.
        core._set_prim_backward_enabled(True)
        core._add_skip_comp_ops("batch_norm")
        static_loss = self.train(to_static=True)
        core._set_prim_backward_enabled(False)
        dygraph_loss = self.train(to_static=False)
        np.testing.assert_allclose(
            static_loss,
            dygraph_loss,
            rtol=1e-05,
            err_msg='static_loss: {} \n dygraph_loss: {}'.format(
                static_loss, dygraph_loss
            ),
        )

    def test_in_static_mode_mkldnn(self):
        paddle.fluid.set_flags({'FLAGS_use_mkldnn': True})
        try:
            if paddle.fluid.core.is_compiled_with_mkldnn():
                self.train(to_static=True)
        finally:
            # Always restore the flag so later tests are unaffected.
            paddle.fluid.set_flags({'FLAGS_use_mkldnn': False})


# Run all test cases when executed as a script.
if __name__ == '__main__':
    unittest.main()