# copyright (c) 2021 PaddlePaddle Authors. All Rights Reserve.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import, division, print_function

import paddle
import paddle.nn as nn
from paddle import ParamAttr
from paddle.nn import AdaptiveAvgPool2D, BatchNorm2D, Conv2D, Dropout, Linear
from paddle.regularizer import L2Decay
from paddle.nn.initializer import KaimingNormal
from ppcls.arch.backbone.base.theseus_layer import TheseusLayer
from ppcls.utils.save_load import load_dygraph_pretrain, load_dygraph_pretrain_from_url

# Download URLs for the ImageNet-pretrained weights of each PP-LCNet variant.
MODEL_URLS = {
    "PPLCNet_x0_25":
    "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/PPLCNet_x0_25_pretrained.pdparams",
    "PPLCNet_x0_35":
    "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/PPLCNet_x0_35_pretrained.pdparams",
    "PPLCNet_x0_5":
    "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/PPLCNet_x0_5_pretrained.pdparams",
    "PPLCNet_x0_75":
    "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/PPLCNet_x0_75_pretrained.pdparams",
    "PPLCNet_x1_0":
    "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/PPLCNet_x1_0_pretrained.pdparams",
    "PPLCNet_x1_5":
    "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/PPLCNet_x1_5_pretrained.pdparams",
    "PPLCNet_x2_0":
    "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/PPLCNet_x2_0_pretrained.pdparams",
    "PPLCNet_x2_5":
    "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/PPLCNet_x2_5_pretrained.pdparams"
}

# Stage (sub-layer) names used by TheseusLayer's feature-extraction hooks
# (return_stages / stages_pattern).
MODEL_STAGES_PATTERN = {
    "PPLCNet": ["blocks2", "blocks3", "blocks4", "blocks5", "blocks6"]
}

__all__ = list(MODEL_URLS.keys())

# Each element(list) represents a depthwise block, which is composed of k, in_c, out_c, s, use_se.
# k: kernel_size
# in_c: input channel number in depthwise block
# out_c: output channel number in depthwise block
# s: stride in depthwise block
# use_se: whether to use SE block
NET_CONFIG = {
    "blocks2":
    #k, in_c, out_c, s, use_se
    [[3, 16, 32, 1, False]],
    "blocks3": [[3, 32, 64, 2, False], [3, 64, 64, 1, False]],
    "blocks4": [[3, 64, 128, 2, False], [3, 128, 128, 1, False]],
    "blocks5": [[3, 128, 256, 2, False], [5, 256, 256, 1, False],
                [5, 256, 256, 1, False], [5, 256, 256, 1, False],
                [5, 256, 256, 1, False], [5, 256, 256, 1, False]],
    "blocks6": [[5, 256, 512, 2, True], [5, 512, 512, 1, True]]
}


def make_divisible(v, divisor=8, min_value=None):
    """Round ``v`` to the nearest multiple of ``divisor``.

    Used to keep channel counts hardware-friendly after width scaling.

    Args:
        v: value to round (may be float after multiplying by a scale).
        divisor: the result is a multiple of this number.
        min_value: lower bound for the result; defaults to ``divisor``.

    Returns:
        int: the rounded value, never more than 10% below ``v``.
    """
    floor = divisor if min_value is None else min_value
    rounded = int(v + divisor / 2) // divisor * divisor
    result = max(floor, rounded)
    # Rounding down by more than 10% would shrink the layer too much;
    # bump up to the next multiple instead.
    if result < 0.9 * v:
        result += divisor
    return result


class ConvBNLayer(TheseusLayer):
    """Conv2D -> BatchNorm2D -> Hardswish building block.

    Args:
        num_channels: number of input channels.
        filter_size: square kernel size of the convolution.
        num_filters: number of output channels.
        stride: convolution stride.
        num_groups: groups for the convolution (== num_channels makes it
            depthwise).
        lr_mult: learning-rate multiplier applied to conv and BN parameters.
    """

    def __init__(self,
                 num_channels,
                 filter_size,
                 num_filters,
                 stride,
                 num_groups=1,
                 lr_mult=1.0):
        super().__init__()

        self.conv = Conv2D(
            in_channels=num_channels,
            out_channels=num_filters,
            kernel_size=filter_size,
            stride=stride,
            # "same"-style padding for odd kernel sizes.
            padding=(filter_size - 1) // 2,
            groups=num_groups,
            weight_attr=ParamAttr(
                initializer=KaimingNormal(), learning_rate=lr_mult),
            bias_attr=False)
        # BN affine parameters are excluded from weight decay (L2Decay(0.0)).
        self.bn = BatchNorm2D(
            num_filters,
            weight_attr=ParamAttr(
                regularizer=L2Decay(0.0), learning_rate=lr_mult),
            bias_attr=ParamAttr(
                regularizer=L2Decay(0.0), learning_rate=lr_mult))
        self.hardswish = nn.Hardswish()

    def forward(self, x):
        return self.hardswish(self.bn(self.conv(x)))


class DepthwiseSeparable(TheseusLayer):
    """Depthwise-separable convolution block.

    Runs a depthwise ConvBNLayer (groups == channels), an optional SE
    attention module, then a 1x1 pointwise ConvBNLayer that maps to the
    requested output channel count.

    Args:
        num_channels: input (and depthwise) channel count.
        num_filters: output channel count of the pointwise conv.
        stride: stride of the depthwise conv.
        dw_size: kernel size of the depthwise conv.
        use_se: whether to insert an SE module after the depthwise conv.
        lr_mult: learning-rate multiplier forwarded to the sub-layers.
    """

    def __init__(self,
                 num_channels,
                 num_filters,
                 stride,
                 dw_size=3,
                 use_se=False,
                 lr_mult=1.0):
        super().__init__()
        self.use_se = use_se
        # Depthwise: one filter per input channel.
        self.dw_conv = ConvBNLayer(
            num_channels=num_channels,
            num_filters=num_channels,
            filter_size=dw_size,
            stride=stride,
            num_groups=num_channels,
            lr_mult=lr_mult)
        if use_se:
            self.se = SEModule(num_channels, lr_mult=lr_mult)
        # Pointwise: 1x1 conv changing the channel count.
        self.pw_conv = ConvBNLayer(
            num_channels=num_channels,
            filter_size=1,
            num_filters=num_filters,
            stride=1,
            lr_mult=lr_mult)

    def forward(self, x):
        out = self.dw_conv(x)
        if self.use_se:
            out = self.se(out)
        return self.pw_conv(out)


class SEModule(TheseusLayer):
    """Squeeze-and-Excitation channel attention.

    Global average pooling followed by a two-conv bottleneck
    (ReLU then Hardsigmoid); the resulting per-channel scores rescale the
    input feature map.

    Args:
        channel: number of input/output channels.
        reduction: bottleneck reduction ratio.
        lr_mult: learning-rate multiplier for the two convs.
    """

    def __init__(self, channel, reduction=4, lr_mult=1.0):
        super().__init__()
        self.avg_pool = AdaptiveAvgPool2D(1)
        self.conv1 = Conv2D(
            in_channels=channel,
            out_channels=channel // reduction,
            kernel_size=1,
            stride=1,
            padding=0,
            weight_attr=ParamAttr(learning_rate=lr_mult),
            bias_attr=ParamAttr(learning_rate=lr_mult))
        self.relu = nn.ReLU()
        self.conv2 = Conv2D(
            in_channels=channel // reduction,
            out_channels=channel,
            kernel_size=1,
            stride=1,
            padding=0,
            weight_attr=ParamAttr(learning_rate=lr_mult),
            bias_attr=ParamAttr(learning_rate=lr_mult))
        self.hardsigmoid = nn.Hardsigmoid()

    def forward(self, x):
        residual = x
        scale = self.avg_pool(x)
        scale = self.conv1(scale)
        scale = self.relu(scale)
        scale = self.conv2(scale)
        scale = self.hardsigmoid(scale)
        # Reweight the input channels by the learned attention scores.
        return paddle.multiply(x=residual, y=scale)


class PPLCNet(TheseusLayer):
    """PP-LCNet backbone: stem conv + 5 stages of depthwise-separable blocks.

    Args:
        stages_pattern: stage names forwarded to TheseusLayer.init_res for
            intermediate-feature extraction.
        scale: width multiplier applied to every channel count.
        class_num: number of classification classes.
        dropout_prob: dropout probability after the last 1x1 conv.
        class_expand: channel count of the optional last 1x1 conv.
        lr_mult_list: 6 learning-rate multipliers — one for conv1 plus one
            per stage blocks2..blocks6.
        use_last_conv: whether to add the 1x1 expansion conv (+ Hardswish +
            Dropout) before the classifier.
        return_patterns: forwarded to TheseusLayer.init_res.
        return_stages: forwarded to TheseusLayer.init_res.
    """

    def __init__(self,
                 stages_pattern,
                 scale=1.0,
                 class_num=1000,
                 dropout_prob=0.2,
                 class_expand=1280,
                 lr_mult_list=[1.0, 1.0, 1.0, 1.0, 1.0, 1.0],
                 use_last_conv=True,
                 return_patterns=None,
                 return_stages=None):
        super().__init__()
        self.scale = scale
        self.class_expand = class_expand
        self.lr_mult_list = lr_mult_list
        self.use_last_conv = use_last_conv
        if isinstance(self.lr_mult_list, str):
            # NOTE(review): eval on a config-supplied string; assumed to come
            # from a trusted config file. ast.literal_eval would be safer.
            self.lr_mult_list = eval(self.lr_mult_list)

        assert isinstance(self.lr_mult_list, (
            list, tuple
        )), "lr_mult_list should be in (list, tuple) but got {}".format(
            type(self.lr_mult_list))
        # One multiplier for conv1 plus one per stage blocks2..blocks6.
        assert len(self.lr_mult_list
                   ) == 6, "lr_mult_list length should be 6 but got {}".format(
                       len(self.lr_mult_list))

        self.conv1 = ConvBNLayer(
            num_channels=3,
            filter_size=3,
            num_filters=make_divisible(16 * scale),
            stride=2,
            lr_mult=self.lr_mult_list[0])

        self.blocks2 = self._make_stage(NET_CONFIG["blocks2"],
                                        self.lr_mult_list[1])
        self.blocks3 = self._make_stage(NET_CONFIG["blocks3"],
                                        self.lr_mult_list[2])
        self.blocks4 = self._make_stage(NET_CONFIG["blocks4"],
                                        self.lr_mult_list[3])
        self.blocks5 = self._make_stage(NET_CONFIG["blocks5"],
                                        self.lr_mult_list[4])
        self.blocks6 = self._make_stage(NET_CONFIG["blocks6"],
                                        self.lr_mult_list[5])

        self.avg_pool = AdaptiveAvgPool2D(1)
        # Actual channel count coming out of blocks6 after width scaling.
        backbone_out = make_divisible(NET_CONFIG["blocks6"][-1][2] * scale)
        if self.use_last_conv:
            self.last_conv = Conv2D(
                in_channels=backbone_out,
                out_channels=self.class_expand,
                kernel_size=1,
                stride=1,
                padding=0,
                bias_attr=False)
            self.hardswish = nn.Hardswish()
            self.dropout = Dropout(p=dropout_prob, mode="downscale_in_infer")
        else:
            self.last_conv = None
        self.flatten = nn.Flatten(start_axis=1, stop_axis=-1)
        # BUGFIX: when use_last_conv=False the classifier input must match the
        # width-scaled blocks6 output, not the unscaled config value.
        self.fc = Linear(self.class_expand
                         if self.use_last_conv else backbone_out, class_num)

        super().init_res(
            stages_pattern,
            return_patterns=return_patterns,
            return_stages=return_stages)

    def _make_stage(self, config, lr_mult):
        # Build one stage: a Sequential of DepthwiseSeparable blocks, one per
        # [k, in_c, out_c, s, use_se] row of NET_CONFIG.
        return nn.Sequential(*[
            DepthwiseSeparable(
                num_channels=make_divisible(in_c * self.scale),
                num_filters=make_divisible(out_c * self.scale),
                dw_size=k,
                stride=s,
                use_se=se,
                lr_mult=lr_mult) for k, in_c, out_c, s, se in config
        ])

    def forward(self, x):
        x = self.conv1(x)

        x = self.blocks2(x)
        x = self.blocks3(x)
        x = self.blocks4(x)
        x = self.blocks5(x)
        x = self.blocks6(x)

        x = self.avg_pool(x)
        if self.last_conv is not None:
            x = self.last_conv(x)
            x = self.hardswish(x)
            x = self.dropout(x)
        x = self.flatten(x)
        x = self.fc(x)
        return x


def _load_pretrained(pretrained, model, model_url, use_ssld):
    """Load weights into `model` according to `pretrained`.

    A str is treated as a local weights path, True downloads from
    `model_url` (optionally the SSLD-distilled variant), False is a no-op.
    Any other type raises RuntimeError.
    """
    if isinstance(pretrained, str):
        load_dygraph_pretrain(model, pretrained)
    elif pretrained is True:
        load_dygraph_pretrain_from_url(model, model_url, use_ssld=use_ssld)
    elif pretrained is False:
        pass
    else:
        raise RuntimeError(
            "pretrained type is not available. Please use `string` or `boolean` type."
        )


def PPLCNet_x0_25(pretrained=False, use_ssld=False, **kwargs):
    """Build PPLCNet_x0_25 (width multiplier 0.25).

    Args:
        pretrained: bool or str. True loads the released pretrained weights,
            a str is treated as a local weights path, False skips loading.
        use_ssld: use the SSLD-distilled weights when pretrained is True.

    Returns:
        nn.Layer. Specific `PPLCNet_x0_25` model depends on args.
    """
    model = PPLCNet(
        stages_pattern=MODEL_STAGES_PATTERN["PPLCNet"], scale=0.25, **kwargs)
    _load_pretrained(pretrained, model, MODEL_URLS["PPLCNet_x0_25"], use_ssld)
    return model


def PPLCNet_x0_35(pretrained=False, use_ssld=False, **kwargs):
    """Build PPLCNet_x0_35 (width multiplier 0.35).

    Args:
        pretrained: bool or str. True loads the released pretrained weights,
            a str is treated as a local weights path, False skips loading.
        use_ssld: use the SSLD-distilled weights when pretrained is True.

    Returns:
        nn.Layer. Specific `PPLCNet_x0_35` model depends on args.
    """
    model = PPLCNet(
        stages_pattern=MODEL_STAGES_PATTERN["PPLCNet"], scale=0.35, **kwargs)
    _load_pretrained(pretrained, model, MODEL_URLS["PPLCNet_x0_35"], use_ssld)
    return model


def PPLCNet_x0_5(pretrained=False, use_ssld=False, **kwargs):
    """Build PPLCNet_x0_5 (width multiplier 0.5).

    Args:
        pretrained: bool or str. True loads the released pretrained weights,
            a str is treated as a local weights path, False skips loading.
        use_ssld: use the SSLD-distilled weights when pretrained is True.

    Returns:
        nn.Layer. Specific `PPLCNet_x0_5` model depends on args.
    """
    model = PPLCNet(
        stages_pattern=MODEL_STAGES_PATTERN["PPLCNet"], scale=0.5, **kwargs)
    _load_pretrained(pretrained, model, MODEL_URLS["PPLCNet_x0_5"], use_ssld)
    return model


def PPLCNet_x0_75(pretrained=False, use_ssld=False, **kwargs):
    """Build PPLCNet_x0_75 (width multiplier 0.75).

    Args:
        pretrained: bool or str. True loads the released pretrained weights,
            a str is treated as a local weights path, False skips loading.
        use_ssld: use the SSLD-distilled weights when pretrained is True.

    Returns:
        nn.Layer. Specific `PPLCNet_x0_75` model depends on args.
    """
    model = PPLCNet(
        stages_pattern=MODEL_STAGES_PATTERN["PPLCNet"], scale=0.75, **kwargs)
    _load_pretrained(pretrained, model, MODEL_URLS["PPLCNet_x0_75"], use_ssld)
    return model


def PPLCNet_x1_0(pretrained=False, use_ssld=False, **kwargs):
    """Build PPLCNet_x1_0 (width multiplier 1.0, the reference model).

    Args:
        pretrained: bool or str. True loads the released pretrained weights,
            a str is treated as a local weights path, False skips loading.
        use_ssld: use the SSLD-distilled weights when pretrained is True.

    Returns:
        nn.Layer. Specific `PPLCNet_x1_0` model depends on args.
    """
    model = PPLCNet(
        stages_pattern=MODEL_STAGES_PATTERN["PPLCNet"], scale=1.0, **kwargs)
    _load_pretrained(pretrained, model, MODEL_URLS["PPLCNet_x1_0"], use_ssld)
    return model


def PPLCNet_x1_5(pretrained=False, use_ssld=False, **kwargs):
    """Build PPLCNet_x1_5 (width multiplier 1.5).

    Args:
        pretrained: bool or str. True loads the released pretrained weights,
            a str is treated as a local weights path, False skips loading.
        use_ssld: use the SSLD-distilled weights when pretrained is True.

    Returns:
        nn.Layer. Specific `PPLCNet_x1_5` model depends on args.
    """
    model = PPLCNet(
        stages_pattern=MODEL_STAGES_PATTERN["PPLCNet"], scale=1.5, **kwargs)
    _load_pretrained(pretrained, model, MODEL_URLS["PPLCNet_x1_5"], use_ssld)
    return model


def PPLCNet_x2_0(pretrained=False, use_ssld=False, **kwargs):
    """Build PPLCNet_x2_0 (width multiplier 2.0).

    Args:
        pretrained: bool or str. True loads the released pretrained weights,
            a str is treated as a local weights path, False skips loading.
        use_ssld: use the SSLD-distilled weights when pretrained is True.

    Returns:
        nn.Layer. Specific `PPLCNet_x2_0` model depends on args.
    """
    model = PPLCNet(
        stages_pattern=MODEL_STAGES_PATTERN["PPLCNet"], scale=2.0, **kwargs)
    _load_pretrained(pretrained, model, MODEL_URLS["PPLCNet_x2_0"], use_ssld)
    return model


def PPLCNet_x2_5(pretrained=False, use_ssld=False, **kwargs):
    """Build PPLCNet_x2_5 (width multiplier 2.5).

    Args:
        pretrained: bool or str. True loads the released pretrained weights,
            a str is treated as a local weights path, False skips loading.
        use_ssld: use the SSLD-distilled weights when pretrained is True.

    Returns:
        nn.Layer. Specific `PPLCNet_x2_5` model depends on args.
    """
    model = PPLCNet(
        stages_pattern=MODEL_STAGES_PATTERN["PPLCNet"], scale=2.5, **kwargs)
    _load_pretrained(pretrained, model, MODEL_URLS["PPLCNet_x2_5"], use_ssld)
    return model