# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import math
import paddle
from paddle import nn
from paddle import ParamAttr
from paddle.nn.functional import upsample
from paddle.nn.initializer import Uniform

from ppcls.arch.backbone.base.theseus_layer import TheseusLayer, Identity
from ppcls.utils.save_load import load_dygraph_pretrain, load_dygraph_pretrain_from_url

MODEL_URLS = {
    "HRNet_W18_C":
    "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/HRNet_W18_C_pretrained.pdparams",
    "HRNet_W30_C":
    "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/HRNet_W30_C_pretrained.pdparams",
    "HRNet_W32_C":
    "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/HRNet_W32_C_pretrained.pdparams",
    "HRNet_W40_C":
    "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/HRNet_W40_C_pretrained.pdparams",
    "HRNet_W44_C":
    "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/HRNet_W44_C_pretrained.pdparams",
    "HRNet_W48_C":
    "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/HRNet_W48_C_pretrained.pdparams",
    "HRNet_W64_C":
    "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/HRNet_W64_C_pretrained.pdparams"
}

MODEL_STAGES_PATTERN = {"HRNet": ["st4"]}

__all__ = list(MODEL_URLS.keys())


def _create_act(act):
    if act == "hardswish":
        return nn.Hardswish()
    elif act == "relu":
        return nn.ReLU()
    elif act is None:
        return Identity()
    else:
        raise RuntimeError(
            "The activation function is not supported: {}".format(act))


class ConvBNLayer(TheseusLayer):
    def __init__(self,
                 num_channels,
                 num_filters,
                 filter_size,
                 stride=1,
                 groups=1,
                 act="relu"):
        super().__init__()

        self.conv = nn.Conv2D(
            in_channels=num_channels,
            out_channels=num_filters,
            kernel_size=filter_size,
            stride=stride,
            padding=(filter_size - 1) // 2,
            groups=groups,
            bias_attr=False)
        self.bn = nn.BatchNorm(num_filters, act=None)
        self.act = _create_act(act)

    def forward(self, x):
        x = self.conv(x)
        x = self.bn(x)
        x = self.act(x)
        return x


class BottleneckBlock(TheseusLayer):
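    # 1x1 -> 3x3 -> 1x1 residual bottleneck that expands channels 4x, with an
    # optional 1x1 projection on the skip path when the shape changes.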
    def __init__(self,
                 num_channels,
                 num_filters,
                 has_se,
                 stride=1,
                 downsample=False):
        super().__init__()

        self.has_se = has_se
        self.downsample = downsample

        self.conv1 = ConvBNLayer(
            num_channels=num_channels,
            num_filters=num_filters,
            filter_size=1,
            act="relu")
        self.conv2 = ConvBNLayer(
            num_channels=num_filters,
            num_filters=num_filters,
            filter_size=3,
            stride=stride,
            act="relu")
        self.conv3 = ConvBNLayer(
            num_channels=num_filters,
            num_filters=num_filters * 4,
            filter_size=1,
            act=None)

        if self.downsample:
            self.conv_down = ConvBNLayer(
                num_channels=num_channels,
                num_filters=num_filters * 4,
                filter_size=1,
                act=None)

        if self.has_se:
            self.se = SELayer(
                num_channels=num_filters * 4,
                num_filters=num_filters * 4,
                reduction_ratio=16)
        self.relu = nn.ReLU()

    def forward(self, x, res_dict=None):
        residual = x
        x = self.conv1(x)
        x = self.conv2(x)
        x = self.conv3(x)
        if self.downsample:
            residual = self.conv_down(residual)
        if self.has_se:
            x = self.se(x)
        x = paddle.add(x=residual, y=x)
        x = self.relu(x)
        return x


class BasicBlock(nn.Layer):
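    # Residual block with two 3x3 convs at stride 1; resolution and channel
    # count are preserved, so the skip connection needs no projection.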
    def __init__(self, num_channels, num_filters, has_se=False):
        super().__init__()

        self.has_se = has_se

        self.conv1 = ConvBNLayer(
            num_channels=num_channels,
            num_filters=num_filters,
            filter_size=3,
            stride=1,
            act="relu")
        self.conv2 = ConvBNLayer(
            num_channels=num_filters,
            num_filters=num_filters,
            filter_size=3,
            stride=1,
            act=None)

        if self.has_se:
            self.se = SELayer(
                num_channels=num_filters,
                num_filters=num_filters,
                reduction_ratio=16)
        self.relu = nn.ReLU()

    def forward(self, x):
        residual = x
        x = self.conv1(x)
        x = self.conv2(x)

        if self.has_se:
            x = self.se(x)

        x = paddle.add(x=residual, y=x)
        x = self.relu(x)
        return x


class SELayer(TheseusLayer):
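    # Squeeze-and-Excitation: global average pooling squeezes each channel to
    # a scalar, two fc layers produce per-channel gates in (0, 1), and the
    # input feature map is rescaled by those gates.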
    def __init__(self, num_channels, num_filters, reduction_ratio):
        super().__init__()

        self.avg_pool = nn.AdaptiveAvgPool2D(1)

        self._num_channels = num_channels

        med_ch = int(num_channels / reduction_ratio)
        stdv = 1.0 / math.sqrt(num_channels * 1.0)
        self.fc_squeeze = nn.Linear(
            num_channels,
            med_ch,
            weight_attr=ParamAttr(initializer=Uniform(-stdv, stdv)))
        self.relu = nn.ReLU()
        stdv = 1.0 / math.sqrt(med_ch * 1.0)
        self.fc_excitation = nn.Linear(
            med_ch,
            num_filters,
            weight_attr=ParamAttr(initializer=Uniform(-stdv, stdv)))
        self.sigmoid = nn.Sigmoid()

    def forward(self, x, res_dict=None):
        residual = x
        x = self.avg_pool(x)
        x = paddle.squeeze(x, axis=[2, 3])
        x = self.fc_squeeze(x)
        x = self.relu(x)
        x = self.fc_excitation(x)
        x = self.sigmoid(x)
        x = paddle.unsqueeze(x, axis=[2, 3])
        x = residual * x
        return x


class Stage(TheseusLayer):
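    # A stage stacks `num_modules` HighResolutionModules over the same set of
    # parallel branches.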
    def __init__(self, num_modules, num_filters, has_se=False):
        super().__init__()

        self._num_modules = num_modules

        self.stage_func_list = nn.LayerList()
        for i in range(num_modules):
            self.stage_func_list.append(
                HighResolutionModule(
                    num_filters=num_filters, has_se=has_se))

    def forward(self, x, res_dict=None):
        for idx in range(self._num_modules):
            x = self.stage_func_list[idx](x)
        return x


class HighResolutionModule(TheseusLayer):
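    # One HRNet module: every resolution branch runs four BasicBlocks, then
    # all branches exchange information through FuseLayers.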
    def __init__(self, num_filters, has_se=False):
        super().__init__()

        self.basic_block_list = nn.LayerList()

        for i in range(len(num_filters)):
            self.basic_block_list.append(
                nn.Sequential(* [
                    BasicBlock(
                        num_channels=num_filters[i],
                        num_filters=num_filters[i],
                        has_se=has_se) for j in range(4)
                ]))

        self.fuse_func = FuseLayers(
            in_channels=num_filters, out_channels=num_filters)

    def forward(self, x, res_dict=None):
        out = []
        for idx, xi in enumerate(x):
            basic_block_list = self.basic_block_list[idx]
            for basic_block_func in basic_block_list:
                xi = basic_block_func(xi)
            out.append(xi)
        out = self.fuse_func(out)
        return out


class FuseLayers(TheseusLayer):
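    # Multi-resolution fusion: for target branch i, a lower-resolution input
    # (j > i) is channel-aligned with a 1x1 conv and nearest-upsampled by
    # 2**(j - i), while a higher-resolution input (j < i) is reduced by a
    # chain of stride-2 3x3 convs; the aligned maps are summed and ReLU'd.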
    def __init__(self, in_channels, out_channels):
        super().__init__()

        self._actual_ch = len(in_channels)
        self._in_channels = in_channels

        self.residual_func_list = nn.LayerList()
        self.relu = nn.ReLU()
        for i in range(len(in_channels)):
            for j in range(len(in_channels)):
                if j > i:
                    self.residual_func_list.append(
                        ConvBNLayer(
                            num_channels=in_channels[j],
                            num_filters=out_channels[i],
                            filter_size=1,
                            stride=1,
                            act=None))
                elif j < i:
                    pre_num_filters = in_channels[j]
                    for k in range(i - j):
                        if k == i - j - 1:
                            self.residual_func_list.append(
                                ConvBNLayer(
                                    num_channels=pre_num_filters,
                                    num_filters=out_channels[i],
                                    filter_size=3,
                                    stride=2,
                                    act=None))
                            pre_num_filters = out_channels[i]
                        else:
                            self.residual_func_list.append(
                                ConvBNLayer(
                                    num_channels=pre_num_filters,
                                    num_filters=out_channels[j],
                                    filter_size=3,
                                    stride=2,
                                    act="relu"))
                            pre_num_filters = out_channels[j]

    def forward(self, x, res_dict=None):
        out = []
        residual_func_idx = 0
        for i in range(len(self._in_channels)):
            residual = x[i]
            for j in range(len(self._in_channels)):
                if j > i:
                    xj = self.residual_func_list[residual_func_idx](x[j])
                    residual_func_idx += 1

                    xj = upsample(xj, scale_factor=2**(j - i), mode="nearest")
                    residual = paddle.add(x=residual, y=xj)
                elif j < i:
                    xj = x[j]
                    for k in range(i - j):
                        xj = self.residual_func_list[residual_func_idx](xj)
                        residual_func_idx += 1

                    residual = paddle.add(x=residual, y=xj)

            residual = self.relu(residual)
            out.append(residual)

        return out


class LastClsOut(TheseusLayer):
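    # Classification neck: one BottleneckBlock per branch expands channels 4x
    # before the downsample-and-add head in HRNet.forward.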
    def __init__(self,
                 num_channel_list,
                 has_se,
                 num_filters_list=[32, 64, 128, 256]):
        super().__init__()

        self.func_list = nn.LayerList()
        for idx in range(len(num_channel_list)):
            self.func_list.append(
                BottleneckBlock(
                    num_channels=num_channel_list[idx],
                    num_filters=num_filters_list[idx],
                    has_se=has_se,
                    downsample=True))

    def forward(self, x, res_dict=None):
        out = []
        for idx, xi in enumerate(x):
            xi = self.func_list[idx](xi)
            out.append(xi)
        return out


class HRNet(TheseusLayer):
    """
    HRNet
    Args:
        width: int=18. Base channel number of HRNet.
        has_se: bool=False. If True, add SE (squeeze-and-excitation) modules to HRNet.
        class_num: int=1000. Number of output classes of the last fc layer.
    Returns:
        model: nn.Layer. Specific HRNet model depends on args.
    """

    def __init__(self,
                 stages_pattern,
                 width=18,
                 has_se=False,
                 class_num=1000,
                 return_patterns=None,
                 return_stages=None):
        super().__init__()

        self.width = width
        self.has_se = has_se
        self._class_num = class_num

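        # Branch widths: each additional (coarser) branch doubles the width.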
        channels_2 = [self.width, self.width * 2]
        channels_3 = [self.width, self.width * 2, self.width * 4]
        channels_4 = [
            self.width, self.width * 2, self.width * 4, self.width * 8
        ]

        self.conv_layer1_1 = ConvBNLayer(
            num_channels=3,
            num_filters=64,
            filter_size=3,
            stride=2,
            act="relu")

        self.conv_layer1_2 = ConvBNLayer(
            num_channels=64,
            num_filters=64,
            filter_size=3,
            stride=2,
            act="relu")

        self.layer1 = nn.Sequential(* [
            BottleneckBlock(
                num_channels=64 if i == 0 else 256,
                num_filters=64,
                has_se=has_se,
                stride=1,
                downsample=True if i == 0 else False) for i in range(4)
        ])

        self.conv_tr1_1 = ConvBNLayer(
            num_channels=256, num_filters=width, filter_size=3)
        self.conv_tr1_2 = ConvBNLayer(
            num_channels=256, num_filters=width * 2, filter_size=3, stride=2)

        self.st2 = Stage(
            num_modules=1, num_filters=channels_2, has_se=self.has_se)

        self.conv_tr2 = ConvBNLayer(
            num_channels=width * 2,
            num_filters=width * 4,
            filter_size=3,
            stride=2)
        self.st3 = Stage(
            num_modules=4, num_filters=channels_3, has_se=self.has_se)

        self.conv_tr3 = ConvBNLayer(
            num_channels=width * 4,
            num_filters=width * 8,
            filter_size=3,
            stride=2)

        self.st4 = Stage(
            num_modules=3, num_filters=channels_4, has_se=self.has_se)

        # classification
        num_filters_list = [32, 64, 128, 256]
        self.last_cls = LastClsOut(
            num_channel_list=channels_4,
            has_se=self.has_se,
            num_filters_list=num_filters_list)

        last_num_filters = [256, 512, 1024]
        self.cls_head_conv_list = nn.LayerList()
        for idx in range(3):
            self.cls_head_conv_list.append(
                ConvBNLayer(
                    num_channels=num_filters_list[idx] * 4,
                    num_filters=last_num_filters[idx],
                    filter_size=3,
                    stride=2))

        self.conv_last = ConvBNLayer(
            num_channels=1024, num_filters=2048, filter_size=1, stride=1)

        self.avg_pool = nn.AdaptiveAvgPool2D(1)

        stdv = 1.0 / math.sqrt(2048 * 1.0)

        self.fc = nn.Linear(
            2048,
            class_num,
            weight_attr=ParamAttr(initializer=Uniform(-stdv, stdv)))

        super().init_res(
            stages_pattern,
            return_patterns=return_patterns,
            return_stages=return_stages)

    def forward(self, x):
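        # Stem: two stride-2 3x3 convs reduce the input to 1/4 resolution.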
        x = self.conv_layer1_1(x)
        x = self.conv_layer1_2(x)

        x = self.layer1(x)

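        # Split into two branches, then widen to three and four parallel
        # resolutions for stages 3 and 4.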
        tr1_1 = self.conv_tr1_1(x)
        tr1_2 = self.conv_tr1_2(x)
        x = self.st2([tr1_1, tr1_2])

        tr2 = self.conv_tr2(x[-1])
        x.append(tr2)
        x = self.st3(x)

        tr3 = self.conv_tr3(x[-1])
        x.append(tr3)
        x = self.st4(x)

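        # Classification head: bottleneck each branch, then repeatedly
        # downsample the running feature and add the next branch's output.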
        x = self.last_cls(x)

        y = x[0]
        for idx in range(3):
            y = paddle.add(x[idx + 1], self.cls_head_conv_list[idx](y))

        y = self.conv_last(y)
        y = self.avg_pool(y)
        y = paddle.reshape(y, shape=[-1, y.shape[1]])
        y = self.fc(y)
        return y


def _load_pretrained(pretrained, model, model_url, use_ssld):
    if pretrained is False:
        pass
    elif pretrained is True:
        load_dygraph_pretrain_from_url(model, model_url, use_ssld=use_ssld)
    elif isinstance(pretrained, str):
        load_dygraph_pretrain(model, pretrained)
    else:
        raise RuntimeError(
            "pretrained type is not available. Please use `string` or `boolean` type."
        )


def HRNet_W18_C(pretrained=False, use_ssld=False, **kwargs):
    """
    HRNet_W18_C
    Args:
        pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise.
                    If str, means the path of the pretrained model.
        use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True.
    Returns:
        model: nn.Layer. Specific `HRNet_W18_C` model depends on args.
    """
    model = HRNet(
        width=18, stages_pattern=MODEL_STAGES_PATTERN["HRNet"], **kwargs)
    _load_pretrained(pretrained, model, MODEL_URLS["HRNet_W18_C"], use_ssld)
    return model


def HRNet_W30_C(pretrained=False, use_ssld=False, **kwargs):
    """
    HRNet_W30_C
    Args:
        pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise.
                    If str, means the path of the pretrained model.
        use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True.
    Returns:
        model: nn.Layer. Specific `HRNet_W30_C` model depends on args.
    """
    model = HRNet(
        width=30, stages_pattern=MODEL_STAGES_PATTERN["HRNet"], **kwargs)
    _load_pretrained(pretrained, model, MODEL_URLS["HRNet_W30_C"], use_ssld)
    return model


def HRNet_W32_C(pretrained=False, use_ssld=False, **kwargs):
    """
    HRNet_W32_C
    Args:
        pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise.
                    If str, means the path of the pretrained model.
        use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True.
    Returns:
        model: nn.Layer. Specific `HRNet_W32_C` model depends on args.
    """
    model = HRNet(
        width=32, stages_pattern=MODEL_STAGES_PATTERN["HRNet"], **kwargs)
    _load_pretrained(pretrained, model, MODEL_URLS["HRNet_W32_C"], use_ssld)
    return model


def HRNet_W40_C(pretrained=False, use_ssld=False, **kwargs):
    """
    HRNet_W40_C
    Args:
        pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise.
                    If str, means the path of the pretrained model.
        use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True.
    Returns:
        model: nn.Layer. Specific `HRNet_W40_C` model depends on args.
    """
    model = HRNet(
        width=40, stages_pattern=MODEL_STAGES_PATTERN["HRNet"], **kwargs)
    _load_pretrained(pretrained, model, MODEL_URLS["HRNet_W40_C"], use_ssld)
    return model


def HRNet_W44_C(pretrained=False, use_ssld=False, **kwargs):
    """
    HRNet_W44_C
    Args:
        pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise.
                    If str, means the path of the pretrained model.
        use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True.
    Returns:
        model: nn.Layer. Specific `HRNet_W44_C` model depends on args.
    """
    model = HRNet(
        width=44, stages_pattern=MODEL_STAGES_PATTERN["HRNet"], **kwargs)
    _load_pretrained(pretrained, model, MODEL_URLS["HRNet_W44_C"], use_ssld)
    return model


def HRNet_W48_C(pretrained=False, use_ssld=False, **kwargs):
    """
    HRNet_W48_C
    Args:
        pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise.
                    If str, means the path of the pretrained model.
        use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True.
    Returns:
        model: nn.Layer. Specific `HRNet_W48_C` model depends on args.
    """
    model = HRNet(
        width=48, stages_pattern=MODEL_STAGES_PATTERN["HRNet"], **kwargs)
    _load_pretrained(pretrained, model, MODEL_URLS["HRNet_W48_C"], use_ssld)
    return model


def HRNet_W60_C(pretrained=False, use_ssld=False, **kwargs):
    """
    HRNet_W60_C
    Args:
        pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise.
                    If str, means the path of the pretrained model.
        use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True.
    Returns:
        model: nn.Layer. Specific `HRNet_W60_C` model depends on args.
    """
    model = HRNet(
        width=60, stages_pattern=MODEL_STAGES_PATTERN["HRNet"], **kwargs)
    _load_pretrained(pretrained, model, MODEL_URLS["HRNet_W60_C"], use_ssld)
    return model


def HRNet_W64_C(pretrained=False, use_ssld=False, **kwargs):
    """
    HRNet_W64_C
    Args:
        pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise.
                    If str, means the path of the pretrained model.
        use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True.
    Returns:
        model: nn.Layer. Specific `HRNet_W64_C` model depends on args.
    """
    model = HRNet(
        width=64, stages_pattern=MODEL_STAGES_PATTERN["HRNet"], **kwargs)
    _load_pretrained(pretrained, model, MODEL_URLS["HRNet_W64_C"], use_ssld)
    return model


def SE_HRNet_W18_C(pretrained=False, use_ssld=False, **kwargs):
    """
    SE_HRNet_W18_C
    Args:
        pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise.
                    If str, means the path of the pretrained model.
        use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True.
    Returns:
        model: nn.Layer. Specific `SE_HRNet_W18_C` model depends on args.
    """
    model = HRNet(
        width=18,
        stages_pattern=MODEL_STAGES_PATTERN["HRNet"],
        has_se=True,
        **kwargs)
    _load_pretrained(pretrained, model, MODEL_URLS["SE_HRNet_W18_C"], use_ssld)
    return model


def SE_HRNet_W30_C(pretrained=False, use_ssld=False, **kwargs):
    """
    SE_HRNet_W30_C
    Args:
        pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise.
                    If str, means the path of the pretrained model.
        use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True.
    Returns:
        model: nn.Layer. Specific `SE_HRNet_W30_C` model depends on args.
    """
    model = HRNet(
        width=30,
        stages_pattern=MODEL_STAGES_PATTERN["HRNet"],
        has_se=True,
        **kwargs)
    _load_pretrained(pretrained, model, MODEL_URLS["SE_HRNet_W30_C"], use_ssld)
    return model


def SE_HRNet_W32_C(pretrained=False, use_ssld=False, **kwargs):
    """
    SE_HRNet_W32_C
    Args:
        pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise.
                    If str, means the path of the pretrained model.
        use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True.
    Returns:
        model: nn.Layer. Specific `SE_HRNet_W32_C` model depends on args.
    """
    model = HRNet(
        width=32,
        stages_pattern=MODEL_STAGES_PATTERN["HRNet"],
        has_se=True,
        **kwargs)
    _load_pretrained(pretrained, model, MODEL_URLS["SE_HRNet_W32_C"], use_ssld)
    return model


def SE_HRNet_W40_C(pretrained=False, use_ssld=False, **kwargs):
    """
    SE_HRNet_W40_C
    Args:
        pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise.
                    If str, means the path of the pretrained model.
        use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True.
    Returns:
        model: nn.Layer. Specific `SE_HRNet_W40_C` model depends on args.
    """
    model = HRNet(
        width=40,
        stages_pattern=MODEL_STAGES_PATTERN["HRNet"],
        has_se=True,
        **kwargs)
    _load_pretrained(pretrained, model, MODEL_URLS["SE_HRNet_W40_C"], use_ssld)
    return model


def SE_HRNet_W44_C(pretrained=False, use_ssld=False, **kwargs):
    """
    SE_HRNet_W44_C
    Args:
        pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise.
                    If str, means the path of the pretrained model.
        use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True.
    Returns:
        model: nn.Layer. Specific `SE_HRNet_W44_C` model depends on args.
    """
    model = HRNet(
        width=44,
        stages_pattern=MODEL_STAGES_PATTERN["HRNet"],
        has_se=True,
        **kwargs)
    _load_pretrained(pretrained, model, MODEL_URLS["SE_HRNet_W44_C"], use_ssld)
    return model


def SE_HRNet_W48_C(pretrained=False, use_ssld=False, **kwargs):
    """
    SE_HRNet_W48_C
    Args:
        pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise.
                    If str, means the path of the pretrained model.
        use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True.
    Returns:
        model: nn.Layer. Specific `SE_HRNet_W48_C` model depends on args.
    """
    model = HRNet(
        width=48,
        stages_pattern=MODEL_STAGES_PATTERN["HRNet"],
        has_se=True,
        **kwargs)
    _load_pretrained(pretrained, model, MODEL_URLS["SE_HRNet_W48_C"], use_ssld)
    return model


def SE_HRNet_W60_C(pretrained=False, use_ssld=False, **kwargs):
    """
    SE_HRNet_W60_C
    Args:
        pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise.
                    If str, means the path of the pretrained model.
        use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True.
    Returns:
        model: nn.Layer. Specific `SE_HRNet_W60_C` model depends on args.
    """
    model = HRNet(
        width=60,
        stages_pattern=MODEL_STAGES_PATTERN["HRNet"],
        has_se=True,
        **kwargs)
    _load_pretrained(pretrained, model, MODEL_URLS["SE_HRNet_W60_C"], use_ssld)
    return model


def SE_HRNet_W64_C(pretrained=False, use_ssld=False, **kwargs):
    """
    SE_HRNet_W64_C
    Args:
        pretrained: bool=False or str. If `True` load pretrained parameters, `False` otherwise.
                    If str, means the path of the pretrained model.
        use_ssld: bool=False. Whether using distillation pretrained model when pretrained=True.
    Returns:
        model: nn.Layer. Specific `SE_HRNet_W64_C` model depends on args.
    """
    model = HRNet(
        width=64,
        stages_pattern=MODEL_STAGES_PATTERN["HRNet"],
        has_se=True,
        **kwargs)
    _load_pretrained(pretrained, model, MODEL_URLS["SE_HRNet_W64_C"], use_ssld)
    return model
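

# A minimal usage sketch (not part of the original file): build HRNet_W18_C
# without pretrained weights and run a random batch through it. It assumes
# this module is imported inside the PaddleClas package layout.
if __name__ == "__main__":
    net = HRNet_W18_C(pretrained=False)
    dummy = paddle.rand([1, 3, 224, 224])
    logits = net(dummy)
    print(logits.shape)  # expected: [1, 1000]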