# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from collections import OrderedDict

from paddle import fluid
from paddle.fluid.param_attr import ParamAttr
from paddle.fluid.framework import Variable
from paddle.fluid.regularizer import L2Decay

from ppdet.core.workspace import register, serializable
from numbers import Integral
from paddle.fluid.initializer import MSRA
import math

__all__ = ['HRNet']


@register
@serializable
class HRNet(object):
    """
    HRNet, see https://arxiv.org/abs/1908.07919
    Args:
        width (int): network width, should be 18, 30, 32, 40, 44, 48, 60 or 64
        has_se (bool): whether contain squeeze_excitation(SE) block or not
        freeze_at (int): freeze the backbone at which stage
        norm_type (str): normalization type, 'bn'/'sync_bn'
        freeze_norm (bool): freeze normalization layers
        norm_decay (float): weight decay for normalization layer weights
        feature_maps (list): index of stages whose feature maps are returned
    """

    def __init__(self,
                 width=40,
                 has_se=False,
                 freeze_at=2,
                 norm_type='bn',
                 freeze_norm=True,
                 norm_decay=0.,
                 feature_maps=[2, 3, 4, 5]):
        super(HRNet, self).__init__()

        if isinstance(feature_maps, Integral):
            feature_maps = [feature_maps]

        assert 0 <= freeze_at <= 4, "freeze_at should be 0, 1, 2, 3 or 4"
        assert len(feature_maps) > 0, "need one or more feature maps"
        assert norm_type in ['bn', 'sync_bn']

        self.width = width
        self.has_se = has_se
        self.channels = {
            18: [[18, 36], [18, 36, 72], [18, 36, 72, 144]],
            30: [[30, 60], [30, 60, 120], [30, 60, 120, 240]],
            32: [[32, 64], [32, 64, 128], [32, 64, 128, 256]],
            40: [[40, 80], [40, 80, 160], [40, 80, 160, 320]],
            44: [[44, 88], [44, 88, 176], [44, 88, 176, 352]],
            48: [[48, 96], [48, 96, 192], [48, 96, 192, 384]],
            60: [[60, 120], [60, 120, 240], [60, 120, 240, 480]],
            64: [[64, 128], [64, 128, 256], [64, 128, 256, 512]],
        }

        self.freeze_at = freeze_at
        self.norm_type = norm_type
        self.norm_decay = norm_decay
        self.freeze_norm = freeze_norm
        self._model_type = 'HRNet'
        self.feature_maps = feature_maps
        self.end_points = []

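    # Builds the HRNet body: a stride-4 stem, one bottleneck stage, then three
    # multi-resolution stages joined by transition layers (`class_dim` is
    # unused here).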
    def net(self, input, class_dim=1000):
        width = self.width
        channels_2, channels_3, channels_4 = self.channels[width]
        num_modules_2, num_modules_3, num_modules_4 = 1, 4, 3

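        # Stem: two stride-2 3x3 convs bring the input to 1/4 resolution
        # with 64 channels.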
        x = self.conv_bn_layer(
            input=input,
            filter_size=3,
            num_filters=64,
            stride=2,
            if_act=True,
            name='layer1_1')
        x = self.conv_bn_layer(
            input=x,
            filter_size=3,
            num_filters=64,
            stride=2,
            if_act=True,
            name='layer1_2')

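        # Alternate transition layers (each adds a new, lower-resolution
        # branch) with multi-branch stages of 1, 4 and 3 modules.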
        la1 = self.layer1(x, name='layer2')
        tr1 = self.transition_layer([la1], [256], channels_2, name='tr1')
        st2 = self.stage(tr1, num_modules_2, channels_2, name='st2')
        tr2 = self.transition_layer(st2, channels_2, channels_3, name='tr2')
        st3 = self.stage(tr2, num_modules_3, channels_3, name='st3')
        tr3 = self.transition_layer(st3, channels_3, channels_4, name='tr3')
        st4 = self.stage(tr3, num_modules_4, channels_4, name='st4')

        self.end_points = st4
        return st4[-1]

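    # Stage 1: four bottleneck blocks on the full-resolution feature map,
    # yielding 256 channels (64 filters with the 4x bottleneck expansion).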
    def layer1(self, input, name=None):
        conv = input
        for i in range(4):
            conv = self.bottleneck_block(
                conv,
                num_filters=64,
                downsample=(i == 0),
                name=name + '_' + str(i + 1))
        return conv

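    # Adapts the branch set between stages: an existing branch gets a 3x3 conv
    # when its channel count changes, and every extra output branch is created
    # from the lowest-resolution input by a stride-2 3x3 conv.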
    def transition_layer(self, x, in_channels, out_channels, name=None):
        num_in = len(in_channels)
        num_out = len(out_channels)
        out = []
        for i in range(num_out):
            if i < num_in:
                if in_channels[i] != out_channels[i]:
                    residual = self.conv_bn_layer(
                        x[i],
                        filter_size=3,
                        num_filters=out_channels[i],
                        name=name + '_layer_' + str(i + 1))
                    out.append(residual)
                else:
                    out.append(x[i])
            else:
                residual = self.conv_bn_layer(
                    x[-1],
                    filter_size=3,
                    num_filters=out_channels[i],
                    stride=2,
                    name=name + '_layer_' + str(i + 1))
                out.append(residual)
        return out

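    # Runs `block_num` basic blocks independently on each resolution branch.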
    def branches(self, x, block_num, channels, name=None):
        out = []
        for i in range(len(channels)):
            residual = x[i]
            for j in range(block_num):
                residual = self.basic_block(
                    residual,
                    channels[i],
                    name=name + '_branch_layer_' + str(i + 1) + '_' +
                    str(j + 1))
            out.append(residual)
        return out

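    # Fuses the branches: lower-resolution features are upsampled with a 1x1
    # conv plus nearest-neighbor resize, higher-resolution ones downsampled
    # with repeated stride-2 3x3 convs, and the results summed per branch.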
    def fuse_layers(self, x, channels, multi_scale_output=True, name=None):
        out = []
        for i in range(len(channels) if multi_scale_output else 1):
            residual = x[i]
            for j in range(len(channels)):
                if j > i:
                    y = self.conv_bn_layer(
                        x[j],
                        filter_size=1,
                        num_filters=channels[i],
                        if_act=False,
                        name=name + '_layer_' + str(i + 1) + '_' + str(j + 1))
                    y = fluid.layers.resize_nearest(input=y, scale=2**(j - i))
                    residual = fluid.layers.elementwise_add(
                        x=residual, y=y, act=None)
                elif j < i:
                    y = x[j]
                    for k in range(i - j):
                        if k == i - j - 1:
                            y = self.conv_bn_layer(
                                y,
                                filter_size=3,
                                num_filters=channels[i],
                                stride=2,
                                if_act=False,
                                name=name + '_layer_' + str(i + 1) + '_' +
                                str(j + 1) + '_' + str(k + 1))
                        else:
                            y = self.conv_bn_layer(
                                y,
                                filter_size=3,
                                num_filters=channels[j],
                                stride=2,
                                name=name + '_layer_' + str(i + 1) + '_' +
                                str(j + 1) + '_' + str(k + 1))
                    residual = fluid.layers.elementwise_add(
                        x=residual, y=y, act=None)

            residual = fluid.layers.relu(residual)
            out.append(residual)
        return out

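    # One HRNet module: four basic blocks per branch followed by a fuse layer.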
    def high_resolution_module(self,
                               x,
                               channels,
                               multi_scale_output=True,
                               name=None):
        residual = self.branches(x, 4, channels, name=name)
        out = self.fuse_layers(
            residual,
            channels,
            multi_scale_output=multi_scale_output,
            name=name)
        return out

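    # A stage stacks `num_modules` modules; only the last one may drop the
    # multi-scale outputs.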
    def stage(self,
              x,
              num_modules,
              channels,
              multi_scale_output=True,
              name=None):
        out = x
        for i in range(num_modules):
            if i == num_modules - 1 and not multi_scale_output:
                out = self.high_resolution_module(
                    out,
                    channels,
                    multi_scale_output=False,
                    name=name + '_' + str(i + 1))
            else:
                out = self.high_resolution_module(
                    out, channels, name=name + '_' + str(i + 1))

        return out

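    # 1x1 convs that widen each branch; used by the classification variant,
    # not by the detection entry point in __call__.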
    def last_cls_out(self, x, name=None):
        out = []
        num_filters_list = [128, 256, 512, 1024]
        for i in range(len(x)):
            out.append(
                self.conv_bn_layer(
                    input=x[i],
                    filter_size=1,
                    num_filters=num_filters_list[i],
                    name=name + 'conv_' + str(i + 1)))
        return out

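    # Residual block: two 3x3 convs with an optional 1x1 projection shortcut
    # and optional SE attention before the additive skip connection.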
    def basic_block(self,
                    input,
                    num_filters,
                    stride=1,
                    downsample=False,
                    name=None):
        residual = input
        conv = self.conv_bn_layer(
            input=input,
            filter_size=3,
            num_filters=num_filters,
            stride=stride,
            name=name + '_conv1')
        conv = self.conv_bn_layer(
            input=conv,
            filter_size=3,
            num_filters=num_filters,
            if_act=False,
            name=name + '_conv2')
        if downsample:
            residual = self.conv_bn_layer(
                input=input,
                filter_size=1,
                num_filters=num_filters,
                if_act=False,
                name=name + '_downsample')
        if self.has_se:
            conv = self.squeeze_excitation(
                input=conv,
                num_channels=num_filters,
                reduction_ratio=16,
                name='fc' + name)
        return fluid.layers.elementwise_add(x=residual, y=conv, act='relu')

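    # Bottleneck residual block: 1x1 reduce, 3x3, 1x1 expand (4x), with an
    # optional projection shortcut and optional SE attention.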
    def bottleneck_block(self,
                         input,
                         num_filters,
                         stride=1,
                         downsample=False,
                         name=None):
        residual = input
        conv = self.conv_bn_layer(
            input=input,
            filter_size=1,
            num_filters=num_filters,
            name=name + '_conv1')
        conv = self.conv_bn_layer(
            input=conv,
            filter_size=3,
            num_filters=num_filters,
            stride=stride,
            name=name + '_conv2')
        conv = self.conv_bn_layer(
            input=conv,
            filter_size=1,
            num_filters=num_filters * 4,
            if_act=False,
            name=name + '_conv3')
        if downsample:
            residual = self.conv_bn_layer(
                input=input,
                filter_size=1,
                num_filters=num_filters * 4,
                if_act=False,
                name=name + '_downsample')
        if self.has_se:
            conv = self.squeeze_excitation(
                input=conv,
                num_channels=num_filters * 4,
                reduction_ratio=16,
                name='fc' + name)
        return fluid.layers.elementwise_add(x=residual, y=conv, act='relu')

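    # Squeeze-and-Excitation: global average pool, an FC reduce/restore pair,
    # and a sigmoid gate that rescales the input channel-wise.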
    def squeeze_excitation(self,
                           input,
                           num_channels,
                           reduction_ratio,
                           name=None):
        pool = fluid.layers.pool2d(
            input=input, pool_size=0, pool_type='avg', global_pooling=True)
        stdv = 1.0 / math.sqrt(pool.shape[1] * 1.0)
        squeeze = fluid.layers.fc(
            input=pool,
            size=num_channels // reduction_ratio,
            act='relu',
            param_attr=fluid.param_attr.ParamAttr(
                initializer=fluid.initializer.Uniform(-stdv, stdv),
                name=name + '_sqz_weights'),
            bias_attr=ParamAttr(name=name + '_sqz_offset'))
        stdv = 1.0 / math.sqrt(squeeze.shape[1] * 1.0)
        excitation = fluid.layers.fc(
            input=squeeze,
            size=num_channels,
            act='sigmoid',
            param_attr=fluid.param_attr.ParamAttr(
                initializer=fluid.initializer.Uniform(-stdv, stdv),
                name=name + '_exc_weights'),
            bias_attr=ParamAttr(name=name + '_exc_offset'))
        scale = fluid.layers.elementwise_mul(x=input, y=excitation, axis=0)
        return scale

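    # Conv + BN (+ optional ReLU). "Same" padding is derived from the filter
    # size, so the `padding` argument is effectively unused.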
    def conv_bn_layer(self,
                      input,
                      filter_size,
                      num_filters,
                      stride=1,
                      padding=1,
                      num_groups=1,
                      if_act=True,
                      name=None):
        conv = fluid.layers.conv2d(
            input=input,
            num_filters=num_filters,
            filter_size=filter_size,
            stride=stride,
            padding=(filter_size - 1) // 2,
            groups=num_groups,
            act=None,
            param_attr=ParamAttr(
                initializer=MSRA(), name=name + '_weights'),
            bias_attr=False)
        bn_name = name + '_bn'
        bn = self._bn(input=conv, bn_name=bn_name)
        if if_act:
            bn = fluid.layers.relu(bn)
        return bn

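    # Batch norm whose scale/offset can be frozen (zero learning rate and
    # stopped gradients) and regularized with L2 weight decay.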
    def _bn(self, input, act=None, bn_name=None):
        norm_lr = 0. if self.freeze_norm else 1.
        norm_decay = self.norm_decay
        pattr = ParamAttr(
            name=bn_name + '_scale',
            learning_rate=norm_lr,
            regularizer=L2Decay(norm_decay))
        battr = ParamAttr(
            name=bn_name + '_offset',
            learning_rate=norm_lr,
            regularizer=L2Decay(norm_decay))

        global_stats = self.freeze_norm
        out = fluid.layers.batch_norm(
            input=input,
            act=act,
            name=bn_name + '.output.1',
            param_attr=pattr,
            bias_attr=battr,
            moving_mean_name=bn_name + '_mean',
            moving_variance_name=bn_name + '_variance',
            use_global_stats=global_stats)
        scale = fluid.framework._get_var(pattr.name)
        bias = fluid.framework._get_var(battr.name)
        if self.freeze_norm:
            scale.stop_gradient = True
            bias.stop_gradient = True
        return out

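    # Backbone entry point: builds the network and returns an OrderedDict of
    # the requested stage outputs, stopping gradients up to `freeze_at`.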
    def __call__(self, input):
        assert isinstance(input, Variable)
        assert not (set(self.feature_maps) - set([2, 3, 4, 5])), \
            "feature maps {} not in [2, 3, 4, 5]".format(self.feature_maps)

        res_endpoints = []

        res = input
        feature_maps = self.feature_maps
        self.net(input)

        for i in feature_maps:
            res = self.end_points[i - 2]
            if i in self.feature_maps:
                res_endpoints.append(res)
            if self.freeze_at >= i:
                res.stop_gradient = True

        return OrderedDict([('res{}_sum'.format(self.feature_maps[idx]), feat)
                            for idx, feat in enumerate(res_endpoints)])
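
# A minimal usage sketch (illustrative only; the image shape and build flow
# here are assumptions). In PaddleDetection the backbone is normally
# constructed from a YAML config via ppdet.core.workspace, but it can also
# be exercised directly:
#
#   import paddle.fluid as fluid
#   image = fluid.layers.data(name='image', shape=[3, 512, 512], dtype='float32')
#   backbone = HRNet(width=18, freeze_at=0, freeze_norm=False)
#   body_feats = backbone(image)  # OrderedDict: res2_sum, ..., res5_sum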