# coding: utf8
# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserve.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import paddle
import paddle.fluid as fluid
from paddle.fluid.initializer import MSRA
from paddle.fluid.param_attr import ParamAttr

from utils.config import cfg


def conv_bn_layer(input,
                  filter_size,
                  num_filters,
                  stride=1,
                  padding=1,
                  num_groups=1,
                  if_act=True,
                  name=None):
    """2-D convolution (no bias) + batch norm + optional ReLU.

    Args:
        input: input feature map variable.
        filter_size (int): square kernel size.
        num_filters (int): number of output channels.
        stride (int): convolution stride.
        padding (int): IGNORED — "same" padding ``(filter_size - 1) // 2``
            is always applied. Kept only for signature compatibility
            with existing callers.
        num_groups (int): grouped-convolution group count.
        if_act (bool): apply ReLU after batch norm when True.
        name (str): parameter-name prefix (must not be None).

    Returns:
        The normalized (and optionally activated) feature map.
    """
    conv = fluid.layers.conv2d(
        input=input,
        num_filters=num_filters,
        filter_size=filter_size,
        stride=stride,
        # "same" padding for odd kernel sizes; the `padding` arg is unused.
        padding=(filter_size - 1) // 2,
        groups=num_groups,
        act=None,
        param_attr=ParamAttr(initializer=MSRA(), name=name + '_weights'),
        bias_attr=False)
    bn_name = name + '_bn'
    bn = fluid.layers.batch_norm(
        input=conv,
        param_attr=ParamAttr(
            name=bn_name + "_scale",
            initializer=fluid.initializer.Constant(1.0)),
        bias_attr=ParamAttr(
            name=bn_name + "_offset",
            initializer=fluid.initializer.Constant(0.0)),
        moving_mean_name=bn_name + '_mean',
        moving_variance_name=bn_name + '_variance')
    if if_act:
        bn = fluid.layers.relu(bn)
    return bn

def basic_block(input, num_filters, stride=1, downsample=False, name=None):
    """Residual basic block: two 3x3 conv_bn layers plus a shortcut.

    When ``downsample`` is True the shortcut is projected with a 1x1
    conv_bn so its channel count matches ``num_filters``; otherwise the
    identity is used. The second conv has no ReLU — the activation is
    applied after the residual addition.
    """
    residual = input
    conv = conv_bn_layer(
        input=input,
        filter_size=3,
        num_filters=num_filters,
        stride=stride,
        name=name + '_conv1')
    conv = conv_bn_layer(
        input=conv,
        filter_size=3,
        num_filters=num_filters,
        if_act=False,
        name=name + '_conv2')
    if downsample:
        residual = conv_bn_layer(
            input=input,
            filter_size=1,
            num_filters=num_filters,
            if_act=False,
            name=name + '_downsample')
    return fluid.layers.elementwise_add(x=residual, y=conv, act='relu')

def bottleneck_block(input, num_filters, stride=1, downsample=False, name=None):
    """Residual bottleneck block: 1x1 -> 3x3 -> 1x1 convs with 4x expansion.

    The final 1x1 conv outputs ``num_filters * 4`` channels. When
    ``downsample`` is True the shortcut is projected with a 1x1 conv_bn
    to the expanded width; ReLU is applied after the residual addition.
    """
    residual = input
    conv = conv_bn_layer(
        input=input,
        filter_size=1,
        num_filters=num_filters,
        name=name + '_conv1')
    conv = conv_bn_layer(
        input=conv,
        filter_size=3,
        num_filters=num_filters,
        stride=stride,
        name=name + '_conv2')
    conv = conv_bn_layer(
        input=conv,
        filter_size=1,
        num_filters=num_filters * 4,
        if_act=False,
        name=name + '_conv3')
    if downsample:
        residual = conv_bn_layer(
            input=input,
            filter_size=1,
            num_filters=num_filters * 4,
            if_act=False,
            name=name + '_downsample')
    return fluid.layers.elementwise_add(x=residual, y=conv, act='relu')

def fuse_layers(x, channels, multi_scale_output=True, name=None):
    """Fuse the multi-resolution branches in ``x``.

    For each output branch ``i``: lower-resolution inputs (``j > i``)
    are mapped to ``channels[i]`` with a 1x1 conv_bn and bilinearly
    upsampled; higher-resolution inputs (``j < i``) are reduced through
    a chain of stride-2 3x3 conv_bn layers. All contributions are summed
    with ``x[i]`` and passed through ReLU. When ``multi_scale_output``
    is False only the highest-resolution output (``i == 0``) is built.

    Returns:
        list of fused branch variables.
    """
    out = []
    for i in range(len(channels) if multi_scale_output else 1):
        residual = x[i]
        shape = residual.shape
        width = shape[-1]
        height = shape[-2]
        for j in range(len(channels)):
            if j > i:
                # Coarser branch: 1x1 conv to the target width, then upsample.
                y = conv_bn_layer(
                    x[j],
                    filter_size=1,
                    num_filters=channels[i],
                    if_act=False,
                    name=name + '_layer_' + str(i + 1) + '_' + str(j + 1))
                y = fluid.layers.resize_bilinear(
                    input=y, out_shape=[height, width])
                residual = fluid.layers.elementwise_add(
                    x=residual, y=y, act=None)
            elif j < i:
                # Finer branch: i - j stride-2 convs halve resolution each step.
                y = x[j]
                for k in range(i - j):
                    if k == i - j - 1:
                        # Last conv maps to the target width, no ReLU
                        # (activation happens after the summation below).
                        y = conv_bn_layer(
                            y,
                            filter_size=3,
                            num_filters=channels[i],
                            stride=2,
                            if_act=False,
                            name=name + '_layer_' + str(i + 1) + '_' +
                            str(j + 1) + '_' + str(k + 1))
                    else:
                        y = conv_bn_layer(
                            y,
                            filter_size=3,
                            num_filters=channels[j],
                            stride=2,
                            name=name + '_layer_' + str(i + 1) + '_' +
                            str(j + 1) + '_' + str(k + 1))
                residual = fluid.layers.elementwise_add(
                    x=residual, y=y, act=None)

        residual = fluid.layers.relu(residual)
        out.append(residual)
    return out

def branches(x, block_num, channels, name=None):
    """Apply ``block_num`` basic blocks to each resolution branch of ``x``.

    Branch ``i`` keeps ``channels[i]`` filters throughout (no
    downsampling), so the identity shortcut inside ``basic_block`` is
    always valid here.
    """
    out = []
    for i in range(len(channels)):
        residual = x[i]
        for j in range(block_num):
            residual = basic_block(
                residual,
                channels[i],
                name=name + '_branch_layer_' + str(i + 1) + '_' + str(j + 1))
        out.append(residual)
    return out

def high_resolution_module(x, channels, multi_scale_output=True, name=None):
    """One HRNet module: 4 basic blocks per branch, then cross-branch fusion."""
    residual = branches(x, 4, channels, name=name)
    out = fuse_layers(
        residual, channels, multi_scale_output=multi_scale_output, name=name)
    return out

def transition_layer(x, in_channels, out_channels, name=None):
    """Adapt branch count and widths between HRNet stages.

    Existing branches get a 3x3 conv_bn only when their channel count
    changes (otherwise they pass through untouched). Each additional
    output branch is created from the last input branch with a stride-2
    3x3 conv_bn, halving its resolution.
    """
    num_in = len(in_channels)
    num_out = len(out_channels)
    out = []
    for i in range(num_out):
        if i < num_in:
            if in_channels[i] != out_channels[i]:
                residual = conv_bn_layer(
                    x[i],
                    filter_size=3,
                    num_filters=out_channels[i],
                    name=name + '_layer_' + str(i + 1))
                out.append(residual)
            else:
                out.append(x[i])
        else:
            # New, lower-resolution branch derived from the coarsest input.
            residual = conv_bn_layer(
                x[-1],
                filter_size=3,
                num_filters=out_channels[i],
                stride=2,
                name=name + '_layer_' + str(i + 1))
            out.append(residual)
    return out

def stage(x, num_modules, channels, multi_scale_output=True, name=None):
    """Stack ``num_modules`` high-resolution modules.

    Only the final module honors ``multi_scale_output=False``;
    intermediate modules always emit every resolution so fusion can
    continue across modules.
    """
    out = x
    for i in range(num_modules):
        if i == num_modules - 1 and not multi_scale_output:
            out = high_resolution_module(
                out,
                channels,
                multi_scale_output=False,
                name=name + '_' + str(i + 1))
        else:
            out = high_resolution_module(
                out, channels, name=name + '_' + str(i + 1))

    return out

def layer1(input, name=None):
    """HRNet stage 1: four bottleneck blocks (64 filters, 256 output channels).

    Only the first block projects the shortcut (``downsample=True``)
    because it changes the channel count; the rest use the identity.
    """
    conv = input
    for i in range(4):
        conv = bottleneck_block(
            conv,
            num_filters=64,
            downsample=(i == 0),
            name=name + '_' + str(i + 1))
    return conv

def high_resolution_net(input, num_classes):
    """Build the HRNet backbone and segmentation head.

    Stage widths and depths are read from ``cfg.MODEL.HRNET``. The four
    final branches are bilinearly upsampled to the highest (1/4-input)
    resolution, concatenated, mixed with a 1x1 conv_bn, projected to
    ``num_classes`` channels, and resized back to the input size.

    Args:
        input: NCHW image variable.
        num_classes (int): number of output segmentation classes.

    Returns:
        Logit variable with shape ``[N, num_classes, H, W]``.
    """
    channels_2 = cfg.MODEL.HRNET.STAGE2.NUM_CHANNELS
    channels_3 = cfg.MODEL.HRNET.STAGE3.NUM_CHANNELS
    channels_4 = cfg.MODEL.HRNET.STAGE4.NUM_CHANNELS

    num_modules_2 = cfg.MODEL.HRNET.STAGE2.NUM_MODULES
    num_modules_3 = cfg.MODEL.HRNET.STAGE3.NUM_MODULES
    num_modules_4 = cfg.MODEL.HRNET.STAGE4.NUM_MODULES

    # Stem: two stride-2 3x3 convs -> 64 channels at 1/4 input resolution.
    x = conv_bn_layer(
        input=input,
        filter_size=3,
        num_filters=64,
        stride=2,
        if_act=True,
        name='layer1_1')
    x = conv_bn_layer(
        input=x,
        filter_size=3,
        num_filters=64,
        stride=2,
        if_act=True,
        name='layer1_2')

    # Stage 1 outputs 256 channels (bottleneck expansion), then each
    # transition/stage pair grows the number of parallel branches.
    la1 = layer1(x, name='layer2')
    tr1 = transition_layer([la1], [256], channels_2, name='tr1')
    st2 = stage(tr1, num_modules_2, channels_2, name='st2')
    tr2 = transition_layer(st2, channels_2, channels_3, name='tr2')
    st3 = stage(tr2, num_modules_3, channels_3, name='st3')
    tr3 = transition_layer(st3, channels_3, channels_4, name='tr3')
    st4 = stage(tr3, num_modules_4, channels_4, name='st4')

    # Upsample every branch to the highest-resolution branch and concat.
    shape = st4[0].shape
    height, width = shape[-2], shape[-1]
    st4[1] = fluid.layers.resize_bilinear(st4[1], out_shape=[height, width])
    st4[2] = fluid.layers.resize_bilinear(st4[2], out_shape=[height, width])
    st4[3] = fluid.layers.resize_bilinear(st4[3], out_shape=[height, width])

    out = fluid.layers.concat(st4, axis=1)
    last_channels = sum(channels_4)

    # Head: 1x1 conv_bn to mix the concatenated features, then a plain
    # 1x1 conv producing per-class logits (no bias, no activation).
    out = conv_bn_layer(
        input=out,
        filter_size=1,
        num_filters=last_channels,
        stride=1,
        if_act=True,
        name='conv-2')
    out = fluid.layers.conv2d(
        input=out,
        num_filters=num_classes,
        filter_size=1,
        stride=1,
        padding=0,
        act=None,
        param_attr=ParamAttr(initializer=MSRA(), name='conv-1_weights'),
        bias_attr=False)

    # Resize logits back to the input spatial size.
    out = fluid.layers.resize_bilinear(out, input.shape[2:])

    return out


def hrnet(input, num_classes):
    """Public entry point: return the segmentation logit for ``input``."""
    logit = high_resolution_net(input, num_classes)
    return logit

if __name__ == '__main__':
    # Smoke test: build the graph for a 769x769 RGB input with 4 classes
    # and print the resulting logit shape.
    image_shape = [-1, 3, 769, 769]
    image = fluid.data(name='image', shape=image_shape, dtype='float32')
    logit = hrnet(image, 4)
    print("logit:", logit.shape)