#copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
#Licensed under the Apache License, Version 2.0 (the "License");
#you may not use this file except in compliance with the License.
#You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
#Unless required by applicable law or agreed to in writing, software
#distributed under the License is distributed on an "AS IS" BASIS,
#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#See the License for the specific language governing permissions and
#limitations under the License.

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import paddle.fluid as fluid
from paddle.fluid.param_attr import ParamAttr

__all__ = [
    "ResNet", "ResNet18", "ResNet34", "ResNet50", "ResNet101", "ResNet152"
]

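# Toggle to freeze/unfreeze every backbone parameter created below.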
Trainable = True
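# Shared ParamAttr reused by the FPN fusion layers further down.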
w_nolr = ParamAttr(trainable=Trainable)
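
# Standard ImageNet preprocessing statistics and a piecewise-decay schedule.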
train_parameters = {
    "input_size": [3, 224, 224],
    "input_mean": [0.485, 0.456, 0.406],
    "input_std": [0.229, 0.224, 0.225],
    "learning_strategy": {
        "name": "piecewise_decay",
        "batch_size": 256,
        "epochs": [30, 60, 90],
        "steps": [0.1, 0.01, 0.001, 0.0001]
    }
}


class ResNet(object):
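    """ResNet backbone with FPN-style feature fusion for recognition models."""
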
    def __init__(self, params):
        self.layers = params['layers']
        self.params = train_parameters

    def __call__(self, input):
        layers = self.layers
        supported_layers = [18, 34, 50, 101, 152]
        assert layers in supported_layers, \
            "supported layers are {} but input layer is {}".format(supported_layers, layers)

        if layers == 18:
            depth = [2, 2, 2, 2]
        elif layers == 34 or layers == 50:
            depth = [3, 4, 6, 3]
        elif layers == 101:
            depth = [3, 4, 23, 3]
        elif layers == 152:
            depth = [3, 8, 36, 3]
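        # Per-stage first-block strides: the last two stages keep the spatial
        # size (stride (1, 1)) so their outputs can be fused without resizing.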
        stride_list = [(2, 2), (2, 2), (1, 1), (1, 1)]
        num_filters = [64, 128, 256, 512]

        conv = self.conv_bn_layer(
            input=input,
            num_filters=64,
            filter_size=7,
            stride=2,
            act='relu',
            name="conv1")
        F = []
        if layers >= 50:
            for block in range(len(depth)):
                for i in range(depth[block]):
                    if layers in [101, 152] and block == 2:
                        if i == 0:
                            conv_name = "res" + str(block + 2) + "a"
                        else:
                            conv_name = "res" + str(block + 2) + "b" + str(i)
                    else:
                        conv_name = "res" + str(block + 2) + chr(97 + i)
                    conv = self.bottleneck_block(
                        input=conv,
                        num_filters=num_filters[block],
                        stride=stride_list[block] if i == 0 else 1,
                        name=conv_name)
                F.append(conv)
        else:
            for block in range(len(depth)):
                for i in range(depth[block]):
                    conv_name = "res" + str(block + 2) + chr(97 + i)

                    conv = self.basic_block(
                        input=conv,
                        num_filters=num_filters[block],
                        stride=stride_list[block] if i == 0 else 1,
                        is_first=block == 0 and i == 0,
                        name=conv_name)
                F.append(conv)
        print("F:", F)
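
        # Top-down fusion: start from the deepest feature map and merge in
        # F[-2] and F[-3]; upsample `base` if spatial sizes differ, concat
        # along channels, then smooth with 1x1 and 3x3 convs.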
        base = F[-1]
        for i in [-2, -3]:
            b, c, h, w = F[i].shape
            if (h, w) != base.shape[2:]:
                base = fluid.layers.conv2d_transpose(
                    input=base,
                    num_filters=c,
                    filter_size=4,
                    stride=2,
                    padding=1,
                    act=None,
                    param_attr=w_nolr,
                    bias_attr=w_nolr)
                base = fluid.layers.batch_norm(
                    base, act="relu", param_attr=w_nolr, bias_attr=w_nolr)
            base = fluid.layers.concat([base, F[i]], axis=1)
            base = fluid.layers.conv2d(
                base,
                num_filters=c,
                filter_size=1,
                param_attr=w_nolr,
                bias_attr=w_nolr)
            base = fluid.layers.conv2d(
                base,
                num_filters=c,
                filter_size=3,
                padding=1,
                param_attr=w_nolr,
                bias_attr=w_nolr)
            base = fluid.layers.batch_norm(
                base, act="relu", param_attr=w_nolr, bias_attr=w_nolr)

        base = fluid.layers.conv2d(
            base,
            num_filters=512,
            filter_size=1,
            bias_attr=w_nolr,
            param_attr=w_nolr)

        return base

    def conv_bn_layer(self,
                      input,
                      num_filters,
                      filter_size,
                      stride=1,
                      groups=1,
                      act=None,
                      name=None):
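        # conv2d followed by batch_norm. BN parameter names are derived from
        # the conv name ("conv1" -> "bn_conv1", "resXY" -> "bnXY").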
        conv = fluid.layers.conv2d(
            input=input,
            num_filters=num_filters,
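            # Non-downsampling stages (stride (1, 1)) swap in a 2x2 kernel
            # with dilation 2, keeping a 3x3 receptive field at full
            # resolution instead of striding.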
            filter_size=2 if stride == (1, 1) else filter_size,
            dilation=2 if stride == (1, 1) else 1,
            stride=stride,
            padding=(filter_size - 1) // 2,
            groups=groups,
            act=None,
            param_attr=ParamAttr(
                name=name + "_weights", trainable=Trainable),
            bias_attr=False,
            name=name + '.conv2d.output.1')

        if name == "conv1":
            bn_name = "bn_" + name
        else:
            bn_name = "bn" + name[3:]
        return fluid.layers.batch_norm(
            input=conv,
            act=act,
            name=bn_name + '.output.1',
            param_attr=ParamAttr(
                name=bn_name + '_scale', trainable=Trainable),
            bias_attr=ParamAttr(
                name=bn_name + '_offset', trainable=Trainable),
            moving_mean_name=bn_name + '_mean',
            moving_variance_name=bn_name + '_variance')

    def shortcut(self, input, ch_out, stride, is_first, name):
        ch_in = input.shape[1]
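        # Project the identity branch whenever channels or resolution change,
        # and always for the very first basic block.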
        if ch_in != ch_out or stride != 1 or is_first:
            if stride == (1, 1):
                return self.conv_bn_layer(input, ch_out, 1, 1, name=name)
            else:  # stride == (2, 2)
                return self.conv_bn_layer(input, ch_out, 1, stride, name=name)

        else:
            return input

    def bottleneck_block(self, input, num_filters, stride, name):
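        # 1x1 -> 3x3 -> 1x1 bottleneck; the shortcut is projected when shapes
        # change.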
        conv0 = self.conv_bn_layer(
            input=input,
            num_filters=num_filters,
            filter_size=1,
            act='relu',
            name=name + "_branch2a")
        conv1 = self.conv_bn_layer(
            input=conv0,
            num_filters=num_filters,
            filter_size=3,
            stride=stride,
            act='relu',
            name=name + "_branch2b")
        conv2 = self.conv_bn_layer(
            input=conv1,
            num_filters=num_filters * 4,
            filter_size=1,
            act=None,
            name=name + "_branch2c")

        short = self.shortcut(
            input,
            num_filters * 4,
            stride,
            is_first=False,
            name=name + "_branch1")

        return fluid.layers.elementwise_add(
            x=short, y=conv2, act='relu', name=name + ".add.output.5")

    def basic_block(self, input, num_filters, stride, is_first, name):
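        # Two 3x3 convs; the shortcut is projected when shapes change or for
        # the first block.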
        conv0 = self.conv_bn_layer(
            input=input,
            num_filters=num_filters,
            filter_size=3,
            act='relu',
            stride=stride,
            name=name + "_branch2a")
        conv1 = self.conv_bn_layer(
            input=conv0,
            num_filters=num_filters,
            filter_size=3,
            act=None,
            name=name + "_branch2b")
        short = self.shortcut(
            input, num_filters, stride, is_first, name=name + "_branch1")
        return fluid.layers.elementwise_add(x=short, y=conv1, act='relu')
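

# A minimal usage sketch (shapes are illustrative; assumes the legacy
# static-graph fluid API used throughout this file):
#
#     image = fluid.data(name="image", shape=[-1, 3, 64, 256], dtype="float32")
#     backbone = ResNet({"layers": 50})
#     fpn_out = backbone(image)  # 512-channel map at 1/8 input resolution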