# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os

import paddle
from paddle import ParamAttr
import paddle.nn as nn
import paddle.nn.functional as F
from paddle.nn import Conv2d, BatchNorm, Linear, Dropout
from paddle.nn import AdaptiveAvgPool2d, MaxPool2d, AvgPool2d
from paddle.regularizer import L2Decay
from paddlehub.module.module import moduleinfo
from paddlehub.module.cv_module import ImageClassifierModule


def make_divisible(v, divisor=8, min_value=None):
    """Round `v` to the nearest multiple of `divisor`, never going below
    `min_value` and never dropping more than 10% below `v`."""
    if min_value is None:
        min_value = divisor
    new_v = max(min_value, int(v + divisor / 2) // divisor * divisor)
    # Make sure rounding down does not drop more than 10%.
    if new_v < 0.9 * v:
        new_v += divisor
    return new_v
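
# A few worked values of the rounding rule above (divisor=8):
#   make_divisible(16) -> 16  (already a multiple of 8)
#   make_divisible(12) -> 16  (int(12 + 4) // 8 * 8)
#   make_divisible(30) -> 32  (int(30 + 4) // 8 * 8)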


@moduleinfo(
    name="mobilenet_v3_small_imagenet_ssld",
    type="cv/classification",
    author="paddlepaddle",
    author_email="",
    summary="mobilenet_v3_small_imagenet_ssld is a classification model, "
    "this module is trained with Imagenet dataset.",
    version="1.1.0",
    meta=ImageClassifierModule)
class MobileNetV3Small(nn.Layer):
    """MobileNetV3Small module."""

    def __init__(self, dropout_prob: float = 0.2, class_dim: int = 1000, load_checkpoint: str = None):
        super(MobileNetV3Small, self).__init__()

        inplanes = 16
        self.cfg = [
            # kernel_size, expansion_channels, output_channels, use_se, activation, stride
            [3, 16, 16, True, "relu", 2],
            [3, 72, 24, False, "relu", 2],
            [3, 88, 24, False, "relu", 1],
            [5, 96, 40, True, "hard_swish", 2],
            [5, 240, 40, True, "hard_swish", 1],
            [5, 240, 40, True, "hard_swish", 1],
            [5, 120, 48, True, "hard_swish", 1],
            [5, 144, 48, True, "hard_swish", 1],
            [5, 288, 96, True, "hard_swish", 2],
            [5, 576, 96, True, "hard_swish", 1],
            [5, 576, 96, True, "hard_swish", 1],
        ]
        self.cls_ch_squeeze = 576  # channels of the last ConvBNLayer before pooling
        self.cls_ch_expand = 1280  # channels of the 1x1 conv feeding the classifier

        self.conv1 = ConvBNLayer(
            in_c=3,
            out_c=make_divisible(inplanes),
            filter_size=3,
            stride=2,
            padding=1,
            num_groups=1,
            if_act=True,
            act="hard_swish",
            name="conv1")

        self.block_list = []
        i = 0
        inplanes = make_divisible(inplanes)
        for (k, exp, c, se, nl, s) in self.cfg:
            self.block_list.append(
                ResidualUnit(
                    in_c=inplanes,
                    mid_c=make_divisible(exp),
                    out_c=make_divisible(c),
                    filter_size=k,
                    stride=s,
                    use_se=se,
                    act=nl,
                    name="conv" + str(i + 2)))
            self.add_sublayer(sublayer=self.block_list[-1], name="conv" + str(i + 2))
            inplanes = make_divisible(c)
            i += 1
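        # Layers appended to a plain Python list are not tracked by nn.Layer,
        # so the add_sublayer() call above is what registers each block's
        # parameters with the model (and hence with state_dict()).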

        self.last_second_conv = ConvBNLayer(
            in_c=inplanes,
            out_c=make_divisible(self.cls_ch_squeeze),
            filter_size=1,
            stride=1,
            padding=0,
            num_groups=1,
            if_act=True,
            act="hard_swish",
            name="conv_last")

        self.pool = AdaptiveAvgPool2d(1)

        self.last_conv = Conv2d(
            in_channels=make_divisible(self.cls_ch_squeeze),
            out_channels=self.cls_ch_expand,
            kernel_size=1,
            stride=1,
            padding=0,
            weight_attr=ParamAttr(name="last_1x1_conv_weights"),
            bias_attr=False)

        self.dropout = Dropout(p=dropout_prob, mode="downscale_in_infer")

        self.out = Linear(
            self.cls_ch_expand, class_dim, weight_attr=ParamAttr(name="fc_weights"), bias_attr=ParamAttr(name="fc_offset"))

        if load_checkpoint is not None:
            model_dict = paddle.load(load_checkpoint)[0]
            self.set_dict(model_dict)
            print("load custom checkpoint success")

        else:
            checkpoint = os.path.join(self.directory, 'mobilenet_v3_small_ssld.pdparams')
            if not os.path.exists(checkpoint):
                os.system(
                    'wget https://paddlehub.bj.bcebos.com/dygraph/image_classification/mobilenet_v3_small_ssld.pdparams -O '
                    + checkpoint)
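            # NOTE: the fallback above shells out to wget, so it assumes wget
            # is installed, the URL is reachable, and the module directory is
            # writable.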
            model_dict = paddle.load(checkpoint)[0]
            self.set_dict(model_dict)
            print("load pretrained checkpoint success")

    def forward(self, inputs: paddle.Tensor):
        x = self.conv1(inputs)
        for block in self.block_list:
            x = block(x)

        x = self.last_second_conv(x)
        x = self.pool(x)

        x = self.last_conv(x)
        x = F.hard_swish(x)
        x = self.dropout(x)
        x = paddle.reshape(x, shape=[x.shape[0], x.shape[1]])  # flatten [N, C, 1, 1] -> [N, C]
        x = self.out(x)
        return x


class ConvBNLayer(nn.Layer):
    """Basic conv bn layer."""

    def __init__(self,
                 in_c: int,
                 out_c: int,
                 filter_size: int,
                 stride: int,
                 padding: int,
                 num_groups: int = 1,
                 if_act: bool = True,
                 act: str = None,
                 name: str = ""):
        super(ConvBNLayer, self).__init__()
        self.if_act = if_act
        self.act = act
        self.conv = Conv2d(
            in_channels=in_c,
            out_channels=out_c,
            kernel_size=filter_size,
            stride=stride,
            padding=padding,
            groups=num_groups,
            weight_attr=ParamAttr(name=name + "_weights"),
            bias_attr=False)
        self.bn = BatchNorm(
            num_channels=out_c,
            act=None,
            param_attr=ParamAttr(name=name + "_bn_scale", regularizer=L2Decay(0.0)),
            bias_attr=ParamAttr(name=name + "_bn_offset", regularizer=L2Decay(0.0)),
            moving_mean_name=name + "_bn_mean",
            moving_variance_name=name + "_bn_variance")

    def forward(self, x):
        x = self.conv(x)
        x = self.bn(x)
        if self.if_act:
            if self.act == "relu":
                x = F.relu(x)
            elif self.act == "hard_swish":
                x = F.hard_swish(x)
            else:
                raise ValueError("Unsupported activation: {}".format(self.act))
        return x


class ResidualUnit(nn.Layer):
    """Residual unit for MobileNetV3."""

    def __init__(self,
                 in_c: int,
                 mid_c: int,
                 out_c: int,
                 filter_size: int,
                 stride: int,
                 use_se: bool,
                 act: str = None,
                 name: str = ''):
        super(ResidualUnit, self).__init__()
        self.if_shortcut = stride == 1 and in_c == out_c
        self.if_se = use_se

        self.expand_conv = ConvBNLayer(
            in_c=in_c, out_c=mid_c, filter_size=1, stride=1, padding=0, if_act=True, act=act, name=name + "_expand")
        self.bottleneck_conv = ConvBNLayer(
            in_c=mid_c,
            out_c=mid_c,
            filter_size=filter_size,
            stride=stride,
            padding=int((filter_size - 1) // 2),
            num_groups=mid_c,
            if_act=True,
            act=act,
            name=name + "_depthwise")
        if self.if_se:
            self.mid_se = SEModule(mid_c, name=name + "_se")
        self.linear_conv = ConvBNLayer(
            in_c=mid_c, out_c=out_c, filter_size=1, stride=1, padding=0, if_act=False, act=None, name=name + "_linear")

    def forward(self, inputs: paddle.Tensor):
        x = self.expand_conv(inputs)
        x = self.bottleneck_conv(x)
        if self.if_se:
            x = self.mid_se(x)
        x = self.linear_conv(x)
        if self.if_shortcut:
            x = paddle.elementwise_add(inputs, x)  # identity shortcut
        return x


class SEModule(nn.Layer):
    """Basic model for ResidualUnit."""

    def __init__(self, channel: int, reduction: int = 4, name: str = ""):
        super(SEModule, self).__init__()
        self.avg_pool = AdaptiveAvgPool2d(1)
        self.conv1 = Conv2d(
            in_channels=channel,
            out_channels=channel // reduction,
            kernel_size=1,
            stride=1,
            padding=0,
            weight_attr=ParamAttr(name=name + "_1_weights"),
            bias_attr=ParamAttr(name=name + "_1_offset"))
        self.conv2 = Conv2d(
            in_channels=channel // reduction,
            out_channels=channel,
            kernel_size=1,
            stride=1,
            padding=0,
            weight_attr=ParamAttr(name=name + "_2_weights"),
            bias_attr=ParamAttr(name=name + "_2_offset"))

    def forward(self, inputs: paddle.Tensor):
        outputs = self.avg_pool(inputs)
        outputs = self.conv1(outputs)
        outputs = F.relu(outputs)
        outputs = self.conv2(outputs)
        outputs = F.hard_sigmoid(outputs)
        return paddle.multiply(x=inputs, y=outputs, axis=0)
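

if __name__ == "__main__":
    # Minimal local smoke test: a sketch, not part of the published module.
    # It assumes a Paddle install whose API matches the 2.0-era imports above
    # (e.g. paddle.multiply still accepts an `axis` argument) and only checks
    # that the SE block preserves its input shape.
    paddle.disable_static()  # ensure dygraph mode (default from Paddle 2.0rc on)
    fake = paddle.rand([1, 16, 32, 32])
    se = SEModule(channel=16, name="demo_se")
    print(se(fake).shape)  # expected: [1, 16, 32, 32]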