# copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# reference: https://arxiv.org/abs/1704.04861

from __future__ import absolute_import, division, print_function

from paddle import ParamAttr
import paddle.nn as nn
from paddle.nn import Conv2D, BatchNorm, Linear, ReLU, Flatten
from paddle.nn import AdaptiveAvgPool2D
from paddle.nn.initializer import KaimingNormal

from ..base.theseus_layer import TheseusLayer
from ....utils.save_load import load_dygraph_pretrain, load_dygraph_pretrain_from_url

MODEL_URLS = {
    "MobileNetV1_x0_25":
    "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/MobileNetV1_x0_25_pretrained.pdparams",
    "MobileNetV1_x0_5":
    "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/MobileNetV1_x0_5_pretrained.pdparams",
    "MobileNetV1_x0_75":
    "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/MobileNetV1_x0_75_pretrained.pdparams",
    "MobileNetV1":
    "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/MobileNetV1_pretrained.pdparams"
}

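# Layer-name patterns that mark the stage boundaries of the backbone; they are
# handed to TheseusLayer.init_res() below so that intermediate block outputs
# can be exposed through the `return_patterns` / `return_stages` arguments.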
MODEL_STAGES_PATTERN = {
    "MobileNetV1": ["blocks[0]", "blocks[2]", "blocks[4]", "blocks[10]"]
}

__all__ = list(MODEL_URLS.keys())


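# ConvBNLayer: Conv2D (Kaiming-initialized weights, no bias) followed by
# BatchNorm and ReLU.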
class ConvBNLayer(TheseusLayer):
    def __init__(self,
                 num_channels,
                 filter_size,
                 num_filters,
                 stride,
                 padding,
                 num_groups=1):
        super().__init__()

        self.conv = Conv2D(
            in_channels=num_channels,
            out_channels=num_filters,
            kernel_size=filter_size,
            stride=stride,
            padding=padding,
            groups=num_groups,
            weight_attr=ParamAttr(initializer=KaimingNormal()),
            bias_attr=False)
        self.bn = BatchNorm(num_filters)
        self.relu = ReLU()

    def forward(self, x):
        x = self.conv(x)
        x = self.bn(x)
        x = self.relu(x)
        return x


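# DepthwiseSeparable: the MobileNetV1 building block, a 3x3 depthwise
# ConvBNLayer followed by a 1x1 pointwise ConvBNLayer; `scale` is the width
# multiplier applied to the channel counts.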
class DepthwiseSeparable(TheseusLayer):
    def __init__(self, num_channels, num_filters1, num_filters2, num_groups,
                 stride, scale):
        super().__init__()

        self.depthwise_conv = ConvBNLayer(
            num_channels=num_channels,
            num_filters=int(num_filters1 * scale),
            filter_size=3,
            stride=stride,
            padding=1,
            num_groups=int(num_groups * scale))

        self.pointwise_conv = ConvBNLayer(
            num_channels=int(num_filters1 * scale),
            filter_size=1,
            num_filters=int(num_filters2 * scale),
            stride=1,
            padding=0)

    def forward(self, x):
        x = self.depthwise_conv(x)
        x = self.pointwise_conv(x)
        return x


class MobileNet(TheseusLayer):
    """
    MobileNet
    Args:
        scale: float=1.0. The width multiplier that scales the number of channels in each layer and thus the size of the network.
        class_num: int=1000. The number of classes.
    Returns:
        model: nn.Layer. Specific MobileNet model depends on args.
    """

    def __init__(self,
                 stages_pattern,
                 scale=1.0,
                 class_num=1000,
                 return_patterns=None,
                 return_stages=None):
        super().__init__()
        self.scale = scale

        self.conv = ConvBNLayer(
            num_channels=3,
            filter_size=3,
            num_filters=int(32 * scale),
            stride=2,
            padding=1)

        # Per-block config: num_channels, num_filters1, num_filters2, num_groups, stride
        self.cfg = [[int(32 * scale), 32, 64, 32, 1],
                    [int(64 * scale), 64, 128, 64, 2],
                    [int(128 * scale), 128, 128, 128, 1],
                    [int(128 * scale), 128, 256, 128, 2],
                    [int(256 * scale), 256, 256, 256, 1],
                    [int(256 * scale), 256, 512, 256, 2],
                    [int(512 * scale), 512, 512, 512, 1],
                    [int(512 * scale), 512, 512, 512, 1],
                    [int(512 * scale), 512, 512, 512, 1],
                    [int(512 * scale), 512, 512, 512, 1],
                    [int(512 * scale), 512, 512, 512, 1],
                    [int(512 * scale), 512, 1024, 512, 2],
                    [int(1024 * scale), 1024, 1024, 1024, 1]]

        self.blocks = nn.Sequential(*[
            DepthwiseSeparable(
                num_channels=params[0],
                num_filters1=params[1],
                num_filters2=params[2],
                num_groups=params[3],
                stride=params[4],
                scale=scale) for params in self.cfg
        ])

        self.avg_pool = AdaptiveAvgPool2D(1)
        self.flatten = Flatten(start_axis=1, stop_axis=-1)

        self.fc = Linear(
            int(1024 * scale),
            class_num,
            weight_attr=ParamAttr(initializer=KaimingNormal()))

        super().init_res(
            stages_pattern,
            return_patterns=return_patterns,
            return_stages=return_stages)

    def forward(self, x):
        x = self.conv(x)
        x = self.blocks(x)
        x = self.avg_pool(x)
        x = self.flatten(x)
        x = self.fc(x)
        return x


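# Resolve the `pretrained` argument: False skips loading, True downloads the
# weights from `model_url`, and a str is treated as a local checkpoint path.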
def _load_pretrained(pretrained, model, model_url, use_ssld):
    if pretrained is False:
        pass
    elif pretrained is True:
        load_dygraph_pretrain_from_url(model, model_url, use_ssld=use_ssld)
    elif isinstance(pretrained, str):
        load_dygraph_pretrain(model, pretrained)
    else:
        raise RuntimeError(
            "pretrained type is not available. Please use `string` or `boolean` type."
        )


def MobileNetV1_x0_25(pretrained=False, use_ssld=False, **kwargs):
    """
    MobileNetV1_x0_25
    Args:
        pretrained: bool=False or str. If `True`, load the pretrained parameters;
                    if a str, it is the path of the pretrained model to load.
        use_ssld: bool=False. Whether to use the SSLD distillation pretrained model when pretrained=True.
    Returns:
        model: nn.Layer. Specific `MobileNetV1_x0_25` model depends on args.
    """
    model = MobileNet(
        scale=0.25,
        stages_pattern=MODEL_STAGES_PATTERN["MobileNetV1"],
        **kwargs)
    _load_pretrained(pretrained, model, MODEL_URLS["MobileNetV1_x0_25"],
                     use_ssld)
    return model


def MobileNetV1_x0_5(pretrained=False, use_ssld=False, **kwargs):
    """
    MobileNetV1_x0_5
    Args:
        pretrained: bool=False or str. If `True`, load the pretrained parameters;
                    if a str, it is the path of the pretrained model to load.
        use_ssld: bool=False. Whether to use the SSLD distillation pretrained model when pretrained=True.
    Returns:
        model: nn.Layer. Specific `MobileNetV1_x0_5` model depends on args.
    """
    model = MobileNet(
        scale=0.5,
        stages_pattern=MODEL_STAGES_PATTERN["MobileNetV1"],
        **kwargs)
    _load_pretrained(pretrained, model, MODEL_URLS["MobileNetV1_x0_5"],
                     use_ssld)
    return model


def MobileNetV1_x0_75(pretrained=False, use_ssld=False, **kwargs):
    """
    MobileNetV1_x0_75
    Args:
        pretrained: bool=False or str. If `True`, load the pretrained parameters;
                    if a str, it is the path of the pretrained model to load.
        use_ssld: bool=False. Whether to use the SSLD distillation pretrained model when pretrained=True.
    Returns:
        model: nn.Layer. Specific `MobileNetV1_x0_75` model depends on args.
    """
    model = MobileNet(
        scale=0.75,
        stages_pattern=MODEL_STAGES_PATTERN["MobileNetV1"],
        **kwargs)
    _load_pretrained(pretrained, model, MODEL_URLS["MobileNetV1_x0_75"],
                     use_ssld)
    return model


def MobileNetV1(pretrained=False, use_ssld=False, **kwargs):
    """
    MobileNetV1
    Args:
        pretrained: bool=False or str. If `True`, load the pretrained parameters;
                    if a str, it is the path of the pretrained model to load.
        use_ssld: bool=False. Whether to use the SSLD distillation pretrained model when pretrained=True.
    Returns:
        model: nn.Layer. Specific `MobileNetV1` model depends on args.
    """
    model = MobileNet(
        scale=1.0,
        stages_pattern=MODEL_STAGES_PATTERN["MobileNetV1"],
        **kwargs)
    _load_pretrained(pretrained, model, MODEL_URLS["MobileNetV1"], use_ssld)
    return model
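

# ---------------------------------------------------------------------------
# Minimal usage sketch: builds the full-width MobileNetV1 and runs a dummy
# forward pass. The import path below is an assumption based on the standard
# PaddleClas package layout and may differ in other installs.
#
#   import paddle
#   from ppcls.arch.backbone.legendary_models.mobilenet_v1 import MobileNetV1
#
#   model = MobileNetV1(pretrained=False, class_num=1000)
#   model.eval()
#   x = paddle.rand([1, 3, 224, 224])   # NCHW image batch
#   with paddle.no_grad():
#       logits = model(x)               # shape: [1, 1000]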