# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import, division, print_function

from paddle import ParamAttr
import paddle.nn as nn
from paddle.nn import Conv2D, BatchNorm, Linear, ReLU, Flatten
from paddle.nn import AdaptiveAvgPool2D
from paddle.nn.initializer import KaimingNormal

from ppcls.arch.backbone.base.theseus_layer import TheseusLayer
from ppcls.utils.save_load import load_dygraph_pretrain, load_dygraph_pretrain_from_url

MODEL_URLS = {
    "MobileNetV1_x0_25":
    "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/MobileNetV1_x0_25_pretrained.pdparams",
    "MobileNetV1_x0_5":
    "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/MobileNetV1_x0_5_pretrained.pdparams",
    "MobileNetV1_x0_75":
    "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/MobileNetV1_x0_75_pretrained.pdparams",
    "MobileNetV1":
    "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/MobileNetV1_pretrained.pdparams"
}

__all__ = list(MODEL_URLS.keys())


class ConvBNLayer(TheseusLayer):
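    """Conv2D + BatchNorm + ReLU, the basic convolution block used throughout MobileNetV1."""
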
    def __init__(self,
                 num_channels,
                 filter_size,
                 num_filters,
                 stride,
                 padding,
                 num_groups=1):
        super().__init__()

        self.conv = Conv2D(
            in_channels=num_channels,
            out_channels=num_filters,
            kernel_size=filter_size,
            stride=stride,
            padding=padding,
            groups=num_groups,
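            # Kaiming (He) normal init for the conv weights; the bias is
            # disabled because the following BatchNorm provides a shift term.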
            weight_attr=ParamAttr(initializer=KaimingNormal()),
            bias_attr=False)
        self.bn = BatchNorm(num_filters)
        self.relu = ReLU()

    def forward(self, x):
        x = self.conv(x)
        x = self.bn(x)
        x = self.relu(x)
        return x


class DepthwiseSeparable(TheseusLayer):
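    """
    Depthwise separable convolution: a 3x3 depthwise convolution (one group
    per input channel) followed by a 1x1 pointwise convolution, each wrapped
    in ConvBNLayer (conv + batch norm + ReLU).
    """
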
    def __init__(self, num_channels, num_filters1, num_filters2, num_groups,
                 stride, scale):
        super().__init__()

        self.depthwise_conv = ConvBNLayer(
            num_channels=num_channels,
            num_filters=int(num_filters1 * scale),
            filter_size=3,
            stride=stride,
            padding=1,
            num_groups=int(num_groups * scale))

        self.pointwise_conv = ConvBNLayer(
            num_channels=int(num_filters1 * scale),
            filter_size=1,
            num_filters=int(num_filters2 * scale),
            stride=1,
            padding=0)

    def forward(self, x):
        x = self.depthwise_conv(x)
        x = self.pointwise_conv(x)
        return x


class MobileNet(TheseusLayer):
    """
    MobileNet
    Args:
        scale: float=1.0. The width multiplier that scales the number of channels in every layer.
        class_num: int=1000. The number of classes.
        return_patterns: list=None. Name patterns of the layers whose intermediate outputs should also be returned.
    Returns:
        model: nn.Layer. A MobileNet model configured by the args.
    """

    def __init__(self, scale=1.0, class_num=1000, return_patterns=None):
        super().__init__()
        self.scale = scale

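        # Stem: a 3x3, stride-2 convolution from the 3 RGB channels to
        # int(32 * scale) channels.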
        self.conv = ConvBNLayer(
            num_channels=3,
            filter_size=3,
            num_filters=int(32 * scale),
            stride=2,
            padding=1)

        # Each cfg entry: num_channels, num_filters1, num_filters2, num_groups, stride
        self.cfg = [[int(32 * scale), 32, 64, 32, 1],
                    [int(64 * scale), 64, 128, 64, 2],
                    [int(128 * scale), 128, 128, 128, 1],
                    [int(128 * scale), 128, 256, 128, 2],
                    [int(256 * scale), 256, 256, 256, 1],
                    [int(256 * scale), 256, 512, 256, 2],
                    [int(512 * scale), 512, 512, 512, 1],
                    [int(512 * scale), 512, 512, 512, 1],
                    [int(512 * scale), 512, 512, 512, 1],
                    [int(512 * scale), 512, 512, 512, 1],
                    [int(512 * scale), 512, 512, 512, 1],
                    [int(512 * scale), 512, 1024, 512, 2],
                    [int(1024 * scale), 1024, 1024, 1024, 1]]

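        # Stack the 13 depthwise separable blocks described by self.cfg.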
        self.blocks = nn.Sequential(*[
            DepthwiseSeparable(
                num_channels=params[0],
                num_filters1=params[1],
                num_filters2=params[2],
                num_groups=params[3],
                stride=params[4],
                scale=scale) for params in self.cfg
        ])

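        # Classification head: global average pooling, flatten, then a
        # single fully connected layer.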
        self.avg_pool = AdaptiveAvgPool2D(1)
        self.flatten = Flatten(start_axis=1, stop_axis=-1)

        self.fc = Linear(
            int(1024 * scale),
            class_num,
            weight_attr=ParamAttr(initializer=KaimingNormal()))
        if return_patterns is not None:
            self.update_res(return_patterns)
            self.register_forward_post_hook(self._return_dict_hook)

    def forward(self, x):
        x = self.conv(x)
        x = self.blocks(x)
        x = self.avg_pool(x)
        x = self.flatten(x)
        x = self.fc(x)
        return x


def _load_pretrained(pretrained, model, model_url, use_ssld):
    if pretrained is False:
        pass
    elif pretrained is True:
        load_dygraph_pretrain_from_url(model, model_url, use_ssld=use_ssld)
    elif isinstance(pretrained, str):
        load_dygraph_pretrain(model, pretrained)
    else:
        raise RuntimeError(
            "pretrained type is not available. Please use `string` or `boolean` type."
        )


def MobileNetV1_x0_25(pretrained=False, use_ssld=False, **kwargs):
    """
    MobileNetV1_x0_25
    Args:
        pretrained: bool=False or str. If `True`, load the pretrained parameters; if `False`, do not.
                    If a str, it is treated as the path of the pretrained model.
        use_ssld: bool=False. Whether to use the SSLD distillation pretrained model when `pretrained=True`.
    Returns:
        model: nn.Layer. The `MobileNetV1_x0_25` model configured by the args.
    """
    model = MobileNet(scale=0.25, **kwargs)
    _load_pretrained(pretrained, model, MODEL_URLS["MobileNetV1_x0_25"],
                     use_ssld)
    return model


def MobileNetV1_x0_5(pretrained=False, use_ssld=False, **kwargs):
    """
    MobileNetV1_x0_5
    Args:
        pretrained: bool=False or str. If `True`, load the pretrained parameters; if `False`, do not.
                    If a str, it is treated as the path of the pretrained model.
        use_ssld: bool=False. Whether to use the SSLD distillation pretrained model when `pretrained=True`.
    Returns:
        model: nn.Layer. The `MobileNetV1_x0_5` model configured by the args.
    """
    model = MobileNet(scale=0.5, **kwargs)
    _load_pretrained(pretrained, model, MODEL_URLS["MobileNetV1_x0_5"],
                     use_ssld)
    return model


def MobileNetV1_x0_75(pretrained=False, use_ssld=False, **kwargs):
    """
    MobileNetV1_x0_75
    Args:
        pretrained: bool=False or str. If `True`, load the pretrained parameters; if `False`, do not.
                    If a str, it is treated as the path of the pretrained model.
        use_ssld: bool=False. Whether to use the SSLD distillation pretrained model when `pretrained=True`.
    Returns:
        model: nn.Layer. The `MobileNetV1_x0_75` model configured by the args.
    """
    model = MobileNet(scale=0.75, **kwargs)
    _load_pretrained(pretrained, model, MODEL_URLS["MobileNetV1_x0_75"],
                     use_ssld)
    return model


def MobileNetV1(pretrained=False, use_ssld=False, **kwargs):
    """
    MobileNetV1
    Args:
        pretrained: bool=False or str. If `True`, load the pretrained parameters; if `False`, do not.
                    If a str, it is treated as the path of the pretrained model.
        use_ssld: bool=False. Whether to use the SSLD distillation pretrained model when `pretrained=True`.
    Returns:
        model: nn.Layer. The `MobileNetV1` model configured by the args.
    """
    model = MobileNet(scale=1.0, **kwargs)
    _load_pretrained(pretrained, model, MODEL_URLS["MobileNetV1"], use_ssld)
    return model
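

# A minimal usage sketch (not part of the original file): it assumes `paddle`
# and the `ppcls` package are importable, builds the full-width MobileNetV1
# without pretrained weights, and checks the output shape on a random input.
if __name__ == "__main__":
    import paddle

    model = MobileNetV1(pretrained=False)
    model.eval()
    x = paddle.rand([1, 3, 224, 224])  # a single random 224x224 RGB image
    with paddle.no_grad():
        y = model(x)
    print(y.shape)  # expected: [1, 1000]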