# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# reference: https://arxiv.org/abs/1610.02357
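#
# This module builds the Xception backbone out of three stages: an entry flow
# (stem convolutions plus downsampling bottleneck blocks), a middle flow of
# repeated 728-channel residual blocks, and an exit flow that widens to 2048
# channels before global average pooling and the fully connected classifier.
# The Xception41, Xception65 and Xception71 variants exported below differ only
# in how many entry-flow and middle-flow blocks they stack.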

import paddle
from paddle import ParamAttr
import paddle.nn as nn
import paddle.nn.functional as F
from paddle.nn import Conv2D, BatchNorm, Linear, Dropout
from paddle.nn import AdaptiveAvgPool2D, MaxPool2D, AvgPool2D
from paddle.nn.initializer import Uniform
import math
import sys

from ....utils.save_load import load_dygraph_pretrain, load_dygraph_pretrain_from_url

MODEL_URLS = {
    "Xception41":
    "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/Xception41_pretrained.pdparams",
    "Xception65":
    "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/Xception65_pretrained.pdparams",
    "Xception71":
    "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/Xception71_pretrained.pdparams"
}

__all__ = list(MODEL_URLS.keys())


class ConvBNLayer(nn.Layer):
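    """Conv2D followed by BatchNorm; the optional activation is applied inside
    the BatchNorm layer via its `act` argument."""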
    def __init__(self,
                 num_channels,
                 num_filters,
                 filter_size,
                 stride=1,
                 groups=1,
                 act=None,
                 name=None):
        super(ConvBNLayer, self).__init__()

        self._conv = Conv2D(
            in_channels=num_channels,
            out_channels=num_filters,
            kernel_size=filter_size,
            stride=stride,
            padding=(filter_size - 1) // 2,
            groups=groups,
            weight_attr=ParamAttr(name=name + "_weights"),
            bias_attr=False)
        bn_name = "bn_" + name
        self._batch_norm = BatchNorm(
            num_filters,
            act=act,
            param_attr=ParamAttr(name=bn_name + "_scale"),
            bias_attr=ParamAttr(name=bn_name + "_offset"),
            moving_mean_name=bn_name + '_mean',
            moving_variance_name=bn_name + '_variance')

    def forward(self, inputs):
        y = self._conv(inputs)
        y = self._batch_norm(y)
        return y


class SeparableConv(nn.Layer):
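    """Separable convolution block: a 1x1 pointwise ConvBNLayer followed by a
    3x3 depthwise ConvBNLayer (groups equal to the number of channels)."""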
    def __init__(self, input_channels, output_channels, stride=1, name=None):
        super(SeparableConv, self).__init__()

        self._pointwise_conv = ConvBNLayer(
            input_channels, output_channels, 1, name=name + "_sep")
        self._depthwise_conv = ConvBNLayer(
            output_channels,
            output_channels,
            3,
            stride=stride,
            groups=output_channels,
            name=name + "_dw")

    def forward(self, inputs):
        x = self._pointwise_conv(inputs)
        x = self._depthwise_conv(x)
        return x


class EntryFlowBottleneckBlock(nn.Layer):
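    """Entry-flow residual block. The main branch applies two separable
    convolutions and a 3x3 max pool; the shortcut is a 1x1 convolution. Both
    branches use the block's `stride`. `relu_first` controls whether a ReLU is
    applied before the first separable convolution."""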
    def __init__(self,
                 input_channels,
                 output_channels,
                 stride=2,
                 name=None,
                 relu_first=False):
        super(EntryFlowBottleneckBlock, self).__init__()
        self.relu_first = relu_first

        self._short = Conv2D(
            in_channels=input_channels,
            out_channels=output_channels,
            kernel_size=1,
            stride=stride,
            padding=0,
            weight_attr=ParamAttr(name + "_branch1_weights"),
            bias_attr=False)
        self._conv1 = SeparableConv(
            input_channels,
            output_channels,
            stride=1,
            name=name + "_branch2a_weights")
        self._conv2 = SeparableConv(
            output_channels,
            output_channels,
            stride=1,
            name=name + "_branch2b_weights")
        self._pool = MaxPool2D(kernel_size=3, stride=stride, padding=1)

    def forward(self, inputs):
        conv0 = inputs
        short = self._short(inputs)
        if self.relu_first:
            conv0 = F.relu(conv0)
        conv1 = self._conv1(conv0)
        conv2 = F.relu(conv1)
        conv2 = self._conv2(conv2)
        pool = self._pool(conv2)
        return paddle.add(x=short, y=pool)


class EntryFlow(nn.Layer):
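    """Entry flow: two stem ConvBNLayers (3 -> 32 -> 64 channels) followed by a
    chain of EntryFlowBottleneckBlocks ending at 728 channels. `block_num` must
    be 3 (Xception41/65) or 5 (Xception71); any other value exits the process."""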
    def __init__(self, block_num=3):
        super(EntryFlow, self).__init__()

        name = "entry_flow"
        self.block_num = block_num
        self._conv1 = ConvBNLayer(
            3, 32, 3, stride=2, act="relu", name=name + "_conv1")
        self._conv2 = ConvBNLayer(32, 64, 3, act="relu", name=name + "_conv2")
        if block_num == 3:
            self._conv_0 = EntryFlowBottleneckBlock(
                64, 128, stride=2, name=name + "_0", relu_first=False)
            self._conv_1 = EntryFlowBottleneckBlock(
                128, 256, stride=2, name=name + "_1", relu_first=True)
            self._conv_2 = EntryFlowBottleneckBlock(
                256, 728, stride=2, name=name + "_2", relu_first=True)
        elif block_num == 5:
            self._conv_0 = EntryFlowBottleneckBlock(
                64, 128, stride=2, name=name + "_0", relu_first=False)
            self._conv_1 = EntryFlowBottleneckBlock(
                128, 256, stride=1, name=name + "_1", relu_first=True)
            self._conv_2 = EntryFlowBottleneckBlock(
                256, 256, stride=2, name=name + "_2", relu_first=True)
            self._conv_3 = EntryFlowBottleneckBlock(
                256, 728, stride=1, name=name + "_3", relu_first=True)
            self._conv_4 = EntryFlowBottleneckBlock(
                728, 728, stride=2, name=name + "_4", relu_first=True)
        else:
            sys.exit(-1)

    def forward(self, inputs):
        x = self._conv1(inputs)
        x = self._conv2(x)

        if self.block_num == 3:
            x = self._conv_0(x)
            x = self._conv_1(x)
            x = self._conv_2(x)
        elif self.block_num == 5:
            x = self._conv_0(x)
            x = self._conv_1(x)
            x = self._conv_2(x)
            x = self._conv_3(x)
            x = self._conv_4(x)
        return x


class MiddleFlowBottleneckBlock(nn.Layer):
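    """Middle-flow residual block: three pre-activated separable convolutions
    (ReLU before each) with an identity shortcut."""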
    def __init__(self, input_channels, output_channels, name):
        super(MiddleFlowBottleneckBlock, self).__init__()

        self._conv_0 = SeparableConv(
            input_channels,
            output_channels,
            stride=1,
            name=name + "_branch2a_weights")
        self._conv_1 = SeparableConv(
            output_channels,
            output_channels,
            stride=1,
            name=name + "_branch2b_weights")
        self._conv_2 = SeparableConv(
            output_channels,
            output_channels,
            stride=1,
            name=name + "_branch2c_weights")

    def forward(self, inputs):
        conv0 = F.relu(inputs)
        conv0 = self._conv_0(conv0)
        conv1 = F.relu(conv0)
        conv1 = self._conv_1(conv1)
        conv2 = F.relu(conv1)
        conv2 = self._conv_2(conv2)
        return paddle.add(x=inputs, y=conv2)


class MiddleFlow(nn.Layer):
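    """Middle flow: a stack of 728-channel MiddleFlowBottleneckBlocks, either
    8 blocks (Xception41) or 16 blocks (Xception65/71) depending on `block_num`."""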
    def __init__(self, block_num=8):
        super(MiddleFlow, self).__init__()

        self.block_num = block_num
        self._conv_0 = MiddleFlowBottleneckBlock(
            728, 728, name="middle_flow_0")
        self._conv_1 = MiddleFlowBottleneckBlock(
            728, 728, name="middle_flow_1")
        self._conv_2 = MiddleFlowBottleneckBlock(
            728, 728, name="middle_flow_2")
        self._conv_3 = MiddleFlowBottleneckBlock(
            728, 728, name="middle_flow_3")
        self._conv_4 = MiddleFlowBottleneckBlock(
            728, 728, name="middle_flow_4")
        self._conv_5 = MiddleFlowBottleneckBlock(
            728, 728, name="middle_flow_5")
        self._conv_6 = MiddleFlowBottleneckBlock(
            728, 728, name="middle_flow_6")
        self._conv_7 = MiddleFlowBottleneckBlock(
            728, 728, name="middle_flow_7")
        if block_num == 16:
            self._conv_8 = MiddleFlowBottleneckBlock(
                728, 728, name="middle_flow_8")
            self._conv_9 = MiddleFlowBottleneckBlock(
                728, 728, name="middle_flow_9")
            self._conv_10 = MiddleFlowBottleneckBlock(
                728, 728, name="middle_flow_10")
            self._conv_11 = MiddleFlowBottleneckBlock(
                728, 728, name="middle_flow_11")
            self._conv_12 = MiddleFlowBottleneckBlock(
                728, 728, name="middle_flow_12")
            self._conv_13 = MiddleFlowBottleneckBlock(
                728, 728, name="middle_flow_13")
            self._conv_14 = MiddleFlowBottleneckBlock(
                728, 728, name="middle_flow_14")
            self._conv_15 = MiddleFlowBottleneckBlock(
                728, 728, name="middle_flow_15")

    def forward(self, inputs):
        x = self._conv_0(inputs)
        x = self._conv_1(x)
        x = self._conv_2(x)
        x = self._conv_3(x)
        x = self._conv_4(x)
        x = self._conv_5(x)
        x = self._conv_6(x)
        x = self._conv_7(x)
        if self.block_num == 16:
            x = self._conv_8(x)
            x = self._conv_9(x)
            x = self._conv_10(x)
            x = self._conv_11(x)
            x = self._conv_12(x)
            x = self._conv_13(x)
            x = self._conv_14(x)
            x = self._conv_15(x)
        return x


class ExitFlowBottleneckBlock(nn.Layer):
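    """Exit-flow residual block: two separable convolutions and a stride-2 3x3
    max pool on the main branch, added to a stride-2 1x1 convolution shortcut."""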
    def __init__(self, input_channels, output_channels1, output_channels2,
                 name):
        super(ExitFlowBottleneckBlock, self).__init__()

        self._short = Conv2D(
            in_channels=input_channels,
            out_channels=output_channels2,
            kernel_size=1,
            stride=2,
            padding=0,
            weight_attr=ParamAttr(name + "_branch1_weights"),
            bias_attr=False)
        self._conv_1 = SeparableConv(
            input_channels,
            output_channels1,
            stride=1,
            name=name + "_branch2a_weights")
        self._conv_2 = SeparableConv(
            output_channels1,
            output_channels2,
            stride=1,
            name=name + "_branch2b_weights")
        self._pool = MaxPool2D(kernel_size=3, stride=2, padding=1)

    def forward(self, inputs):
        short = self._short(inputs)
        conv0 = F.relu(inputs)
        conv1 = self._conv_1(conv0)
        conv2 = F.relu(conv1)
        conv2 = self._conv_2(conv2)
        pool = self._pool(conv2)
        return paddle.add(x=short, y=pool)


class ExitFlow(nn.Layer):
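    """Exit flow: an ExitFlowBottleneckBlock (728 -> 1024 channels), two
    separable convolutions widening to 1536 and then 2048 channels, global
    average pooling, and the fully connected classification layer."""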
    def __init__(self, class_num):
        super(ExitFlow, self).__init__()

        name = "exit_flow"

        self._conv_0 = ExitFlowBottleneckBlock(
            728, 728, 1024, name=name + "_1")
        self._conv_1 = SeparableConv(1024, 1536, stride=1, name=name + "_2")
        self._conv_2 = SeparableConv(1536, 2048, stride=1, name=name + "_3")
        self._pool = AdaptiveAvgPool2D(1)
        stdv = 1.0 / math.sqrt(2048 * 1.0)
        self._out = Linear(
            2048,
            class_num,
            weight_attr=ParamAttr(
                name="fc_weights", initializer=Uniform(-stdv, stdv)),
            bias_attr=ParamAttr(name="fc_offset"))

    def forward(self, inputs):
        conv0 = self._conv_0(inputs)
        conv1 = self._conv_1(conv0)
        conv1 = F.relu(conv1)
        conv2 = self._conv_2(conv1)
        conv2 = F.relu(conv2)
        pool = self._pool(conv2)
        pool = paddle.flatten(pool, start_axis=1, stop_axis=-1)
        out = self._out(pool)
        return out


class Xception(nn.Layer):
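    """Xception backbone (https://arxiv.org/abs/1610.02357), assembled from the
    entry, middle and exit flows above. The variant is selected by the block
    counts: (entry_flow_block_num, middle_flow_block_num) is (3, 8) for
    Xception41, (3, 16) for Xception65 and (5, 16) for Xception71.
    """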
    def __init__(self,
                 entry_flow_block_num=3,
                 middle_flow_block_num=8,
                 class_num=1000):
        super(Xception, self).__init__()
        self.entry_flow_block_num = entry_flow_block_num
        self.middle_flow_block_num = middle_flow_block_num
        self._entry_flow = EntryFlow(entry_flow_block_num)
        self._middle_flow = MiddleFlow(middle_flow_block_num)
        self._exit_flow = ExitFlow(class_num)

    def forward(self, inputs):
        x = self._entry_flow(inputs)
        x = self._middle_flow(x)
        x = self._exit_flow(x)
        return x


def _load_pretrained(pretrained, model, model_url, use_ssld=False):
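    """Load pretrained weights: `pretrained` may be False (skip loading), True
    (fetch the weights from `model_url`), or a string path to local weights."""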
    if pretrained is False:
        pass
    elif pretrained is True:
        load_dygraph_pretrain_from_url(model, model_url, use_ssld=use_ssld)
    elif isinstance(pretrained, str):
        load_dygraph_pretrain(model, pretrained)
    else:
        raise RuntimeError(
            "pretrained type is not available. Please use `string` or `boolean` type."
        )


def Xception41(pretrained=False, use_ssld=False, **kwargs):
    model = Xception(entry_flow_block_num=3, middle_flow_block_num=8, **kwargs)
    _load_pretrained(
        pretrained, model, MODEL_URLS["Xception41"], use_ssld=use_ssld)
    return model


def Xception65(pretrained=False, use_ssld=False, **kwargs):
    model = Xception(
        entry_flow_block_num=3, middle_flow_block_num=16, **kwargs)
    _load_pretrained(
        pretrained, model, MODEL_URLS["Xception65"], use_ssld=use_ssld)
    return model


def Xception71(pretrained=False, use_ssld=False, **kwargs):
    model = Xception(
        entry_flow_block_num=5, middle_flow_block_num=16, **kwargs)
    _load_pretrained(
        pretrained, model, MODEL_URLS["Xception71"], use_ssld=use_ssld)
    return model
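

# A minimal usage sketch. The package-relative import of `load_dygraph_pretrain`
# above means this module is meant to be imported through the PaddleClas package
# rather than executed directly; the `ppcls.arch.backbone.model_zoo.xception`
# import path below is an assumption based on that layout.
#
#     import paddle
#     from ppcls.arch.backbone.model_zoo.xception import Xception41
#
#     model = Xception41(pretrained=False, class_num=1000)
#     model.eval()
#     x = paddle.rand([1, 3, 299, 299])  # 299x299 as in the paper; other sizes
#                                        # also work thanks to AdaptiveAvgPool2D
#     with paddle.no_grad():
#         logits = model(x)              # shape: [1, 1000]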