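"""Xception models (Xception41 / Xception65 / Xception71) for PaddlePaddle.

The network follows the original Xception layout: an entry flow, a middle flow
and an exit flow built from separable convolutions. Pretrained weights can be
loaded from the URLs listed in MODEL_URLS below.
"""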
import paddle
from paddle import ParamAttr
import paddle.nn as nn
import paddle.nn.functional as F
from paddle.nn import Conv2D, BatchNorm, Linear, Dropout
from paddle.nn import AdaptiveAvgPool2D, MaxPool2D, AvgPool2D
from paddle.nn.initializer import Uniform
import math
import sys


from ppcls.utils.save_load import load_dygraph_pretrain, load_dygraph_pretrain_from_url

MODEL_URLS = {
    "Xception41":
    "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/Xception41_pretrained.pdparams",
    "Xception65":
    "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/Xception65_pretrained.pdparams",
    "Xception71":
    "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/Xception71_pretrained.pdparams",
}

__all__ = list(MODEL_URLS.keys())


class ConvBNLayer(nn.Layer):
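    """Conv2D (no bias) followed by BatchNorm, with an optional activation applied by the BN layer."""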
    def __init__(self,
                 num_channels,
                 num_filters,
                 filter_size,
                 stride=1,
                 groups=1,
                 act=None,
                 name=None):
        super(ConvBNLayer, self).__init__()

        self._conv = Conv2D(
            in_channels=num_channels,
            out_channels=num_filters,
            kernel_size=filter_size,
            stride=stride,
            padding=(filter_size - 1) // 2,
            groups=groups,
            weight_attr=ParamAttr(name=name + "_weights"),
            bias_attr=False)
        bn_name = "bn_" + name
        self._batch_norm = BatchNorm(
            num_filters,
            act=act,
            param_attr=ParamAttr(name=bn_name + "_scale"),
            bias_attr=ParamAttr(name=bn_name + "_offset"),
            moving_mean_name=bn_name + '_mean',
            moving_variance_name=bn_name + '_variance')

    def forward(self, inputs):
        y = self._conv(inputs)
        y = self._batch_norm(y)
        return y


class SeparableConv(nn.Layer):
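    """Separable convolution block: a 1x1 pointwise ConvBNLayer followed by a 3x3 depthwise ConvBNLayer."""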
    def __init__(self, input_channels, output_channels, stride=1, name=None):
        super(SeparableConv, self).__init__()

        self._pointwise_conv = ConvBNLayer(
            input_channels, output_channels, 1, name=name + "_sep")
        self._depthwise_conv = ConvBNLayer(
            output_channels,
            output_channels,
            3,
            stride=stride,
            groups=output_channels,
            name=name + "_dw")

    def forward(self, inputs):
        x = self._pointwise_conv(inputs)
        x = self._depthwise_conv(x)
        return x


class EntryFlowBottleneckBlock(nn.Layer):
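    """Entry-flow residual block: a strided 1x1 shortcut added to two separable convs plus a 3x3 max pool."""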
    def __init__(self,
                 input_channels,
                 output_channels,
                 stride=2,
                 name=None,
                 relu_first=False):
        super(EntryFlowBottleneckBlock, self).__init__()
        self.relu_first = relu_first

        self._short = Conv2D(
            in_channels=input_channels,
            out_channels=output_channels,
            kernel_size=1,
            stride=stride,
            padding=0,
            weight_attr=ParamAttr(name + "_branch1_weights"),
            bias_attr=False)
        self._conv1 = SeparableConv(
            input_channels,
            output_channels,
            stride=1,
            name=name + "_branch2a_weights")
        self._conv2 = SeparableConv(
            output_channels,
            output_channels,
            stride=1,
            name=name + "_branch2b_weights")
        self._pool = MaxPool2D(kernel_size=3, stride=stride, padding=1)

    def forward(self, inputs):
        conv0 = inputs
        short = self._short(inputs)
        if self.relu_first:
            conv0 = F.relu(conv0)
        conv1 = self._conv1(conv0)
        conv2 = F.relu(conv1)
        conv2 = self._conv2(conv2)
        pool = self._pool(conv2)
        return paddle.add(x=short, y=pool)


class EntryFlow(nn.Layer):
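    """Entry flow: two stem ConvBNLayers followed by 3 (Xception41/65) or 5 (Xception71) bottleneck blocks."""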
    def __init__(self, block_num=3):
        super(EntryFlow, self).__init__()

        name = "entry_flow"
        self.block_num = block_num
        self._conv1 = ConvBNLayer(
            3, 32, 3, stride=2, act="relu", name=name + "_conv1")
        self._conv2 = ConvBNLayer(32, 64, 3, act="relu", name=name + "_conv2")
        if block_num == 3:
            self._conv_0 = EntryFlowBottleneckBlock(
                64, 128, stride=2, name=name + "_0", relu_first=False)
            self._conv_1 = EntryFlowBottleneckBlock(
                128, 256, stride=2, name=name + "_1", relu_first=True)
            self._conv_2 = EntryFlowBottleneckBlock(
                256, 728, stride=2, name=name + "_2", relu_first=True)
        elif block_num == 5:
            self._conv_0 = EntryFlowBottleneckBlock(
                64, 128, stride=2, name=name + "_0", relu_first=False)
            self._conv_1 = EntryFlowBottleneckBlock(
                128, 256, stride=1, name=name + "_1", relu_first=True)
            self._conv_2 = EntryFlowBottleneckBlock(
                256, 256, stride=2, name=name + "_2", relu_first=True)
            self._conv_3 = EntryFlowBottleneckBlock(
                256, 728, stride=1, name=name + "_3", relu_first=True)
            self._conv_4 = EntryFlowBottleneckBlock(
                728, 728, stride=2, name=name + "_4", relu_first=True)
        else:
            sys.exit(-1)

    def forward(self, inputs):
        x = self._conv1(inputs)
        x = self._conv2(x)

        if self.block_num == 3:
            x = self._conv_0(x)
            x = self._conv_1(x)
            x = self._conv_2(x)
        elif self.block_num == 5:
            x = self._conv_0(x)
            x = self._conv_1(x)
            x = self._conv_2(x)
            x = self._conv_3(x)
            x = self._conv_4(x)
        return x


class MiddleFlowBottleneckBlock(nn.Layer):
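    """Middle-flow residual block: three ReLU-preactivated separable convs with an identity skip connection."""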
    def __init__(self, input_channels, output_channels, name):
        super(MiddleFlowBottleneckBlock, self).__init__()

        self._conv_0 = SeparableConv(
            input_channels,
            output_channels,
            stride=1,
            name=name + "_branch2a_weights")
        self._conv_1 = SeparableConv(
            output_channels,
            output_channels,
            stride=1,
            name=name + "_branch2b_weights")
        self._conv_2 = SeparableConv(
            output_channels,
            output_channels,
            stride=1,
            name=name + "_branch2c_weights")

    def forward(self, inputs):
        conv0 = F.relu(inputs)
        conv0 = self._conv_0(conv0)
        conv1 = F.relu(conv0)
        conv1 = self._conv_1(conv1)
        conv2 = F.relu(conv1)
        conv2 = self._conv_2(conv2)
        return paddle.add(x=inputs, y=conv2)


class MiddleFlow(nn.Layer):
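    """Middle flow: 8 (Xception41) or 16 (Xception65/71) bottleneck blocks at a constant width of 728 channels."""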
    def __init__(self, block_num=8):
        super(MiddleFlow, self).__init__()

        self.block_num = block_num
        self._conv_0 = MiddleFlowBottleneckBlock(
            728, 728, name="middle_flow_0")
        self._conv_1 = MiddleFlowBottleneckBlock(
            728, 728, name="middle_flow_1")
        self._conv_2 = MiddleFlowBottleneckBlock(
            728, 728, name="middle_flow_2")
        self._conv_3 = MiddleFlowBottleneckBlock(
            728, 728, name="middle_flow_3")
        self._conv_4 = MiddleFlowBottleneckBlock(
            728, 728, name="middle_flow_4")
        self._conv_5 = MiddleFlowBottleneckBlock(
            728, 728, name="middle_flow_5")
        self._conv_6 = MiddleFlowBottleneckBlock(
            728, 728, name="middle_flow_6")
        self._conv_7 = MiddleFlowBottleneckBlock(
            728, 728, name="middle_flow_7")
        if block_num == 16:
            self._conv_8 = MiddleFlowBottleneckBlock(
                728, 728, name="middle_flow_8")
            self._conv_9 = MiddleFlowBottleneckBlock(
                728, 728, name="middle_flow_9")
            self._conv_10 = MiddleFlowBottleneckBlock(
                728, 728, name="middle_flow_10")
            self._conv_11 = MiddleFlowBottleneckBlock(
                728, 728, name="middle_flow_11")
            self._conv_12 = MiddleFlowBottleneckBlock(
                728, 728, name="middle_flow_12")
            self._conv_13 = MiddleFlowBottleneckBlock(
                728, 728, name="middle_flow_13")
            self._conv_14 = MiddleFlowBottleneckBlock(
                728, 728, name="middle_flow_14")
            self._conv_15 = MiddleFlowBottleneckBlock(
                728, 728, name="middle_flow_15")

    def forward(self, inputs):
        x = self._conv_0(inputs)
        x = self._conv_1(x)
        x = self._conv_2(x)
        x = self._conv_3(x)
        x = self._conv_4(x)
        x = self._conv_5(x)
        x = self._conv_6(x)
        x = self._conv_7(x)
        if self.block_num == 16:
            x = self._conv_8(x)
            x = self._conv_9(x)
            x = self._conv_10(x)
            x = self._conv_11(x)
            x = self._conv_12(x)
            x = self._conv_13(x)
            x = self._conv_14(x)
            x = self._conv_15(x)
        return x


class ExitFlowBottleneckBlock(nn.Layer):
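    """Exit-flow residual block: a strided 1x1 shortcut added to two separable convs plus a strided max pool."""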
    def __init__(self, input_channels, output_channels1, output_channels2,
                 name):
        super(ExitFlowBottleneckBlock, self).__init__()

        self._short = Conv2D(
            in_channels=input_channels,
            out_channels=output_channels2,
            kernel_size=1,
            stride=2,
            padding=0,
            weight_attr=ParamAttr(name + "_branch1_weights"),
            bias_attr=False)
        self._conv_1 = SeparableConv(
            input_channels,
            output_channels1,
            stride=1,
            name=name + "_branch2a_weights")
        self._conv_2 = SeparableConv(
            output_channels1,
            output_channels2,
            stride=1,
            name=name + "_branch2b_weights")
        self._pool = MaxPool2D(kernel_size=3, stride=2, padding=1)

    def forward(self, inputs):
        short = self._short(inputs)
        conv0 = F.relu(inputs)
        conv1 = self._conv_1(conv0)
        conv2 = F.relu(conv1)
        conv2 = self._conv_2(conv2)
        pool = self._pool(conv2)
        return paddle.add(x=short, y=pool)


class ExitFlow(nn.Layer):
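    """Exit flow: a bottleneck block, two separable convs, global average pooling and the final FC classifier."""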
    def __init__(self, class_dim):
        super(ExitFlow, self).__init__()

        name = "exit_flow"

        self._conv_0 = ExitFlowBottleneckBlock(
            728, 728, 1024, name=name + "_1")
        self._conv_1 = SeparableConv(1024, 1536, stride=1, name=name + "_2")
        self._conv_2 = SeparableConv(1536, 2048, stride=1, name=name + "_3")
        self._pool = AdaptiveAvgPool2D(1)
        stdv = 1.0 / math.sqrt(2048 * 1.0)
        self._out = Linear(
            2048,
            class_dim,
            weight_attr=ParamAttr(
                name="fc_weights", initializer=Uniform(-stdv, stdv)),
            bias_attr=ParamAttr(name="fc_offset"))

    def forward(self, inputs):
        conv0 = self._conv_0(inputs)
        conv1 = self._conv_1(conv0)
        conv1 = F.relu(conv1)
        conv2 = self._conv_2(conv1)
        conv2 = F.relu(conv2)
        pool = self._pool(conv2)
        pool = paddle.flatten(pool, start_axis=1, stop_axis=-1)
        out = self._out(pool)
        return out


class Xception(nn.Layer):
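    """Xception backbone that chains the entry, middle and exit flows.

    Xception41 uses (entry, middle) block numbers (3, 8), Xception65 uses
    (3, 16) and Xception71 uses (5, 16); see the constructors below.
    """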
    def __init__(self,
                 entry_flow_block_num=3,
                 middle_flow_block_num=8,
                 class_dim=1000):
        super(Xception, self).__init__()
        self.entry_flow_block_num = entry_flow_block_num
        self.middle_flow_block_num = middle_flow_block_num
        self._entry_flow = EntryFlow(entry_flow_block_num)
        self._middle_flow = MiddleFlow(middle_flow_block_num)
        self._exit_flow = ExitFlow(class_dim)

    def forward(self, inputs):
        x = self._entry_flow(inputs)
        x = self._middle_flow(x)
        x = self._exit_flow(x)
        return x

def _load_pretrained(pretrained, model, model_url, use_ssld=False):
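    """Load weights according to `pretrained`: True downloads from `model_url`,
    a string is treated as a local checkpoint path, and False skips loading."""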
    if pretrained is False:
        pass
    elif pretrained is True:
        load_dygraph_pretrain_from_url(model, model_url, use_ssld=use_ssld)
    elif isinstance(pretrained, str):
        load_dygraph_pretrain(model, pretrained)
    else:
        raise RuntimeError(
            "pretrained type is not available. Please use `string` or `boolean` type."
        )
        

def Xception41(pretrained=False, use_ssld=False, **kwargs):
    model = Xception(entry_flow_block_num=3, middle_flow_block_num=8, **kwargs)
    _load_pretrained(pretrained, model, MODEL_URLS["Xception41"], use_ssld=use_ssld)
    return model


def Xception65(pretrained=False, use_ssld=False, **kwargs):
    model = Xception(entry_flow_block_num=3, middle_flow_block_num=16, **kwargs)
    _load_pretrained(pretrained, model, MODEL_URLS["Xception65"], use_ssld=use_ssld)
    return model


def Xception71(pretrained=False, use_ssld=False, **kwargs):
    model = Xception(entry_flow_block_num=5, middle_flow_block_num=16, **kwargs)
    _load_pretrained(pretrained, model, MODEL_URLS["Xception71"], use_ssld=use_ssld)
    return model
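

if __name__ == "__main__":
    # Minimal usage sketch (illustrative only): build a randomly initialized
    # Xception41 and push a single 299x299 RGB image through it; the logits
    # shape should be [batch_size, class_dim].
    net = Xception41(pretrained=False)
    net.eval()
    img = paddle.rand([1, 3, 299, 299])
    logits = net(img)
    print(logits.shape)  # expected: [1, 1000]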