Commit e0859f74 authored by: W weishengyu

formatting

Parent f9c26589
@@ -21,7 +21,6 @@ from paddle.nn import Layer, Conv2d, MaxPool2d, AdaptiveAvgPool2d, BatchNorm, Li
 from paddle.nn.initializer import MSRA
 from paddle.nn.functional import swish
-
 __all__ = [
     "ShuffleNetV2_x0_25", "ShuffleNetV2_x0_33", "ShuffleNetV2_x0_5",
     "ShuffleNetV2", "ShuffleNetV2_x1_5", "ShuffleNetV2_x2_0",
@@ -34,7 +33,8 @@ def channel_shuffle(x, groups):
     channels_per_group = num_channels // groups

     # reshape
-    x = reshape(x=x, shape=[batch_size, groups, channels_per_group, height, width])
+    x = reshape(
+        x=x, shape=[batch_size, groups, channels_per_group, height, width])

     # transpose
     x = transpose(x=x, perm=[0, 2, 1, 3, 4])
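The reshape/transpose/reshape sequence being reflowed in this hunk is the channel-shuffle trick from the ShuffleNet papers: it interleaves channels across groups so information can flow between grouped convolutions. A minimal NumPy sketch (illustrative, not part of this commit) showing the resulting channel order:

```python
import numpy as np

# Shuffle 6 channels across groups=2; order [0,1,2,3,4,5] becomes [0,3,1,4,2,5].
x = np.arange(6).reshape(1, 6, 1, 1)             # NCHW tensor with 6 channels
batch_size, num_channels, height, width = x.shape
groups = 2
channels_per_group = num_channels // groups
x = x.reshape(batch_size, groups, channels_per_group, height, width)
x = x.transpose(0, 2, 1, 3, 4)                   # swap the group and channel axes
x = x.reshape(batch_size, num_channels, height, width)
print(x.flatten())                               # [0 3 1 4 2 5]
```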
@@ -54,8 +54,7 @@ class ConvBNLayer(Layer):
                  padding,
                  groups=1,
                  act=None,
-                 name=None,
-                 ):
+                 name=None, ):
         super(ConvBNLayer, self).__init__()
         self._conv = Conv2d(
             in_channels=in_channels,
@@ -64,9 +63,9 @@ class ConvBNLayer(Layer):
             stride=stride,
             padding=padding,
             groups=groups,
-            weight_attr=ParamAttr(initializer=MSRA(), name=name + "_weights"),
-            bias_attr=False
-        )
+            weight_attr=ParamAttr(
+                initializer=MSRA(), name=name + "_weights"),
+            bias_attr=False)
         self._batch_norm = BatchNorm(
             out_channels,
@@ -74,8 +73,7 @@ class ConvBNLayer(Layer):
             bias_attr=ParamAttr(name=name + "_bn_offset"),
             act=act,
             moving_mean_name=name + "_bn_mean",
-            moving_variance_name=name + "_bn_variance"
-        )
+            moving_variance_name=name + "_bn_variance")

     def forward(self, inputs):
         y = self._conv(inputs)
@@ -84,14 +82,12 @@ class ConvBNLayer(Layer):

 class InvertedResidual(Layer):
-    def __init__(
-            self,
-            in_channels,
-            out_channels,
-            stride,
-            act="relu",
-            name=None
-    ):
+    def __init__(self,
+                 in_channels,
+                 out_channels,
+                 stride,
+                 act="relu",
+                 name=None):
         super(InvertedResidual, self).__init__()
         self._conv_pw = ConvBNLayer(
             in_channels=in_channels // 2,
@@ -101,8 +97,7 @@ class InvertedResidual(Layer):
             padding=0,
             groups=1,
             act=act,
-            name='stage_' + name + '_conv1'
-        )
+            name='stage_' + name + '_conv1')
         self._conv_dw = ConvBNLayer(
             in_channels=out_channels // 2,
             out_channels=out_channels // 2,
@@ -111,8 +106,7 @@ class InvertedResidual(Layer):
             padding=1,
             groups=out_channels // 2,
             act=None,
-            name='stage_' + name + '_conv2'
-        )
+            name='stage_' + name + '_conv2')
         self._conv_linear = ConvBNLayer(
             in_channels=out_channels // 2,
             out_channels=out_channels // 2,
@@ -121,11 +115,13 @@ class InvertedResidual(Layer):
             padding=0,
             groups=1,
             act=act,
-            name='stage_' + name + '_conv3'
-        )
+            name='stage_' + name + '_conv3')

     def forward(self, inputs):
-        x1, x2 = split(inputs, num_or_sections=[inputs.shape[1] // 2, inputs.shape[1] // 2], axis=1)
+        x1, x2 = split(
+            inputs,
+            num_or_sections=[inputs.shape[1] // 2, inputs.shape[1] // 2],
+            axis=1)
         x2 = self._conv_pw(x2)
         x2 = self._conv_dw(x2)
         x2 = self._conv_linear(x2)
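For context on the forward pass reflowed above: in the stride-1 ShuffleNetV2 unit, one channel half passes through untouched while the other goes through the pointwise-depthwise-pointwise stack; the halves are then concatenated and channel-shuffled (those lines are collapsed in this diff). A hedged sketch of that data flow, with illustrative names (`shuffle_unit`, `branch`), not the collapsed code itself:

```python
import paddle

def shuffle_unit(x, branch, groups=2):
    # Split the input into two channel halves, as in the diff above.
    half = x.shape[1] // 2
    x1, x2 = paddle.split(x, num_or_sections=[half, half], axis=1)
    x2 = branch(x2)                        # the pw -> dw -> pw-linear stack
    out = paddle.concat([x1, x2], axis=1)  # reunite the two halves
    # Re-mix channels the same way channel_shuffle() does earlier in this file.
    b, c, h, w = out.shape
    out = paddle.reshape(out, [b, groups, c // groups, h, w])
    out = paddle.transpose(out, [0, 2, 1, 3, 4])
    return paddle.reshape(out, [b, c, h, w])
```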
@@ -134,14 +130,12 @@ class InvertedResidual(Layer):

 class InvertedResidualDS(Layer):
-    def __init__(
-            self,
-            in_channels,
-            out_channels,
-            stride,
-            act="relu",
-            name=None
-    ):
+    def __init__(self,
+                 in_channels,
+                 out_channels,
+                 stride,
+                 act="relu",
+                 name=None):
         super(InvertedResidualDS, self).__init__()

         # branch1
@@ -153,8 +147,7 @@ class InvertedResidualDS(Layer):
             padding=1,
             groups=in_channels,
             act=None,
-            name='stage_' + name + '_conv4'
-        )
+            name='stage_' + name + '_conv4')
         self._conv_linear_1 = ConvBNLayer(
             in_channels=in_channels,
             out_channels=out_channels // 2,
@@ -163,8 +156,7 @@ class InvertedResidualDS(Layer):
             padding=0,
             groups=1,
             act=act,
-            name='stage_' + name + '_conv5'
-        )
+            name='stage_' + name + '_conv5')

         # branch2
         self._conv_pw_2 = ConvBNLayer(
             in_channels=in_channels,
@@ -174,8 +166,7 @@ class InvertedResidualDS(Layer):
             padding=0,
             groups=1,
             act=act,
-            name='stage_' + name + '_conv1'
-        )
+            name='stage_' + name + '_conv1')
         self._conv_dw_2 = ConvBNLayer(
             in_channels=out_channels // 2,
             out_channels=out_channels // 2,
@@ -184,8 +175,7 @@ class InvertedResidualDS(Layer):
             padding=1,
             groups=out_channels // 2,
             act=None,
-            name='stage_' + name + '_conv2'
-        )
+            name='stage_' + name + '_conv2')
         self._conv_linear_2 = ConvBNLayer(
             in_channels=out_channels // 2,
             out_channels=out_channels // 2,
@@ -194,8 +184,7 @@ class InvertedResidualDS(Layer):
             padding=0,
             groups=1,
             act=act,
-            name='stage_' + name + '_conv3'
-        )
+            name='stage_' + name + '_conv3')

     def forward(self, inputs):
         x1 = self._conv_dw_1(inputs)
@@ -238,13 +227,8 @@ class ShuffleNet(Layer):
             stride=2,
             padding=1,
             act=act,
-            name='stage1_conv'
-        )
-        self._max_pool = MaxPool2d(
-            kernel_size=3,
-            stride=2,
-            padding=1
-        )
+            name='stage1_conv')
+        self._max_pool = MaxPool2d(kernel_size=3, stride=2, padding=1)

         # 2. bottleneck sequences
         self._block_list = []
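A quick sanity check on the stem configured in the hunk above (illustrative arithmetic, not from the commit): with a 224x224 input, the stride-2 3x3 conv and the stride-2 max pool each halve the spatial size, so the bottleneck stages start from 56x56 feature maps.

```python
# Output size of a conv/pool with kernel 3, stride 2, padding 1:
def out_size(size, kernel=3, stride=2, padding=1):
    return (size + 2 * padding - kernel) // stride + 1

assert out_size(224) == 112   # after stage1_conv
assert out_size(112) == 56    # after the max pool
```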
@@ -258,9 +242,7 @@ class ShuffleNet(Layer):
                         out_channels=stage_out_channels[stage_id + 2],
                         stride=2,
                         act=act,
-                        name=str(stage_id + 2) + '_' + str(i + 1)
-                    )
-                )
+                        name=str(stage_id + 2) + '_' + str(i + 1)))
                 else:
                     block = self.add_sublayer(
                         name=str(stage_id + 2) + '_' + str(i + 1),
@@ -269,9 +251,7 @@ class ShuffleNet(Layer):
                         out_channels=stage_out_channels[stage_id + 2],
                         stride=1,
                         act=act,
-                        name=str(stage_id + 2) + '_' + str(i + 1)
-                    )
-                )
+                        name=str(stage_id + 2) + '_' + str(i + 1)))
                 self._block_list.append(block)
         # 3. last_conv
         self._last_conv = ConvBNLayer(
@@ -281,8 +261,7 @@ class ShuffleNet(Layer):
             stride=1,
             padding=0,
             act=act,
-            name='conv5'
-        )
+            name='conv5')
         # 4. pool
         self._pool2d_avg = AdaptiveAvgPool2d(1)
         self._out_c = stage_out_channels[-1]
@@ -291,8 +270,7 @@ class ShuffleNet(Layer):
             stage_out_channels[-1],
             class_dim,
             weight_attr=ParamAttr(name='fc6_weights'),
-            bias_attr=ParamAttr(name='fc6_offset')
-        )
+            bias_attr=ParamAttr(name='fc6_offset'))

     def forward(self, inputs):
         y = self._conv1(inputs)