Unverified commit 2b8e9b26, authored by littletomatodonkey, committed by GitHub

remove name (#4870)

Parent b1693f54
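The diff below strips the hand-written parameter names (the ParamAttr(name=...) weights, the bn_name strings, and the per-block conv_name strings) from the ResNet-vd backbone. In Paddle's dynamic-graph API every layer instance already receives unique, auto-generated parameter names, so nothing depends on the explicit naming scheme. A minimal sketch of that behaviour, added here for illustration only (assumes Paddle 2.x; shapes are arbitrary):

import paddle
import paddle.nn as nn

# Conv + BN without any explicit ParamAttr names: dygraph layers
# auto-generate a unique name for each parameter they create.
conv = nn.Conv2D(
    in_channels=3, out_channels=32, kernel_size=3, padding=1, bias_attr=False)
bn = nn.BatchNorm(32, act='relu')  # same call style as the new ConvBNLayer

x = paddle.randn([1, 3, 32, 32])
y = bn(conv(x))
print(conv.weight.name, y.shape)  # e.g. conv2d_0.w_0 [1, 32, 32, 32]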
@@ -25,16 +25,14 @@ __all__ = ["ResNet"]

 class ConvBNLayer(nn.Layer):
-    def __init__(
-            self,
-            in_channels,
-            out_channels,
-            kernel_size,
-            stride=1,
-            groups=1,
-            is_vd_mode=False,
-            act=None,
-            name=None, ):
+    def __init__(self,
+                 in_channels,
+                 out_channels,
+                 kernel_size,
+                 stride=1,
+                 groups=1,
+                 is_vd_mode=False,
+                 act=None):
         super(ConvBNLayer, self).__init__()
         self.is_vd_mode = is_vd_mode
@@ -47,19 +45,8 @@ class ConvBNLayer(nn.Layer):
             stride=stride,
             padding=(kernel_size - 1) // 2,
             groups=groups,
-            weight_attr=ParamAttr(name=name + "_weights"),
             bias_attr=False)
-        if name == "conv1":
-            bn_name = "bn_" + name
-        else:
-            bn_name = "bn" + name[3:]
-        self._batch_norm = nn.BatchNorm(
-            out_channels,
-            act=act,
-            param_attr=ParamAttr(name=bn_name + '_scale'),
-            bias_attr=ParamAttr(bn_name + '_offset'),
-            moving_mean_name=bn_name + '_mean',
-            moving_variance_name=bn_name + '_variance')
+        self._batch_norm = nn.BatchNorm(out_channels, act=act)

     def forward(self, inputs):
         if self.is_vd_mode:
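For reference, the running statistics that the removed param_attr / moving_mean_name / moving_variance_name arguments used to label are still created and tracked by BatchNorm; they simply keep framework-generated names. A small check sketch (my addition, assuming Paddle 2.x):

import paddle.nn as nn

bn = nn.BatchNorm(32, act='relu')  # same call as the new self._batch_norm
for key, value in bn.state_dict().items():
    print(key, list(value.shape))
# typically: weight, bias, _mean, _variance, each of shape [32]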
@@ -75,29 +62,25 @@ class BottleneckBlock(nn.Layer):
                 out_channels,
                 stride,
                 shortcut=True,
-                 if_first=False,
-                 name=None):
+                 if_first=False):
         super(BottleneckBlock, self).__init__()

         self.conv0 = ConvBNLayer(
             in_channels=in_channels,
             out_channels=out_channels,
             kernel_size=1,
-            act='relu',
-            name=name + "_branch2a")
+            act='relu')
         self.conv1 = ConvBNLayer(
             in_channels=out_channels,
             out_channels=out_channels,
             kernel_size=3,
             stride=stride,
-            act='relu',
-            name=name + "_branch2b")
+            act='relu')
         self.conv2 = ConvBNLayer(
             in_channels=out_channels,
             out_channels=out_channels * 4,
             kernel_size=1,
-            act=None,
-            name=name + "_branch2c")
+            act=None)

         if not shortcut:
             self.short = ConvBNLayer(
@@ -105,8 +88,7 @@ class BottleneckBlock(nn.Layer):
                 out_channels=out_channels * 4,
                 kernel_size=1,
                 stride=1,
-                is_vd_mode=False if if_first else True,
-                name=name + "_branch1")
+                is_vd_mode=False if if_first else True)

         self.shortcut = shortcut
@@ -125,13 +107,13 @@ class BottleneckBlock(nn.Layer):


 class BasicBlock(nn.Layer):
-    def __init__(self,
-                 in_channels,
-                 out_channels,
-                 stride,
-                 shortcut=True,
-                 if_first=False,
-                 name=None):
+    def __init__(
+            self,
+            in_channels,
+            out_channels,
+            stride,
+            shortcut=True,
+            if_first=False, ):
         super(BasicBlock, self).__init__()
         self.stride = stride
         self.conv0 = ConvBNLayer(
@@ -139,14 +121,12 @@ class BasicBlock(nn.Layer):
             out_channels=out_channels,
             kernel_size=3,
             stride=stride,
-            act='relu',
-            name=name + "_branch2a")
+            act='relu')
         self.conv1 = ConvBNLayer(
             in_channels=out_channels,
             out_channels=out_channels,
             kernel_size=3,
-            act=None,
-            name=name + "_branch2b")
+            act=None)

         if not shortcut:
             self.short = ConvBNLayer(
@@ -154,8 +134,7 @@ class BasicBlock(nn.Layer):
                 out_channels=out_channels,
                 kernel_size=1,
                 stride=1,
-                is_vd_mode=False if if_first else True,
-                name=name + "_branch1")
+                is_vd_mode=False if if_first else True)

         self.shortcut = shortcut
@@ -201,22 +180,19 @@ class ResNet(nn.Layer):
             out_channels=32,
             kernel_size=3,
             stride=2,
-            act='relu',
-            name="conv1_1")
+            act='relu')
         self.conv1_2 = ConvBNLayer(
             in_channels=32,
             out_channels=32,
             kernel_size=3,
             stride=1,
-            act='relu',
-            name="conv1_2")
+            act='relu')
         self.conv1_3 = ConvBNLayer(
             in_channels=32,
             out_channels=64,
             kernel_size=3,
             stride=1,
-            act='relu',
-            name="conv1_3")
+            act='relu')
         self.pool2d_max = nn.MaxPool2D(kernel_size=3, stride=2, padding=1)

         self.stages = []
@@ -226,13 +202,6 @@ class ResNet(nn.Layer):
                 block_list = []
                 shortcut = False
                 for i in range(depth[block]):
-                    if layers in [101, 152] and block == 2:
-                        if i == 0:
-                            conv_name = "res" + str(block + 2) + "a"
-                        else:
-                            conv_name = "res" + str(block + 2) + "b" + str(i)
-                    else:
-                        conv_name = "res" + str(block + 2) + chr(97 + i)
                     bottleneck_block = self.add_sublayer(
                         'bb_%d_%d' % (block, i),
                         BottleneckBlock(
@@ -241,8 +210,7 @@ class ResNet(nn.Layer):
                             out_channels=num_filters[block],
                             stride=2 if i == 0 and block != 0 else 1,
                             shortcut=shortcut,
-                            if_first=block == i == 0,
-                            name=conv_name))
+                            if_first=block == i == 0))
                     shortcut = True
                     block_list.append(bottleneck_block)
                 self.out_channels.append(num_filters[block] * 4)
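With the conv_name logic gone, block registration relies entirely on the 'bb_%d_%d' keys passed to add_sublayer, which are already unique per stage and per block index, so the old "res2a"/"res3b1"-style strings have no remaining consumer. An illustrative throwaway sketch of the keys this produces for the usual ResNet-50 stage depths:

depth = [3, 4, 6, 3]  # standard ResNet-50 stage depths
for block in range(len(depth)):
    for i in range(depth[block]):
        print('bb_%d_%d' % (block, i))  # bb_0_0, bb_0_1, ..., bb_3_2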
@@ -252,7 +220,6 @@ class ResNet(nn.Layer):
                 block_list = []
                 shortcut = False
                 for i in range(depth[block]):
-                    conv_name = "res" + str(block + 2) + chr(97 + i)
                     basic_block = self.add_sublayer(
                         'bb_%d_%d' % (block, i),
                         BasicBlock(
@@ -261,8 +228,7 @@ class ResNet(nn.Layer):
                             out_channels=num_filters[block],
                             stride=2 if i == 0 and block != 0 else 1,
                             shortcut=shortcut,
-                            if_first=block == i == 0,
-                            name=conv_name))
+                            if_first=block == i == 0))
                     shortcut = True
                     block_list.append(basic_block)
                 self.out_channels.append(num_filters[block])
......