Commit b8c5848d authored by ceci3

add resnet

Parent 72c800e9
```diff
@@ -20,7 +20,7 @@ def conv_bn_layer(input,
                   filter_size,
                   num_filters,
                   stride,
-                  padding,
+                  padding='SAME',
                   num_groups=1,
                   act=None,
                   name=None,
@@ -51,15 +51,10 @@ def conv_bn_layer(input,
         param_attr=ParamAttr(name=name + '_weights'),
         bias_attr=False)
     bn_name = name + '_bn'
-    bn = fluid.layers.batch_norm(
+    return fluid.layers.batch_norm(
         input=conv,
+        act=act,
         param_attr=ParamAttr(name=bn_name + '_scale'),
         bias_attr=ParamAttr(name=bn_name + '_offset'),
         moving_mean_name=bn_name + '_mean',
         moving_variance_name=bn_name + '_variance')
-    if act == 'relu6':
-        return fluid.layers.relu6(bn)
-    elif act == 'sigmoid':
-        return fluid.layers.sigmoid(bn)
-    else:
-        return bn
```
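The change above folds the activation into `fluid.layers.batch_norm` via its `act` argument instead of applying it by hand afterwards. A minimal sketch of that equivalence is shown below; it assumes the PaddlePaddle 1.x static-graph `fluid` API, and the two helpers (`bn_act_old`, `bn_act_new`) are illustrative names, not part of the repository.

```python
import paddle.fluid as fluid


def bn_act_old(conv, act=None):
    # Old behaviour: run batch_norm without an activation, then apply the
    # requested activation by hand on the result.
    bn = fluid.layers.batch_norm(input=conv)
    if act == 'relu6':
        return fluid.layers.relu6(bn)
    elif act == 'sigmoid':
        return fluid.layers.sigmoid(bn)
    else:
        return bn


def bn_act_new(conv, act=None):
    # New behaviour: forward the activation name to batch_norm, which appends
    # the activation itself (act=None means no activation).
    return fluid.layers.batch_norm(input=conv, act=act)
```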
```diff
@@ -28,35 +28,88 @@ __all__ = ["ResNetSpace"]
 @SEARCHSPACE.register
 class ResNetSpace(SearchSpaceBase):
-    def __init__(self,
-                 input_size,
-                 output_size,
-                 block_num,
-                 scale=1.0,
-                 class_dim=1000):
+    def __init__(self,
+                 input_size,
+                 output_size,
+                 block_num,
+                 extract_feature=False,
+                 class_dim=1000):
         super(ResNetSpace, self).__init__(input_size, output_size, block_num)
-        pass
+        self.filter_num1 = np.array([48, 64, 96, 128, 160, 192, 224])  # 7
+        self.filter_num2 = np.array([64, 96, 128, 160, 192, 256, 320])  # 7
+        self.filter_num3 = np.array([128, 160, 192, 256, 320, 384])  # 6
+        self.filter_num4 = np.array([192, 256, 384, 512, 640])  # 5
+        self.repeat1 = [2, 3, 4, 5, 6]  # 5
+        self.repeat2 = [2, 3, 4, 5, 6, 7]  # 6
+        self.repeat3 = [2, 3, 4, 5, 6, 7, 8, 10, 12, 14, 16, 20, 24]  # 13
+        self.repeat4 = [2, 3, 4, 5, 6, 7]  # 6
+        self.class_dim = class_dim
+        self.extract_feature = extract_feature

     def init_tokens(self):
-        return [0, 0, 0, 0, 0, 0]
+        init_token_base = [0, 0, 0, 0, 0, 0, 0, 0]
+        self.token_len = self.block_num * 2
+        return init_token_base[:self.token_len]

     def range_table(self):
-        return [2, 2, 2, 2, 2, 2]
+        range_table_base = [3, 3, 3, 3, 3, 3, 3, 3]
+        return range_table_base[:self.token_len]

     def token2arch(self, tokens=None):
+        assert self.block_num < 5, 'block number must less than 5, but receive block number is {}'.format(
+            self.block_num)
         if tokens is None:
-            self.init_tokens()
+            tokens = self.init_tokens()

         def net_arch(input):
-            input = conv_bn_layer(
-                input,
-                num_filters=32,
-                filter_size=3,
-                stride=2,
-                padding='SAME',
-                act='sigmoid',
-                name='resnet_conv1_1')
-            return input
+            depth = []
+            num_filters = []
+            if self.block_num <= 1:
+                filter1 = self.filter_num1[tokens[0]]
+                repeat1 = self.repeat1[tokens[1]]
+                depth.append(filter1)
+                num_filters.append(repeat1)
+            if self.block_num <= 2:
+                filter2 = self.filter_num2[tokens[2]]
+                repeat2 = self.repeat2[tokens[3]]
+                depth.append(filter2)
+                num_filters.append(repeat2)
+            if self.block_num <= 3:
+                filter3 = self.filter_num3[tokens[4]]
+                repeat3 = self.repeat3[tokens[5]]
+                depth.append(filter3)
+                num_filters.append(repeat3)
+            if self.block_num <= 4:
+                filter4 = self.filter_num4[tokens[6]]
+                repeat4 = self.repeat4[tokens[7]]
+                depth.append(filter4)
+                num_filters.append(repeat4)
+
+            conv = conv_bn_layer(
+                input=input,
+                filter_size=5,
+                num_filters=filter1,
+                stride=2,
+                act='relu')
+            for block in range(len(depth)):
+                for i in range(depth[block]):
+                    conv = self._basicneck_block(
+                        input=conv,
+                        num_filters=num_filters[block],
+                        stride=2 if i == 0 and block != 0 else 1)
+
+            if self.output_size == 1:
+                conv = fluid.layers.fc(
+                    input=conv,
+                    size=self.class_dim,
+                    act=None,
+                    param_attr=fluid.param_attr.ParamAttr(
+                        initializer=fluid.initializer.NormalInitializer(0.0, 0.01)),
+                    bias_attr=fluid.param_attr.ParamAttr(
+                        initializer=fluid.initializer.ConstantInitializer()))
+            return conv

         return net_arch
+
+    def _shortcut(self, input, ch_out, stride):
+        ch_in = input.shape[1]
+        if ch_in != ch_out or stride != 1:
+            return conv_bn_layer(
+                input=input, filter_size=1, num_filters=ch_out, stride=stride)
+        else:
+            return input
+
+    def _basicneck_block(self, input, num_filters, stride):
+        conv0 = conv_bn_layer(
+            input=input,
+            filter_size=3,
+            num_filters=num_filters,
+            stride=stride,
+            act='relu')
+        conv1 = conv_bn_layer(
+            input=conv0,
+            filter_size=3,
+            num_filters=num_filters,
+            stride=1,
+            act=None)
+        short = self._shortcut(input, num_filters, stride)
+        return fluid.layers.elementwise_add(x=short, y=conv1, act='relu')
```
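The token layout in this search space is: each searched stage consumes two tokens, the first indexing a candidate filter count (`filter_num1`..`filter_num4`) and the second a candidate repeat count (`repeat1`..`repeat4`), so `init_tokens` returns `2 * block_num` entries. The sketch below decodes such a token vector into per-stage choices; it copies the candidate lists from the commit but is otherwise illustrative, framework-free Python (`decode_tokens` and the constant names are not part of the repository).

```python
# Candidate lists copied from ResNetSpace.__init__ in this commit.
FILTER_CANDIDATES = [
    [48, 64, 96, 128, 160, 192, 224],                # stage 1 (7 options)
    [64, 96, 128, 160, 192, 256, 320],               # stage 2 (7 options)
    [128, 160, 192, 256, 320, 384],                  # stage 3 (6 options)
    [192, 256, 384, 512, 640],                       # stage 4 (5 options)
]
REPEAT_CANDIDATES = [
    [2, 3, 4, 5, 6],                                 # stage 1 (5 options)
    [2, 3, 4, 5, 6, 7],                              # stage 2 (6 options)
    [2, 3, 4, 5, 6, 7, 8, 10, 12, 14, 16, 20, 24],   # stage 3 (13 options)
    [2, 3, 4, 5, 6, 7],                              # stage 4 (6 options)
]


def decode_tokens(tokens, block_num):
    """Map a token vector of length 2 * block_num to per-stage (filters, repeats)."""
    assert len(tokens) == 2 * block_num
    stages = []
    for i in range(block_num):
        filters = FILTER_CANDIDATES[i][tokens[2 * i]]      # even token: filter count
        repeats = REPEAT_CANDIDATES[i][tokens[2 * i + 1]]  # odd token: repeat count
        stages.append((filters, repeats))
    return stages


# With the all-zero vector from init_tokens() and block_num=4, every stage
# picks its smallest candidates:
print(decode_tokens([0] * 8, block_num=4))
# -> [(48, 2), (64, 2), (128, 2), (192, 2)]
```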