提交 b4b95fb7 编写于 作者: C channingss

fix markdown style

上级 71ab0cfc
......@@ -10,5 +10,3 @@
| Normalize | [code](https://github.com/weiliu89/caffe/blob/ssd/src/caffe/layers/normalize_layer.cpp) |
| ROIPooling | [code](https://github.com/rbgirshick/caffe-fast-rcnn/blob/0dcd397b29507b8314e252e850518c5695efbb83/src/caffe/layers/roi_pooling_layer.cpp) |
| Axpy | [code](https://github.com/hujie-frank/SENet/blob/master/src/caffe/layers/axpy_layer.cpp) |
......@@ -7,7 +7,7 @@ function abort(){
trap 'abort' 0
set -e
TRAVIS_BUILD_DIR=${PWD}
cd $TRAVIS_BUILD_DIR
export PATH=/usr/bin:$PATH
pre-commit install
......
因为它太大了,无法显示 source diff。你可以改为查看 blob。
......@@ -135,7 +135,8 @@ class CaffeOpMapper(OpMapper):
if isinstance(params.kernel_size, numbers.Number):
[k_h, k_w] = [params.kernel_size] * 2
elif len(params.kernel_size) > 0:
k_h = params.kernel_h if params.kernel_h > 0 else params.kernel_size[0]
k_h = params.kernel_h if params.kernel_h > 0 else params.kernel_size[
0]
k_w = params.kernel_w if params.kernel_w > 0 else params.kernel_size[
len(params.kernel_size) - 1]
elif params.kernel_h > 0 or params.kernel_w > 0:
......@@ -156,8 +157,8 @@ class CaffeOpMapper(OpMapper):
[p_h, p_w] = [params.pad] * 2
elif len(params.pad) > 0:
p_h = params.pad_h if params.pad_h > 0 else params.pad[0]
p_w = params.pad_w if params.pad_w > 0 else params.pad[len(params.pad) -
1]
p_w = params.pad_w if params.pad_w > 0 else params.pad[
len(params.pad) - 1]
elif params.pad_h > 0 or params.pad_w > 0:
p_h = params.pad_h
p_w = params.pad_w
......@@ -225,12 +226,17 @@ class CaffeOpMapper(OpMapper):
node.layer_type, params)
if data is None:
data = []
print('The parameter of {} (type is {}) is not set. So we set the parameters as 0'.format(
node.layer_name, node.layer_type))
print(
'The parameter of {} (type is {}) is not set. So we set the parameters as 0'
.format(node.layer_name, node.layer_type))
input_c = node.input_shape[0][1]
output_c = channel
data.append(np.zeros([output_c, input_c, kernel[0], kernel[1]]).astype('float32'))
data.append(np.zeros([output_c,])).astype('float32')
data.append(
np.zeros([output_c, input_c, kernel[0],
kernel[1]]).astype('float32'))
data.append(np.zeros([
output_c,
])).astype('float32')
else:
data = self.adjust_parameters(node)
self.weights[node.layer_name + '_weights'] = data[0]
......@@ -272,12 +278,17 @@ class CaffeOpMapper(OpMapper):
node.layer_type, params)
if data is None:
data = []
print('The parameter of {} (type is {}) is not set. So we set the parameters as 0'.format(
node.layer_name, node.layer_type))
print(
'The parameter of {} (type is {}) is not set. So we set the parameters as 0'
.format(node.layer_name, node.layer_type))
input_c = node.input_shape[0][1]
output_c = channel
data.append(np.zeros([output_c, input_c, kernel[0], kernel[1]]).astype('float32'))
data.append(np.zeros([output_c,]).astype('float32'))
data.append(
np.zeros([output_c, input_c, kernel[0],
kernel[1]]).astype('float32'))
data.append(np.zeros([
output_c,
]).astype('float32'))
else:
data = self.adjust_parameters(node)
self.weights[node.layer_name + '_weights'] = data[0]
......@@ -369,13 +380,17 @@ class CaffeOpMapper(OpMapper):
data = node.data
params = node.layer.inner_product_param
if data is None:
print('The parameter of {} (type is {}) is not set. So we set the parameters as 0.'.format(
node.layer_name, node.layer_type))
print(
'The parameter of {} (type is {}) is not set. So we set the parameters as 0.'
.format(node.layer_name, node.layer_type))
input_c = node.input_shape[0][1]
output_c = params.num_output
data = []
data.append(np.zeros([input_c, output_c]).astype('float32').astype('float32'))
data.append(np.zeros([output_c]).astype('float32').astype('float32'))
data.append(
np.zeros([input_c,
output_c]).astype('float32').astype('float32'))
data.append(
np.zeros([output_c]).astype('float32').astype('float32'))
else:
data = self.adjust_parameters(node)
# Reshape the parameters to Paddle's ordering
......@@ -616,7 +631,8 @@ class CaffeOpMapper(OpMapper):
param_attr=attr)
def BatchNorm(self, node):
assert len(node.inputs) == 1, 'The count of BatchNorm node\'s input is not 1.'
assert len(
node.inputs) == 1, 'The count of BatchNorm node\'s input is not 1.'
input = self.graph.get_bottom_node(node, idx=0, copy=True)
params = node.layer.batch_norm_param
if hasattr(params, 'eps'):
......@@ -624,11 +640,16 @@ class CaffeOpMapper(OpMapper):
else:
eps = 1e-5
if node.data is None or len(node.data) != 3:
print('The parameter of {} (type is {}) is not set. So we set the parameters as 0'.format(
node.layer_name, node.layer_type))
print(
'The parameter of {} (type is {}) is not set. So we set the parameters as 0'
.format(node.layer_name, node.layer_type))
input_c = node.input_shape[0][1]
mean = np.zeros([input_c,]).astype('float32')
variance = np.zeros([input_c,]).astype('float32')
mean = np.zeros([
input_c,
]).astype('float32')
variance = np.zeros([
input_c,
]).astype('float32')
scale = 0
else:
node.data = [np.squeeze(i) for i in node.data]
......@@ -655,11 +676,16 @@ class CaffeOpMapper(OpMapper):
def Scale(self, node):
if node.data is None:
print('The parameter of {} (type is {}) is not set. So we set the parameters as 0'.format(
node.layer_name, node.layer_type))
print(
'The parameter of {} (type is {}) is not set. So we set the parameters as 0'
.format(node.layer_name, node.layer_type))
input_c = node.input_shape[0][1]
self.weights[node.layer_name + '_scale'] = np.zeros([input_c,]).astype('float32')
self.weights[node.layer_name + '_offset'] = np.zeros([input_c,]).astype('float32')
self.weights[node.layer_name + '_scale'] = np.zeros([
input_c,
]).astype('float32')
self.weights[node.layer_name + '_offset'] = np.zeros([
input_c,
]).astype('float32')
else:
self.weights[node.layer_name + '_scale'] = np.squeeze(node.data[0])
self.weights[node.layer_name + '_offset'] = np.squeeze(node.data[1])
......
......@@ -43,7 +43,8 @@ def get_kernel_parameters(params):
[p_h, p_w] = [params.pad] * 2
elif len(params.pad) > 0:
p_h = params.pad_h if params.pad_h > 0 else params.pad[0]
p_w = params.pad_w if params.pad_w > 0 else params.pad[len(params.pad) - 1]
p_w = params.pad_w if params.pad_w > 0 else params.pad[len(params.pad) -
1]
elif params.pad_h > 0 or params.pad_w > 0:
p_h = params.pad_h
p_w = params.pad_w
......
......@@ -65,4 +65,3 @@
| mNASNet | [pytorch(personal practice)](https://github.com/rwightman/gen-efficientnet-pytorch) |9|
| EfficientNet | [pytorch(personal practice)](https://github.com/rwightman/gen-efficientnet-pytorch) |9|
| SqueezeNet | [onnx official](https://s3.amazonaws.com/download.onnx/models/opset_9/squeezenet.tar.gz) |9|
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册