Commit b4b95fb7 authored by channingss

fix markdown style

Parent 71ab0cfc
@@ -10,5 +10,3 @@
 | Normalize | [code](https://github.com/weiliu89/caffe/blob/ssd/src/caffe/layers/normalize_layer.cpp) |
 | ROIPooling | [code](https://github.com/rbgirshick/caffe-fast-rcnn/blob/0dcd397b29507b8314e252e850518c5695efbb83/src/caffe/layers/roi_pooling_layer.cpp) |
 | Axpy | [code](https://github.com/hujie-frank/SENet/blob/master/src/caffe/layers/axpy_layer.cpp) |
@@ -7,7 +7,7 @@ function abort(){
 trap 'abort' 0
 set -e
+TRAVIS_BUILD_DIR=${PWD}
 cd $TRAVIS_BUILD_DIR
 export PATH=/usr/bin:$PATH
 pre-commit install
......
This diff is collapsed.
@@ -135,7 +135,8 @@ class CaffeOpMapper(OpMapper):
         if isinstance(params.kernel_size, numbers.Number):
             [k_h, k_w] = [params.kernel_size] * 2
         elif len(params.kernel_size) > 0:
-            k_h = params.kernel_h if params.kernel_h > 0 else params.kernel_size[0]
+            k_h = params.kernel_h if params.kernel_h > 0 else params.kernel_size[
+                0]
             k_w = params.kernel_w if params.kernel_w > 0 else params.kernel_size[
                 len(params.kernel_size) - 1]
         elif params.kernel_h > 0 or params.kernel_w > 0:
@@ -156,8 +157,8 @@ class CaffeOpMapper(OpMapper):
             [p_h, p_w] = [params.pad] * 2
         elif len(params.pad) > 0:
             p_h = params.pad_h if params.pad_h > 0 else params.pad[0]
-            p_w = params.pad_w if params.pad_w > 0 else params.pad[len(params.pad) -
-                                                                   1]
+            p_w = params.pad_w if params.pad_w > 0 else params.pad[
+                len(params.pad) - 1]
         elif params.pad_h > 0 or params.pad_w > 0:
             p_h = params.pad_h
             p_w = params.pad_w
@@ -225,12 +226,17 @@ class CaffeOpMapper(OpMapper):
             node.layer_type, params)
         if data is None:
             data = []
-            print('The parameter of {} (type is {}) is not set. So we set the parameters as 0'.format(
-                node.layer_name, node.layer_type))
+            print(
+                'The parameter of {} (type is {}) is not set. So we set the parameters as 0'
+                .format(node.layer_name, node.layer_type))
             input_c = node.input_shape[0][1]
             output_c = channel
-            data.append(np.zeros([output_c, input_c, kernel[0], kernel[1]]).astype('float32'))
-            data.append(np.zeros([output_c,])).astype('float32')
+            data.append(
+                np.zeros([output_c, input_c, kernel[0],
+                          kernel[1]]).astype('float32'))
+            data.append(np.zeros([
+                output_c,
+            ]).astype('float32'))
         else:
             data = self.adjust_parameters(node)
         self.weights[node.layer_name + '_weights'] = data[0]
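When a Convolution node carries no trained blobs, the branch above substitutes zero tensors so conversion can continue: a `[output_c, input_c, k_h, k_w]` filter plus a `[output_c]` bias. A minimal sketch of that fallback in isolation (shapes taken from the hunk above; `make_zero_conv_params` is an illustrative name):

```python
import numpy as np

def make_zero_conv_params(input_c, output_c, kernel):
    # Filter laid out as [out_channels, in_channels, k_h, k_w], the
    # layout both Caffe and Paddle use for convolution weights.
    weights = np.zeros([output_c, input_c, kernel[0], kernel[1]],
                       dtype='float32')
    bias = np.zeros([output_c], dtype='float32')  # one bias per out channel
    return [weights, bias]

data = make_zero_conv_params(input_c=3, output_c=64, kernel=(3, 3))
assert data[0].shape == (64, 3, 3, 3) and data[1].shape == (64,)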
@@ -272,12 +278,17 @@ class CaffeOpMapper(OpMapper):
             node.layer_type, params)
         if data is None:
             data = []
-            print('The parameter of {} (type is {}) is not set. So we set the parameters as 0'.format(
-                node.layer_name, node.layer_type))
+            print(
+                'The parameter of {} (type is {}) is not set. So we set the parameters as 0'
+                .format(node.layer_name, node.layer_type))
             input_c = node.input_shape[0][1]
             output_c = channel
-            data.append(np.zeros([output_c, input_c, kernel[0], kernel[1]]).astype('float32'))
-            data.append(np.zeros([output_c,]).astype('float32'))
+            data.append(
+                np.zeros([output_c, input_c, kernel[0],
+                          kernel[1]]).astype('float32'))
+            data.append(np.zeros([
+                output_c,
+            ]).astype('float32'))
         else:
             data = self.adjust_parameters(node)
         self.weights[node.layer_name + '_weights'] = data[0]
@@ -369,13 +380,17 @@ class CaffeOpMapper(OpMapper):
         data = node.data
         params = node.layer.inner_product_param
         if data is None:
-            print('The parameter of {} (type is {}) is not set. So we set the parameters as 0.'.format(
-                node.layer_name, node.layer_type))
+            print(
+                'The parameter of {} (type is {}) is not set. So we set the parameters as 0.'
+                .format(node.layer_name, node.layer_type))
             input_c = node.input_shape[0][1]
             output_c = params.num_output
             data = []
-            data.append(np.zeros([input_c, output_c]).astype('float32').astype('float32'))
-            data.append(np.zeros([output_c]).astype('float32').astype('float32'))
+            data.append(
+                np.zeros([input_c, output_c]).astype('float32'))
+            data.append(
+                np.zeros([output_c]).astype('float32'))
         else:
             data = self.adjust_parameters(node)
         # Reshape the parameters to Paddle's ordering
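The zero-fill branch above already builds the weight as `[input_c, output_c]`, while Caffe itself stores InnerProduct weights as `(num_output, input_dim)`; the "Reshape the parameters to Paddle's ordering" step therefore amounts to a transpose for trained weights. A small illustration under those commonly documented layout conventions (`to_paddle_fc_weight` is an illustrative helper, not a function in the repo):

```python
import numpy as np

def to_paddle_fc_weight(caffe_weight):
    # Caffe InnerProduct: (num_output, input_dim) ->
    # Paddle fc:          (input_dim, num_output)
    return np.transpose(caffe_weight, (1, 0))

w_caffe = np.arange(6, dtype='float32').reshape(2, 3)  # 2 outputs, 3 inputs
w_paddle = to_paddle_fc_weight(w_caffe)
assert w_paddle.shape == (3, 2)
```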
@@ -616,7 +631,8 @@ class CaffeOpMapper(OpMapper):
             param_attr=attr)

     def BatchNorm(self, node):
-        assert len(node.inputs) == 1, 'The count of BatchNorm node\'s input is not 1.'
+        assert len(
+            node.inputs) == 1, 'The count of BatchNorm node\'s input is not 1.'
         input = self.graph.get_bottom_node(node, idx=0, copy=True)
         params = node.layer.batch_norm_param
         if hasattr(params, 'eps'):
@@ -624,11 +640,16 @@ class CaffeOpMapper(OpMapper):
         else:
             eps = 1e-5
         if node.data is None or len(node.data) != 3:
-            print('The parameter of {} (type is {}) is not set. So we set the parameters as 0'.format(
-                node.layer_name, node.layer_type))
+            print(
+                'The parameter of {} (type is {}) is not set. So we set the parameters as 0'
+                .format(node.layer_name, node.layer_type))
             input_c = node.input_shape[0][1]
-            mean = np.zeros([input_c,]).astype('float32')
-            variance = np.zeros([input_c,]).astype('float32')
+            mean = np.zeros([
+                input_c,
+            ]).astype('float32')
+            variance = np.zeros([
+                input_c,
+            ]).astype('float32')
             scale = 0
         else:
             node.data = [np.squeeze(i) for i in node.data]
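Caffe's BatchNorm layer carries three blobs: an accumulated mean, an accumulated variance, and a scalar moving-average factor, and the stored statistics must be divided by that factor before use, which is why the fallback above also zeroes a `scale`. A hedged sketch of that normalization (the helper name is illustrative; the divide-by-factor convention is Caffe's documented behavior):

```python
import numpy as np

def caffe_bn_statistics(blobs):
    # blobs[0]: accumulated mean, blobs[1]: accumulated variance,
    # blobs[2]: scalar moving-average factor (0 means "unscaled").
    mean, variance, factor = blobs
    scale = 1.0 / factor if factor != 0 else 0.0
    return mean * scale, variance * scale

mean, var = caffe_bn_statistics(
    [np.array([2.0, 4.0]), np.array([1.0, 1.0]), 2.0])
print(mean, var)  # [1. 2.] [0.5 0.5]
```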
@@ -655,11 +676,16 @@ class CaffeOpMapper(OpMapper):

     def Scale(self, node):
         if node.data is None:
-            print('The parameter of {} (type is {}) is not set. So we set the parameters as 0'.format(
-                node.layer_name, node.layer_type))
+            print(
+                'The parameter of {} (type is {}) is not set. So we set the parameters as 0'
+                .format(node.layer_name, node.layer_type))
             input_c = node.input_shape[0][1]
-            self.weights[node.layer_name + '_scale'] = np.zeros([input_c,]).astype('float32')
-            self.weights[node.layer_name + '_offset'] = np.zeros([input_c,]).astype('float32')
+            self.weights[node.layer_name + '_scale'] = np.zeros([
+                input_c,
+            ]).astype('float32')
+            self.weights[node.layer_name + '_offset'] = np.zeros([
+                input_c,
+            ]).astype('float32')
         else:
             self.weights[node.layer_name + '_scale'] = np.squeeze(node.data[0])
             self.weights[node.layer_name + '_offset'] = np.squeeze(node.data[1])
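Caffe's Scale layer applies a per-channel affine transform, y = x * scale + offset, broadcast over the spatial dimensions; when no trained data exists the mapper registers zero vectors for both terms. A small NumPy sketch of that per-channel broadcast (illustrative only; NCHW layout assumed):

```python
import numpy as np

def apply_scale(x, scale, offset):
    # x: NCHW feature map; scale / offset: one value per channel, shape (C,).
    return x * scale[None, :, None, None] + offset[None, :, None, None]

x = np.ones((1, 2, 3, 3), dtype='float32')
y = apply_scale(x, np.array([2.0, 3.0], dtype='float32'),
                np.array([0.5, 1.0], dtype='float32'))
print(y[0, 0, 0, 0], y[0, 1, 0, 0])  # 2.5 4.0
```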
......
@@ -43,7 +43,8 @@ def get_kernel_parameters(params):
         [p_h, p_w] = [params.pad] * 2
     elif len(params.pad) > 0:
         p_h = params.pad_h if params.pad_h > 0 else params.pad[0]
-        p_w = params.pad_w if params.pad_w > 0 else params.pad[len(params.pad) - 1]
+        p_w = params.pad_w if params.pad_w > 0 else params.pad[len(params.pad) -
+                                                               1]
     elif params.pad_h > 0 or params.pad_w > 0:
         p_h = params.pad_h
         p_w = params.pad_w
......
@@ -65,4 +65,3 @@
 | mNASNet | [pytorch(personal practice)](https://github.com/rwightman/gen-efficientnet-pytorch) |9|
 | EfficientNet | [pytorch(personal practice)](https://github.com/rwightman/gen-efficientnet-pytorch) |9|
 | SqueezeNet | [onnx official](https://s3.amazonaws.com/download.onnx/models/opset_9/squeezenet.tar.gz) |9|