PaddlePaddle / X2Paddle
Commit 9bdc02ef
Authored on July 24, 2020 by driftcloudy

caffe2paddle: add relu6/upsample layers, support LeakyReLU, fix axpy and dropout handling

Parent: 842108b5
Showing 6 changed files with 159 additions and 3 deletions (+159, -3)
x2paddle/decoder/caffe_decoder.py                   +43  -0
x2paddle/op_mapper/caffe_custom_layer/__init__.py    +2  -0
x2paddle/op_mapper/caffe_custom_layer/axpy.py         +2  -2
x2paddle/op_mapper/caffe_custom_layer/relu6.py       +24  -0
x2paddle/op_mapper/caffe_custom_layer/upsample.py    +64  -0
x2paddle/op_mapper/caffe_op_mapper.py                +24  -1
x2paddle/decoder/caffe_decoder.py

@@ -88,6 +88,49 @@ class CaffeGraph(Graph):
            # filter them out here.
            if (not exclude) and (phase == 'test'):
                exclude = (type_str == 'Dropout')
            '''
            If a Dropout layer is to be removed, the original code here was not
            correct, because the bottom reference of the following layer also
            has to be fixed up. For example:
            layer {
              name: "pool_8x8_s1"
              type: "Pooling"
              bottom: "inception_c2_concat"
              top: "pool_8x8_s1"
              pooling_param {
                pool: AVE
                global_pooling: true
              }
            }
            layer {
              name: "pool_8x8_s1_drop"
              type: "Dropout"
              bottom: "pool_8x8_s1"
              top: "pool_8x8_s1_drop"
              dropout_param {
                dropout_ratio: 0.2
              }
            }
            layer {
              name: "classifier"
              type: "InnerProduct"
              bottom: "pool_8x8_s1_drop"
            }
            With a prototxt in this form, simply removing pool_8x8_s1_drop does
            not work: the layer after the dropout can no longer find a valid
            bottom and an error is raised. The bottom of the following layer
            has to be redirected to the layer above the dropout.
            '''
            if layer.type == 'Dropout':
                drop_layer_top = layer.top[0]
                drop_layer_bottom = layer.bottom[0]
                if drop_layer_top != drop_layer_bottom:
                    for next_layer in layers:
                        for next_layer_bottom_idx, next_layer_bottom in enumerate(
                                next_layer.bottom):
                            if drop_layer_top == next_layer_bottom:
                                next_layer.bottom.remove(drop_layer_top)
                                next_layer.bottom.insert(next_layer_bottom_idx,
                                                         drop_layer_bottom)

            if not exclude:
                filtered_layers.append(layer)
                # Guard against dupes.
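The rewiring above can be hard to picture from the protobuf messages alone, so here is a minimal standalone sketch of the same idea on plain Python stand-ins. MockLayer is a made-up substitute for the caffe layer message (not part of X2Paddle), and the direct index assignment has the same effect as the remove/insert pair used in the real code.

# Minimal sketch of the Dropout bottom-rewiring, on mock layers (illustration only).
class MockLayer(object):
    def __init__(self, name, layer_type, bottom, top):
        self.name = name
        self.type = layer_type
        self.bottom = list(bottom)
        self.top = list(top)


layers = [
    MockLayer('pool_8x8_s1', 'Pooling', ['inception_c2_concat'], ['pool_8x8_s1']),
    MockLayer('pool_8x8_s1_drop', 'Dropout', ['pool_8x8_s1'], ['pool_8x8_s1_drop']),
    MockLayer('classifier', 'InnerProduct', ['pool_8x8_s1_drop'], ['classifier']),
]

for layer in layers:
    if layer.type == 'Dropout':
        drop_layer_top = layer.top[0]
        drop_layer_bottom = layer.bottom[0]
        if drop_layer_top != drop_layer_bottom:
            for idx_layer in layers:
                for idx, bottom in enumerate(idx_layer.bottom):
                    if bottom == drop_layer_top:
                        # Redirect the consumer of the dropout output to the
                        # dropout's own input, so the Dropout layer can be dropped.
                        idx_layer.bottom[idx] = drop_layer_bottom

print(layers[2].bottom)  # ['pool_8x8_s1']: classifier now skips the dropout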
x2paddle/op_mapper/caffe_custom_layer/__init__.py

@@ -10,6 +10,8 @@ from . import select
from . import shufflechannel
from . import convolutiondepthwise
from . import axpy
from . import upsample
from . import relu6
#custom layer import ends

custom_layers = get_registered_layers()
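These imports matter because each custom-layer module calls register() at import time, which is what makes get_registered_layers() see the new kinds. The .register module itself is not part of this diff, so the snippet below is only a simplified stand-in for that pattern, not the real implementation.

# Simplified, assumed stand-in for the custom-layer registry pattern; the real
# x2paddle .register module is not shown in this commit.
g_custom_layers = {}


def register(kind, shape, layer, weights):
    # Called once at module import time by each custom-layer file.
    g_custom_layers[kind] = {'shape': shape, 'layer': layer, 'weights': weights}


def get_registered_layers():
    return g_custom_layers


# Importing relu6/upsample runs their top-level register(...) calls, so the new
# kinds become visible to the op mapper:
register(kind='ReLU6', shape=lambda s: s, layer=None, weights=lambda name, data=None: [])
print(get_registered_layers().keys())  # dict_keys(['ReLU6'])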
x2paddle/op_mapper/caffe_custom_layer/axpy.py

@@ -2,7 +2,7 @@ from .register import register
from x2paddle.core.util import *


def axpy_shape(input_shapes):  # parameter renamed from input_shape
    assert len(input_shapes) == 3, "not valid input shape for axpy layer"
    assert len(input_shapes[0]) == len(input_shapes[1]), 'should have same dims'
    output_shape = input_shapes[1]

@@ -18,7 +18,7 @@ def axpy_layer(inputs, input_shape=None, name=None):
    y = inputs[2]
    out = fluid.layers.elementwise_mul(x, alpha, axis=0)
    out = fluid.layers.elementwise_add(out, y, name=name)
    print(out)
    return out


def axpy_weights(name, data=None):
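For readers unfamiliar with the layer: Axpy (used in SE-style Caffe models) computes out = alpha * x + y, where alpha is a per-channel scale of shape [N, C, 1, 1] and x, y share the full feature-map shape [N, C, H, W], which is what the elementwise_mul/elementwise_add pair above expresses. A NumPy sketch of the same arithmetic (illustration only, not X2Paddle code):

import numpy as np

n, c, h, w = 2, 3, 4, 4
alpha = np.random.rand(n, c, 1, 1).astype('float32')  # per-channel scale
x = np.random.rand(n, c, h, w).astype('float32')
y = np.random.rand(n, c, h, w).astype('float32')

out = alpha * x + y  # broadcasting applies the per-channel scale, then adds y
assert out.shape == (n, c, h, w)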
x2paddle/op_mapper/caffe_custom_layer/relu6.py (new file, mode 100644)

from .register import register
from x2paddle.core.util import *


def relu6_shape(input_shape):
    return input_shape


def relu6_layer(inputs, input_shape=None, name=None):
    input = inputs[0]
    out = fluid.layers.relu6(x=input)
    return out


def relu6_weights(name, data=None):
    weights_name = []
    return weights_name


register(
    kind='ReLU6', shape=relu6_shape, layer=relu6_layer, weights=relu6_weights)
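ReLU6 clamps activations to the range [0, 6], which is what fluid.layers.relu6 does here; a quick NumPy sketch of the operation (illustration only):

import numpy as np

x = np.array([-3.0, 0.5, 4.0, 7.5], dtype='float32')
out = np.clip(x, 0.0, 6.0)  # relu6: min(max(x, 0), 6)
print(out)  # [0.  0.5 4.  6. ]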
x2paddle/op_mapper/caffe_custom_layer/upsample.py (new file, mode 100644)

# -*- coding: utf-8 -*-
################################################################################
#
# Copyright (c) 2020 Baidu.com, Inc. All Rights Reserved
#
################################################################################
"""
Author: Drift
Email: wutuobang@baidu.com
Date: 2020/04/22 18:45
"""

from .register import register
from x2paddle.core.util import *


def upsample_shape(input_shapes, scale):
    """
    :param input_shapes:
    :param scale:
    :return:
    """
    assert len(input_shapes) == 1, "not valid input shape for upsample layer"
    assert type(scale) is int

    input_shape = input_shapes[0]
    new_h = scale * input_shape[2]
    new_w = scale * input_shape[3]

    output_shape = [input_shape[0], input_shape[1], new_h, new_w]
    return [output_shape]


def upsample_layer(inputs, scale, input_shape=None, name=None):
    """
    :param inputs:
    :param scale:
    :param input_shape:
    :param name:
    :return:
    """
    x = inputs[0]
    out = fluid.layers.resize_nearest(
        x, align_corners=False, scale=scale, name=name)

    return out


def upsample_weights(name, data=None):
    """
    :param name:
    :param data:
    :return:
    """
    weights_name = []
    return weights_name


register(
    kind='Upsample',
    shape=upsample_shape,
    layer=upsample_layer,
    weights=upsample_weights)
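With an integer scale and align_corners=False, nearest-neighbour resizing amounts to repeating each pixel scale times along H and W, which is why upsample_shape returns [N, C, scale*H, scale*W]. A NumPy sketch of that behaviour (illustration only, not X2Paddle code):

import numpy as np

scale = 2
x = np.arange(4, dtype='float32').reshape(1, 1, 2, 2)  # [N, C, H, W]
out = x.repeat(scale, axis=2).repeat(scale, axis=3)    # nearest-neighbour upsample
assert out.shape == (1, 1, 2 * scale, 2 * scale)
print(out[0, 0])
# [[0. 0. 1. 1.]
#  [0. 0. 1. 1.]
#  [2. 2. 3. 3.]
#  [2. 2. 3. 3.]]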
x2paddle/op_mapper/caffe_op_mapper.py

@@ -23,7 +23,6 @@ from x2paddle.op_mapper.caffe_custom_layer import *
class CaffeOpMapper(OpMapper):
    directly_map_ops = {
        # 'ReLU': 'relu',   removed: ReLU is now handled by the ReLU() method added below
        'AbsVal': 'abs',
        'Sigmoid': 'sigmoid',
        'TanH': 'tanh',
@@ -435,6 +434,30 @@ class CaffeOpMapper(OpMapper):
        node.fluid_code.add_layer(
            "concat", inputs=inputs, output=node, param_attr=attr)

    def ReLU(self, node):
        """

        :param node:
        :return:
        """
        assert len(
            node.inputs) == 1, 'The count of ReLU node\'s input is not 1.'
        input = self.graph.get_bottom_node(node, idx=0, copy=True)

        # If negative_slope is set, this is equivalent to LeakyReLU
        params = node.layer.relu_param
        if params.HasField('negative_slope') and params.negative_slope != 0:
            negative_slope = float(params.negative_slope)

            attr = {'alpha': negative_slope}
            node.fluid_code.add_layer(
                'leaky_relu', inputs=input, output=node, param_attr=attr)
        else:
            node.fluid_code.add_layer('relu', inputs=input, output=node)

    def PReLU(self, node):
        assert len(
            node.inputs) == 1, 'The count of PReLU node\'s input is not 1.'
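The new ReLU handler chooses between two activations: a Caffe ReLU whose relu_param.negative_slope is non-zero is emitted as leaky_relu with alpha set to that slope, and as a plain relu otherwise. A small NumPy sketch of the two candidate activations (illustration only):

import numpy as np


def caffe_relu(x, negative_slope=0.0):
    if negative_slope != 0.0:
        # leaky_relu with alpha = negative_slope
        return np.where(x > 0, x, negative_slope * x)
    return np.maximum(x, 0.0)  # plain relu


x = np.array([-2.0, -0.5, 1.5], dtype='float32')
print(caffe_relu(x))                      # [0.  0.  1.5]
print(caffe_relu(x, negative_slope=0.1))  # approx [-0.2  -0.05  1.5]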