PaddlePaddle / X2Paddle
Commit ddb9aea6
Authored Jan 29, 2021 by SunAhong1993
fix the caffe
Parent 3d35ebd0
Showing 4 changed files with 52 additions and 31 deletions
x2paddle/op_mapper/dygraph/caffe2paddle/caffe_custom_layer/normalize.py  +13 -9
x2paddle/op_mapper/dygraph/caffe2paddle/caffe_op_mapper.py  +17 -9
x2paddle/op_mapper/static/caffe2paddle/caffe_custom_layer/normalize.py  +14 -5
x2paddle/op_mapper/static/caffe2paddle/caffe_op_mapper.py  +8 -8
未找到文件。
x2paddle/op_mapper/dygraph/caffe2paddle/caffe_custom_layer/normalize.py

@@ -16,17 +16,21 @@ import paddle
 import paddle.fluid as fluid


 class Normalize(object):
-    def __init__(self, axis, param_name, param_shape):
+    def __init__(self, axis):
         self.axis = axis
-        self.param_name = param_name
-        self.param_shape = param_shape

-    def __call__(self, x):
-        l2 = fluid.layers.prior_box(x=x, p=2, axis=1)
-        attr = fluid.ParamAttr(name=self.param_name, trainable=False)
-        param = paddle.nn.Layer.create_parameter(shape=self.param_shape, attr=atr)
-        out = paddle.multiply(x=l2, y=param, axis=self.axis)
+    def __call__(self, x, param):
+        l2_norm = fluid.layers.l2_normalize(x=x, axis=1)
+        param = paddle.reshape(param, [param.shape[-1]])
+        perm = list(range(len(l2_norm.shape)))
+        perm.pop(self.axis)
+        perm = perm + [self.axis]
+        l2_norm = paddle.transpose(l2_norm, perm=perm)
+        out = paddle.multiply(x=l2_norm, y=param)
+        perm = list(range(len(l2_norm.shape)))
+        dim = perm.pop(-1)
+        perm.insert(self.axis, dim)
+        out = paddle.transpose(out, perm=perm)
         return out
\ No newline at end of file
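Note on the rewritten __call__: the scale is no longer created inside the layer; it is passed in as a second argument, reshaped to 1-D, and broadcast over self.axis by moving that axis to the last position, multiplying, and transposing back. A minimal NumPy sketch of the same permute-multiply-permute trick (the helper name and shapes are illustrative, not from the repository):

import numpy as np

def scale_along_axis(x, scale, axis):
    # Move `axis` to the end so the 1-D scale broadcasts over it.
    perm = list(range(x.ndim))
    perm.pop(axis)
    perm = perm + [axis]
    out = np.transpose(x, perm) * scale
    # Restore the original dimension order.
    back = list(range(x.ndim))
    dim = back.pop(-1)
    back.insert(axis, dim)
    return np.transpose(out, back)

# Per-channel scaling of an NCHW tensor (axis=1), matching what Normalize does.
x = np.random.rand(2, 3, 4, 4).astype("float32")
scale = np.array([0.5, 1.0, 2.0], dtype="float32")
assert np.allclose(scale_along_axis(x, scale, axis=1), x * scale.reshape(1, 3, 1, 1))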
x2paddle/op_mapper/dygraph/caffe2paddle/caffe_op_mapper.py

@@ -782,7 +782,7 @@ class CaffeOpMapper(OpMapper):
         out_max_val = params.out_max_val if hasattr(params, out_max_val) else False
         top_k = params.top_k if hasattr(params, top_k) else 1
-        axis = parmas.axis if hasattr(params, axis) else -1
+        axis = params.axis if hasattr(params, axis) else -1
         if axis < 0:
             axis += len(input_shape)
         if out_max_val is True:
@@ -1018,22 +1018,30 @@ class CaffeOpMapper(OpMapper):
             node.inputs) == 1, "The count of Normalize node\'s input is not 1."
         input = self.graph.get_input_node(node, idx=0, copy=True)
         params = node.layer.norm_param
+        param_name = node.layer_name + "_scale"
         if node.data is None or len(node.data) != 1:
             print(
                 "The parameter of {} (type is {}) is not set. So we set the parameters as 0"
                 .format(node.layer_name, node.layer_type))
-            self.parmas[node.layer_name + ".scale"] = \
-                np.zeros([1] if params.channel_shared else [1, 1, 1, node.in_shapes[0][1]]).astype("float32")
+            self.params[param_name] = \
+                np.zeros([1] if params.channel_shared else [node.in_shapes[0][1]]).astype("float32")
         else:
-            self.parmas[node.layer_name + ".scale"] = _adjust_parameters(node)[0]
+            self.params[param_name] = _adjust_parameters(node)[0]
+        self.paddle_graph.add_layer(
+            "self.create_parameter",
+            inputs={},
+            outputs=[param_name],
+            shape=self.params[param_name].shape,
+            attr=string(param_name))
+        inputs_dict = {}
         layer_attrs = {
-            "axis": -1 if params.channel_shared else 1,
-            "param_name": node.layer_name + ".scale",
-            "param_shape": self.parmas[node.layer_name + ".scale"].shape}
-        self.pd_pdgraph.add_layer(
+            "axis": -1 if params.channel_shared else 1}
+        self.paddle_graph.add_layer(
             "custom_layer:Normalize",
-            inputs={"x": input.name},
+            inputs={"x": input.name,
+                    "param": param_name},
             outputs=layer_outputs,
             **layer_attrs)
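The dygraph mapper now registers the scale as a standalone self.create_parameter layer and feeds it to the custom layer as a second input ("param"), instead of having Normalize build the parameter from param_name/param_shape itself. A hedged usage sketch of the reworked layer, assuming Paddle 2.x in dygraph mode and x2paddle at this commit on the path; the tensor shapes and the parameter below are illustrative only:

import paddle
from x2paddle.op_mapper.dygraph.caffe2paddle.caffe_custom_layer.normalize import Normalize

x = paddle.rand([1, 3, 8, 8])                                # NCHW feature map
scale = paddle.create_parameter(shape=[3], dtype="float32")  # one scale per channel
norm = Normalize(axis=1)                                     # channel axis, as emitted by the mapper
y = norm(x, scale)                                           # the scale is passed in at call time
print(y.shape)                                               # [1, 3, 8, 8]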
x2paddle/op_mapper/static/caffe2paddle/caffe_custom_layer/normalize.py

@@ -13,12 +13,21 @@
 # limitations under the License.
 import paddle
 import paddle.fluid as fluid


 def normalize(x, axis, param_name, param_shape, param_dtype):
-    l2 = fluid.layers.prior_box(x=x, p=2, axis=1)
+    l2_norm = paddle.fluid.layers.l2_normalize(x=x, axis=1)
     param = paddle.static.nn.create_parameter(
         shape=param_shape,
-        dtype=string(param_dtype),
-        name=string(param_name))
-    out = paddle.multiply(x=l2, y=param, axis=axis)
+        dtype=param_dtype,
+        name=param_name)
+    param = paddle.reshape(param, [param.shape[-1]])
+    perm = list(range(len(l2_norm.shape)))
+    perm.pop(axis)
+    perm = perm + [axis]
+    l2_norm = paddle.transpose(l2_norm, perm=perm)
+    out = paddle.multiply(x=l2_norm, y=param)
+    perm = list(range(len(l2_norm.shape)))
+    dim = perm.pop(-1)
+    perm.insert(axis, dim)
+    out = paddle.transpose(out, perm=perm)
     return out
\ No newline at end of file
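The static helper keeps its signature but now creates the parameter directly from param_name/param_dtype (without the string() wrappers) and applies the same transpose-based broadcast. A hedged sketch of calling it while building a static-mode program, assuming x2paddle at this commit; the names and shapes are illustrative only:

import paddle
from x2paddle.op_mapper.static.caffe2paddle.caffe_custom_layer.normalize import normalize

paddle.enable_static()
main_prog, startup_prog = paddle.static.Program(), paddle.static.Program()
with paddle.static.program_guard(main_prog, startup_prog):
    x = paddle.static.data(name="x", shape=[1, 3, 8, 8], dtype="float32")
    # These keyword values mirror the layer_attrs the static mapper now passes.
    y = normalize(x, axis=1, param_name="conv4_3_norm_scale",
                  param_shape=[3], param_dtype="float32")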
x2paddle/op_mapper/static/caffe2paddle/caffe_op_mapper.py

@@ -855,7 +855,7 @@ class CaffeOpMapper(OpMapper):
         out_max_val = params.out_max_val if hasattr(params, out_max_val) else False
         top_k = params.top_k if hasattr(params, top_k) else 1
-        axis = parmas.axis if hasattr(params, axis) else -1
+        axis = params.axis if hasattr(params, axis) else -1
         if axis < 0:
             axis += len(in_shapes)
         if out_max_val is True:
@@ -1090,17 +1090,17 @@ class CaffeOpMapper(OpMapper):
             print(
                 "The parameter of {} (type is {}) is not set. So we set the parameters as 0"
                 .format(scale_name, node.layer_type))
-            self.parmas[scale_name] = \
-                np.zeros([1] if params.channel_shared else [1, 1, 1, node.in_shapes[0][1]]).astype("float32")
+            self.params[scale_name] = \
+                np.zeros([1] if params.channel_shared else [node.in_shapes[0][1]]).astype("float32")
         else:
-            self.parmas[scale_name] = _adjust_parameters(node)[0]
+            self.params[scale_name] = _adjust_parameters(node)[0]
         layer_attrs = {
             "axis": -1 if params.channel_shared else 1,
-            "param_name": scale_name,
-            "param_shape": self.parmas[scale_name].shape,
-            "param_dtype": str(self.parmas[scale_name].dtype)}
-        self.pd_pdgraph.add_layer(
+            "param_name": string(scale_name),
+            "param_shape": self.params[scale_name].shape,
+            "param_dtype": string(self.params[scale_name].dtype)}
+        self.paddle_graph.add_layer(
             "custom_layer:normalize",
             inputs={"x": input.name},
             outputs=[node.name],