PaddlePaddle / X2Paddle
Commit d823b87f (unverified)
Authored May 08, 2023 by WJJ1995; committed via GitHub on May 08, 2023
[Bug]Support pnc model (#962)
* support pnc model
* fixed code style
Parent: 3844a5a5
Showing 1 changed file with 54 additions and 21 deletions.

x2paddle/op_mapper/onnx2paddle/opset_legacy.py (+54, -21)
@@ -213,6 +213,11 @@ class OpSet():
            attrs_name_map_dict = op_info[1]
            for onnx_attr_name, pd_attr_name in attrs_name_map_dict.items():
                if onnx_attr_name in onnx_attrs:
                    # convert for dynamic code, mv 0 to False, 1 to True
                    if pd_attr_name == "keepdim":
                        keepdims = False if onnx_attrs[onnx_attr_name] == 0 else True
                        onnx_attrs[onnx_attr_name] = keepdims
                    layer_attrs[pd_attr_name] = onnx_attrs[onnx_attr_name]
                else:
                    layer_attrs[pd_attr_name] = op_info[2][onnx_attr_name]
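The hunk above rewrites the ONNX integer `keepdims` attribute into a Python bool before it is passed to Paddle's dynamic API. A minimal sketch of the same idea outside the converter (the `onnx_attrs` dict here is a made-up example, not taken from the commit):

    import paddle

    # ONNX stores boolean attributes such as "keepdims" as 0/1 integers,
    # while Paddle's dynamic API expects a real Python bool.
    onnx_attrs = {"keepdims": 0}   # hypothetical attribute dict of an ONNX ReduceSum node

    keepdim = False if onnx_attrs["keepdims"] == 0 else True

    x = paddle.rand([2, 3])
    y = paddle.sum(x, axis=1, keepdim=keepdim)
    print(y.shape)   # [2] when keepdim is False, [2, 1] when it is True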
@@ -340,7 +345,7 @@ class OpSet():
            elif len(node.layer.input) == 3:
                # opset 11
                try:
-                   #to avoid the error causeed by NULL value of resize inputs.
+                   # to avoid the error causeed by NULL value of resize inputs.
                    val_scales = self.graph.get_input_node(node, idx=2, copy=True)
                except:
@@ -920,7 +925,7 @@ class OpSet():
                outputs=[node.name],
                axis=axis)
            # deal with indice is scalar(0D) Tensor
-           if isinstance(indices_values, int) and len(val_x_shape) > 1:
+           if isinstance(indices_values, int) and len(val_x_shape) != 1:
                self.paddle_graph.add_layer(
                    'paddle.squeeze',
                    inputs={'x': node.name},
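Changing the condition from `> 1` to `!= 1` widens the case in which the gather result is squeezed when the ONNX index was a scalar (0-D) tensor. A small illustration of why the extra axis appears, using plain Paddle ops rather than the converter's generated graph (shapes are illustrative):

    import paddle

    x = paddle.rand([4, 5])               # rank-2 input
    idx = paddle.to_tensor([2])           # a single index kept as a 1-D tensor
    g = paddle.gather(x, idx, axis=0)     # shape [1, 5]: the index axis is kept
    out = paddle.squeeze(g, axis=[0])     # shape [5], matching ONNX Gather with a 0-D index
    print(g.shape, out.shape)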
@@ -1150,7 +1155,18 @@ class OpSet():
                    ends_value[idx] = val_x.out_shapes[0][axes[idx]]
                elif ends_value[idx] > 2**31 - 1:
                    ends_value[idx] = 2**31 - 1
                elif ends_value[idx] < -2**31:
                    ends_value[idx] = -2**31
            # If stride is -1 and starts and ends meet the conditions, just reverse it directly
            if steps == [-1] and len(starts_value) == 1 and len(ends_value) == 1 and starts_value[0] == -1 and ends_value[0] == -2**31:
                self.paddle_graph.add_layer(
                    "paddle.flip",
                    inputs={"x": val_x.name},
                    outputs=[node.name],
                    axis=axes)
                return
            layer_attrs = {
                "axes": axes,
                "starts": starts_value,
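Two things happen in this hunk: `ends` values outside the int32 range are clamped, and the common Slice pattern `starts=[-1], ends=[-2**31], steps=[-1]` (walking a whole axis backwards) is shortcut to `paddle.flip`. A rough equivalence check under that assumption, independent of the converter:

    import numpy as np
    import paddle

    x = paddle.to_tensor(np.arange(12).reshape(3, 4).astype("float32"))

    # An ONNX Slice with starts=[-1], ends clamped to -2**31 and steps=[-1]
    # traverses the entire axis in reverse, i.e. it is a flip along that axis.
    flipped = paddle.flip(x, axis=[1])
    print(np.array_equal(flipped.numpy(), x.numpy()[:, ::-1]))   # True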
@@ -1186,6 +1202,8 @@ class OpSet():
            for idx in range(len(ends)):
                if ends[idx] > 2**31 - 1:
                    ends[idx] = 2**31 - 1
                elif ends[idx] < -2**31:
                    ends[idx] = 0
            layer_attrs = {"axes": axes, "starts": starts, "ends": ends}
            if steps is not None:
@@ -1396,13 +1414,12 @@ class OpSet():
    @print_mapping_info
    def Split(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        paddle_op = 'split'
        split = node.get_attr('split')
        axis = node.get_attr('axis', 0)
        if split is None:
            split_num = len(node.layer.output)
            try:
-               #split is an input of this node
+               # split is an input of this node
                split_node = self.graph.get_input_node(node, idx=1, copy=True)
                split_value = _const_weight_or_none(split_node)
                layer_attrs = {
@@ -1419,7 +1436,7 @@ class OpSet():
            if hasattr(node, 'index'):
                outputs_list.append("{}_p{}".format(node.layer_name, i))
            else:
-               outputs_list.append("{}".format(node.layer_name))
+               outputs_list.append("{}".format(node.layer.output[i]))
        if split_num > 1:
            self.paddle_graph.add_layer(
                'paddle.split',
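The replaced line names each generated output after the corresponding entry in `node.layer.output` instead of reusing the node's own name for every piece. For reference, a bare `paddle.split` call producing one tensor per ONNX output (the shapes below are made up):

    import paddle

    x = paddle.rand([6, 4])
    # Three equal parts along axis 0, mirroring an ONNX Split with three outputs;
    # each returned tensor corresponds to one name in node.layer.output.
    parts = paddle.split(x, num_or_sections=3, axis=0)
    print([tuple(p.shape) for p in parts])   # [(2, 4), (2, 4), (2, 4)]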
@@ -2233,7 +2250,6 @@ class OpSet():
        kernel_shape = node.get_attr('kernel_shape')
        convnd = len(kernel_shape)
        assert 2 <= convnd <= 3, 'only Conv2D and Conv3D is supported'
        num_out_channels = val_w.out_shapes[0][0]
        num_in_channels = val_w.out_shapes[0][1]
        paddle_op = 'paddle.nn.Conv{}D'.format(convnd)
@@ -2379,7 +2395,8 @@ class OpSet():
                                     output_size[1])
            if auto_pad == "SAME_UPPER":
                for i in range(len(total_paddings)):
-                   paddings[2 * i] = total_paddings[0] - total_paddings[0] // 2
+                   paddings[2 * i] = total_paddings[0] - \
+                       total_paddings[0] // 2
                    paddings[2 * i + 1] = total_paddings[0] // 2
            else:
                for i in range(len(total_paddings)):
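Only the line wrapping changes here; the arithmetic stays the same: the total padding for a dimension is split into `total - total // 2` at the front and `total // 2` at the back. A quick check of that split, independent of the converter:

    # The split used above: a total padding amount is divided into
    # front = total - total // 2 and back = total // 2.
    for total in (3, 4, 5):
        front, back = total - total // 2, total // 2
        print(total, (front, back))   # 3 -> (2, 1), 4 -> (2, 2), 5 -> (3, 2)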
@@ -2540,6 +2557,9 @@ class OpSet():
        if input_nums > 5 and node.layer.input[5] != '':
            init_h = self.graph.get_input_node(node, idx=exist_input_nums, copy=True)
            init_h_shape = init_h.out_shapes[0]
            if len(init_h_shape) != 0 and reduce(lambda x, y: x * y, init_h_shape) not in [1, -1]:
                self.paddle_graph.add_layer(
                    'paddle.reshape',
                    inputs={"x": init_h.name},
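The added guard only emits a reshape for the initial hidden state when its element count (computed with `functools.reduce`) is neither 1 nor -1, i.e. when the shape is known and non-trivial. A standalone sketch of that check, with a made-up shape:

    from functools import reduce

    init_h_shape = [1, 2, 16]   # hypothetical static shape of the initial hidden state
    numel = reduce(lambda x, y: x * y, init_h_shape)

    # The reshape layer is only emitted when the shape is known and non-trivial.
    if len(init_h_shape) != 0 and numel not in [1, -1]:
        print("would insert a paddle.reshape layer, numel =", numel)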
@@ -2549,6 +2569,9 @@ class OpSet():
        if input_nums > 6 and node.layer.input[6] != '':
            init_c = self.graph.get_input_node(node, idx=exist_input_nums, copy=True)
            init_c_shape = init_c.out_shapes[0]
            if len(init_c_shape) != 0 and reduce(lambda x, y: x * y, init_c_shape) not in [1, -1]:
                self.paddle_graph.add_layer(
                    'paddle.reshape',
                    inputs={"x": init_c.name},
@@ -2592,8 +2615,10 @@ class OpSet():
        def generate_paddle_param_names(op_name, suffix=''):
            param_names = []
            param_names.extend(['{}.weight_ih_l0{}', '{}.weight_hh_l0{}'])
-           if have_bias != False: param_names.append('{}.bias_ih_l0{}')
-           if have_bias != False: param_names.append('{}.bias_hh_l0{}')
+           if have_bias != False:
+               param_names.append('{}.bias_ih_l0{}')
+           if have_bias != False:
+               param_names.append('{}.bias_hh_l0{}')
            param_names = [x.format(op_name, suffix) for x in param_names]
            return param_names
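Only the formatting of the two bias branches changes; the names produced are the same. A sketch of what the helper returns, assuming biases are present and an empty suffix (`have_bias` is a closure variable in the original and is made a parameter here for illustration):

    def generate_paddle_param_names(op_name, suffix='', have_bias=True):
        # Mirrors the helper in the diff: ih/hh weights always, biases only if present.
        param_names = ['{}.weight_ih_l0{}', '{}.weight_hh_l0{}']
        if have_bias:
            param_names.append('{}.bias_ih_l0{}')
            param_names.append('{}.bias_hh_l0{}')
        return [x.format(op_name, suffix) for x in param_names]

    print(generate_paddle_param_names('lstm_0'))
    # ['lstm_0.weight_ih_l0', 'lstm_0.weight_hh_l0', 'lstm_0.bias_ih_l0', 'lstm_0.bias_hh_l0']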
@@ -2639,6 +2664,14 @@ class OpSet():
    def TopK(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        val_k = self.graph.get_input_node(node, idx=1, copy=True)
        # If the topk result is the entire graph output, modify the graph result
        graph_output_new = list()
        if node.layer_name in self.graph.output_nodes:
            graph_output_new = [
                "{}_p{}".format(node.layer_name, 0) if x == node.layer_name else x
                for x in self.graph.output_nodes
            ]
            self.paddle_graph.outputs = graph_output_new
        layer_attrs = dict()
        layer_attrs["axis"] = node.get_attr('axis', -1)
        layer_attrs["largest"] = True if node.get_attr('largest',
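When the TopK node is itself a graph output, the output list is rewritten so that the first output tensor (`<name>_p0`) is referenced instead of the bare node name. A minimal reproduction of that list comprehension, with placeholder names:

    # Hypothetical graph outputs; "topk_0" is the TopK node whose first output
    # ("topk_0_p0") should replace the bare node name in the graph outputs.
    output_nodes = ["conv_5", "topk_0"]
    node_name = "topk_0"

    graph_output_new = [
        "{}_p{}".format(node_name, 0) if x == node_name else x
        for x in output_nodes
    ]
    print(graph_output_new)   # ['conv_5', 'topk_0_p0']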