PaddlePaddle / X2Paddle

Commit 015d8f2c
Authored on Jan 05, 2021 by SunAhong1993
Parent: 6d86e8ea

add onnx op

Showing 6 changed files with 232 additions and 51 deletions (+232 -51)
x2paddle/core/program.py                                                    +3    -0
x2paddle/op_mapper/dygraph/onnx2paddle/onnx_custom_layer/__init__.py        +17   -0
x2paddle/op_mapper/dygraph/onnx2paddle/onnx_custom_layer/one_hot.py         +51   -0
x2paddle/op_mapper/dygraph/onnx2paddle/onnx_custom_layer/pad.py             +33   -0
x2paddle/op_mapper/dygraph/onnx2paddle/opset9/opset.py                      +128  -49
x2paddle/op_mapper/dygraph/pytorch2paddle/pytorch_custom_layer/gather.py    +0    -2
x2paddle/core/program.py

@@ -471,6 +471,9 @@ class PaddleGraph(object):
         elif self.source_type == "pytorch":
             custom_import = "from x2paddle.op_mapper.dygraph.pytorch2paddle " + \
                              "import pytorch_custom_layer as x2paddle_nn"
+        elif self.source_type == "onnx":
+            custom_import = "from x2paddle.op_mapper.dygraph.onnx2paddle " + \
+                             "import onnx_custom_layer as x2paddle_nn"
         else:
             custom_import = ""
         self.head = gen_codes(
x2paddle/op_mapper/dygraph/onnx2paddle/onnx_custom_layer/__init__.py  (new file, mode 100644)

# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from .one_hot import OneHot
from .pad import CustomPad
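Together with the program.py change above, these exports are what a converted ONNX model imports when it needs a custom layer. A rough sketch of how the generated model code might reference them (hypothetical excerpt; only the import line and the exported class names come from this commit):

# Hypothetical excerpt of x2paddle-generated code for an ONNX model using OneHot.
from x2paddle.op_mapper.dygraph.onnx2paddle import onnx_custom_layer as x2paddle_nn

onehot0 = x2paddle_nn.OneHot(axis=-1)   # instantiated once by the generated module
# ... later, inside the generated forward logic:
# out = onehot0(indices, depth, values)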
x2paddle/op_mapper/dygraph/onnx2paddle/onnx_custom_layer/one_hot.py  (new file, mode 100644)

# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import paddle


class OneHot(object):
    def __init__(self, axis):
        self.axis = axis

    def __call__(self, indices, depth, values):
        # Compute the rank of `indices` dynamically from its shape tensor.
        indices_shape = paddle.shape(indices)
        tmp = paddle.ones_like(indices_shape, dtype="int32")
        rank = paddle.sum(tmp)
        depth_range = paddle.arange(end=depth)
        zero = paddle.zeros([1], dtype="int32")
        one = paddle.ones([1], dtype="int32")
        # Resolve a possibly negative axis to its positive equivalent.
        axis = self.axis * one
        new_axis = axis + rank + 1
        cond = paddle.less_than(axis, zero)
        real_axis = paddle.where(cond, new_axis, axis)
        # Split the shape of `indices` around the one-hot axis.
        ls = paddle.slice(indices_shape, axes=[0], starts=[0], ends=real_axis)
        rs = paddle.slice(indices_shape, axes=[0], starts=real_axis, ends=rank)
        tmp = paddle.ones_like(ls, dtype="int32")
        ls_len = paddle.sum(tmp)
        ls_list = paddle.ones(ls_len, dtype="int32")
        tmp = paddle.ones_like(rs, dtype="int32")
        rs_len = paddle.sum(tmp)
        rs_list = paddle.ones(rs_len, dtype="int32")
        # Broadcast [0, 1, ..., depth-1] against the wrapped indices and compare.
        depth_range_shape = paddle.shape(depth_range)
        targets_shape = paddle.concat(
            [ls_list, depth_range_shape, rs_list], axis=0)
        targets = paddle.reshape(depth_range, targets_shape)
        mod = paddle.mod(indices, depth)
        v_shape = paddle.concat([ls, paddle.shape(one), rs], axis=0)
        v = paddle.reshape(mod, v_shape)
        out = targets == v
        out = paddle.cast(out, "float32")
        # `values` holds [off_value, on_value], as in the ONNX OneHot spec.
        on_value = paddle.slice(values, axes=[0], starts=[1], ends=[2])
        off_value = paddle.slice(values, axes=[0], starts=[0], ends=[1])
        out = out * (on_value - off_value) + off_value
        return out
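A minimal usage sketch of this layer, assuming a Paddle 2.x dygraph environment with X2Paddle installed (the input values are made up for illustration):

import paddle
from x2paddle.op_mapper.dygraph.onnx2paddle.onnx_custom_layer import OneHot

indices = paddle.to_tensor([0, 2, 1], dtype="int32")
depth = paddle.to_tensor([3], dtype="int32")
values = paddle.to_tensor([0.0, 1.0])        # [off_value, on_value], as in ONNX
one_hot = OneHot(axis=-1)
out = one_hot(indices, depth, values)        # expected: a 3x3 float32 one-hot matrix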
x2paddle/op_mapper/dygraph/onnx2paddle/onnx_custom_layer/pad.py  (new file, mode 100644)

# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import paddle


class CustomPad(object):
    def __init__(self, value, mode):
        self.value = value
        # Attributes forwarded to paddle.nn.functional.pad on every call.
        self.layer_attrs = {}
        self.layer_attrs['mode'] = mode
        self.layer_attrs['data_format'] = 'NCHW'
        self.layer_attrs['value'] = value

    def __call__(self, x, pad):
        # ONNX orders pads as all begins followed by all ends; regroup them into
        # per-axis (begin, end) pairs and reverse the axis order for Paddle.
        pad = paddle.reshape(pad, shape=[2, -1])
        pad = paddle.transpose(pad, perm=[1, 0])
        pad = paddle.reverse(pad, axis=[0])
        pad = paddle.flatten(pad)
        out = paddle.nn.functional.pad(x=x, pad=pad, **self.layer_attrs)
        return out
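The reordering in __call__ is easier to see on a concrete pads tensor; a small NumPy sketch of the same steps (illustrative only, not part of the commit):

import numpy as np

# ONNX pads for an NCHW tensor: [N_begin, C_begin, H_begin, W_begin,
#                                N_end,   C_end,   H_end,   W_end]
pads = np.array([0, 0, 1, 2, 0, 0, 3, 4])
pairs = pads.reshape((2, -1)).transpose()   # one (begin, end) row per axis
pairs = pairs[::-1]                         # reverse the axis order
print(pairs.flatten().tolist())             # [2, 4, 1, 3, 0, 0, 0, 0]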
x2paddle/op_mapper/dygraph/onnx2paddle/opset9/opset.py

@@ -104,6 +104,9 @@ class OpSet9():
             'ReduceMax': ['paddle.max',
                           dict(axes='axis', keepdims='keepdim'),
                           dict(keepdim=1)],
+            'ReduceProd': ['paddle.prod',
+                           dict(axes='axis', keepdims='keepdim'),
+                           dict(keepdim=1)],
             # active function
             'Relu': ['paddle.nn.ReLU'],
             'LeakyRelu': ['paddle.nn.LeakyReLU',
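Based on how the other directly-mapped entries in this table work, the new 'ReduceProd' entry renames the ONNX attributes axes/keepdims to Paddle's axis/keepdim and falls back to keepdim=1. Roughly, an ONNX ReduceProd(axes=[1], keepdims=1) node should end up as a call like the following (a sketch of the intended mapping, not the literal generated code):

import paddle

x = paddle.rand([2, 3, 4])
out = paddle.prod(x, axis=[1], keepdim=True)   # shape [2, 1, 4]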
@@ -379,6 +382,14 @@ class OpSet9():
     def Pad(self, node, op_independent=True):
         val_x = self.graph.get_input_node(node, idx=0, copy=True)
         pads = node.get_attr('pads')
+        is_pads_attr = True
+        if pads is None:
+            val_pad = self.graph.get_input_node(node, idx=1, copy=True)
+            pad_shape = val_pad.out_shapes[0]
+            is_pads_attr = False
+            pads = _const_weight_or_none(val_pad)
+            if pads is not None:
+                is_pads_attr = True
         mode = node.get_attr('mode', 'constant')
         value = node.get_attr('value', 0.)
         data_shape = val_x.out_shapes[0]
@@ -386,56 +397,77 @@ class OpSet9():
The remainder of Pad is rewritten; the resulting implementation:

        assume_pad2d = False
        layer_attrs = {}
        layer_attrs['mode'] = string(mode)
        if is_pads_attr:
            paddings = []
            if len(pads) == 4:
                assume_pad2d |= mode != 'constant'
                if data_shape:
                    assume_pad2d |= data_shape and len(data_shape) == 4  # NCHW
                if output_shape:
                    assume_pad2d |= output_shape and len(output_shape) == 4  # NCHW
            if assume_pad2d:
                paddle_op = 'paddle.nn.Pad2D'
                layer_attrs['data_format'] = string('NCHW')
                layer_attrs['value'] = value
            else:
                paddle_op = 'paddle.fluid.layers.pad'
                layer_attrs["pad_value"] = value
            if len(pads) == 4:
                paddings = np.array(pads).reshape(
                    (-1, 2)).transpose().flatten().tolist()  # SSEE -> SESE
            elif len(pads) == 8:
                paddings = np.array(pads).reshape(
                    (-1, 4)).transpose().flatten().tolist()  # SSEE -> SESE
                if sum(paddings[:4]) == 0:
                    paddle_op = 'paddle.nn.Pad2D'
                    paddings = paddings[4:]
                    layer_attrs['data_format'] = string('NCHW')
                    layer_attrs['value'] = value
                    if 'pad_value' in layer_attrs:
                        layer_attrs.pop('pad_value')
            tmp_paddings = copy.deepcopy(paddings)
            paddings[0] = tmp_paddings[2]
            paddings[1] = tmp_paddings[3]
            paddings[2] = tmp_paddings[0]
            paddings[3] = tmp_paddings[1]
            if paddle_op == 'paddle.nn.Pad2D':
                layer_attrs['padding'] = paddings
                nn_op_name = name_generator("pad2d", self.nn_name2id)
            else:
                layer_attrs['paddings'] = paddings
            if op_independent:
                self.paddle_graph.add_layer(
                    paddle_op,
                    inputs={'x': val_x.name},
                    outputs=[nn_op_name, node.name]
                    if paddle_op == 'paddle.nn.Pad2D' else [node.name],
                    **layer_attrs)
            else:
                self.paddle_graph.add_layer(
                    paddle_op,
                    inputs={'x': val_x.name},
                    outputs=[nn_op_name, node.name + '_paded']
                    if paddle_op == 'paddle.nn.Pad2D' else [node.name + '_paded'],
                    **layer_attrs)
                return node.name + '_paded'
        else:
            if pad_shape[0] == 4:
                assume_pad2d |= mode != 'constant'
                if data_shape:
                    assume_pad2d |= data_shape and len(data_shape) == 4  # NCHW
                if output_shape:
                    assume_pad2d |= output_shape and len(output_shape) == 4  # NCHW
            if pad_shape[0] == 8 or not assume_pad2d:
                raise Exception(
                    "When the pad shape is 8 and pad is tensor, the op is not supported yet!")
            nn_op_name = name_generator("custom_pad", self.nn_name2id)
            output_name = node.name + '_paded'
            layer_outputs = [nn_op_name, output_name]
            layer_attrs['value'] = value
            self.paddle_graph.add_layer(
                "custom_layer:CustomPad",
                inputs={'x': val_x.name, 'pad': val_pad.name},
                outputs=layer_outputs,
                **layer_attrs)
            return node.name + '_paded'
        if not op_independent:
            return node.name + '_paded'

    @print_mapping_info
    def Unsqueeze(self, node):
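The "# SSEE -> SESE" reordering above converts ONNX's begins-then-ends layout into per-axis (begin, end) pairs; a small NumPy check of the 4-element case (illustrative only):

import numpy as np

pads = [1, 2, 3, 4]   # ONNX: [H_begin, W_begin, H_end, W_end]  ("SSEE")
sese = np.array(pads).reshape((-1, 2)).transpose().flatten().tolist()
print(sese)           # [1, 3, 2, 4] -> [H_begin, H_end, W_begin, W_end]  ("SESE")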
@@ -637,7 +669,7 @@ class OpSet9():
             self.paddle_graph.add_layer(
                 'paddle.cast',
                 inputs={"x": indices.name},
-                outputs=indices_cast,
+                outputs=[indices_cast],
                 dtype=string('int64'))
             op_name = name_generator("embedding", self.nn_name2id)
             output_name = node.name
@@ -832,7 +864,7 @@ class OpSet9():
                 "starts": starts.name,
                 "ends": ends.name
             }
-            if starts_value is not None and ends_value is not None:
+            if starts_value is not None and ends_value is not None and axes is not None:
                 starts_value = starts_value.copy()
                 ends_value = ends_value.copy()
                 #for idx in range(len(ends_value)):
@@ -862,6 +894,8 @@ class OpSet9():
                 layer_attrs['starts'] = starts_cast
             if ends.dtype != 'int32':
                 ends_cast = ends.name + '_cast'
+            else:
+                ends_cast = ends.name
             self.paddle_graph.add_layer(
                 'paddle.cast',
                 inputs={"x": ends.name},
@@ -1006,7 +1040,7 @@ class OpSet9():
             'paddle.reshape',
             inputs={'x': val_x.name,
                     'shape': val_shape.name},
-            outputs=node)
+            outputs=[node.name])

     @print_mapping_info
     def Cast(self, node):
@@ -1633,4 +1667,49 @@ class OpSet9():
             'paddle.argmax',
             inputs={"x": val_x.name},
             outputs=[node.name],
             **layer_attrs)

The following four op handlers are appended at the end of the class:
    @print_mapping_info
    def Size(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        self.paddle_graph.add_layer(
            "paddle.shape",
            inputs={"x": val_x.name},
            outputs=[node.name])
        self.paddle_graph.add_layer(
            "paddle.prod",
            inputs={"x": node.name},
            outputs=[node.name])
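The two generated layers amount to taking the product of the shape vector, which is how ONNX Size (the total element count) is expressed here; a quick sketch of the equivalent Paddle computation (illustrative only):

import paddle

x = paddle.rand([2, 3, 4])
size = paddle.prod(paddle.shape(x))   # tensor holding 24, the element count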
    @print_mapping_info
    def Sign(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        self.paddle_graph.add_layer(
            "paddle.sign",
            inputs={"x": val_x.name},
            outputs=[node.name])
    @print_mapping_info
    def OneHot(self, node):
        nn_op_name = name_generator("onehot", self.nn_name2id)
        output_name = node.name
        layer_outputs = [nn_op_name, output_name]
        indices = self.graph.get_input_node(node, idx=0, copy=True)
        depth = self.graph.get_input_node(node, idx=1, copy=True)
        values = self.graph.get_input_node(node, idx=2, copy=True)
        axis = node.get_attr('axis', -1)
        self.paddle_graph.add_layer(
            "custom_layer:OneHot",
            inputs={
                "indices": indices.name,
                "depth": depth.name,
                "values": values.name
            },
            outputs=layer_outputs,
            axis=axis)
    @print_mapping_info
    def Reciprocal(self, node):
        val_x = self.graph.get_input_node(node, idx=0, copy=True)
        self.paddle_graph.add_layer(
            "paddle.reciprocal",
            inputs={"x": val_x.name},
            outputs=[node.name])
x2paddle/op_mapper/dygraph/pytorch2paddle/pytorch_custom_layer/gather.py

@@ -13,8 +13,6 @@
 # limitations under the License.

 import paddle
-from itertools import product
-import numpy as np

 class Gather(object):
     def __init__(self, dim):