PaddlePaddle / X2Paddle

Commit 379ce426 (unverified)
Authored by WJJ1995 on Jul 20, 2021; committed via GitHub on Jul 20, 2021

Add solar_trace support (#642)

Parent commit: efcd3d94
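For readers new to the project: X2Paddle converts models from other frameworks to PaddlePaddle, and its PyTorch front end (the aten.py op mapper edited below) consumes models captured with TorchScript tracing. The "solar_trace" model named in the commit title is not shown on this page, so the sketch below only illustrates the general trace-based conversion workflow; the resnet18 model, input shape, and save directory are placeholders, and the pytorch2paddle entry point with jit_type="trace" follows X2Paddle's documented usage from around this release.

    # Hedged usage sketch: trace-based PyTorch-to-Paddle conversion with X2Paddle.
    # Model, input shape, and save_dir are illustrative placeholders.
    import torch
    import torchvision
    from x2paddle.convert import pytorch2paddle

    model = torchvision.models.resnet18(pretrained=True)
    model.eval()

    # Example input used to drive TorchScript tracing.
    dummy_input = torch.randn(1, 3, 224, 224)

    pytorch2paddle(
        module=model,
        save_dir="pd_model_trace",      # output directory for the Paddle model
        jit_type="trace",               # trace the module rather than scripting it
        input_examples=[dummy_input])   # inputs fed to the trace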
Showing 1 changed file with 21 additions and 37 deletions.

x2paddle/op_mapper/pytorch2paddle/aten.py  (+21 / -37)
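Every hunk below edits the same control-flow scaffolding that the aten_* mappers use: a prim.if layer is appended to the graph, fetched back as the most recently added layer, and two PaddleGraph blocks (the true and false branches) are built and attached to it. The condensed sketch that follows paraphrases that pattern from the surrounding code to make the hunks easier to read; the PaddleGraph import path and the helper's name and signature are assumptions for illustration, not part of the diff.

    # Condensed sketch of the prim.if pattern edited in the hunks below
    # (hypothetical helper; names and import path are assumptions).
    from x2paddle.core.program import PaddleGraph


    def add_if_else(graph, cond_name, true_kernel, false_kernel,
                    layer_inputs, layer_outputs, scope_name):
        # Append the prim.if layer, then fetch it back as the last layer added.
        graph.add_layer(
            "prim.if", {"input": cond_name},
            outputs=layer_outputs,
            scope_name=scope_name)
        if_layer = graph.layers[list(graph.layers.keys())[-1]]

        # Build the "true" branch as a child PaddleGraph and attach it.
        block = PaddleGraph(source_type="pytorch", parent_layer=if_layer)
        block.add_layer(
            true_kernel,
            inputs=layer_inputs,
            outputs=layer_outputs,
            scope_name=scope_name)
        if_layer.add_block(block)

        # Build and attach the "false" branch the same way.
        block = PaddleGraph(source_type="pytorch", parent_layer=if_layer)
        block.add_layer(
            false_kernel,
            inputs=layer_inputs,
            outputs=layer_outputs,
            scope_name=scope_name)
        if_layer.add_block(block)

        # Register the variables the branches read as inputs of the if layer.
        for i, name in enumerate(layer_inputs.values()):
            if_layer.inputs["input-{}".format(i)] = name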
@@ -988,8 +988,8 @@ def aten_constant_pad_nd(mapper, graph, node):
    if inputs_name[1] in mapper.attrs:
        layer_attrs["padding"] = mapper.attrs[inputs_name[1]]
    else:
        mapper._check_input(graph, inputs_node[1], inputs_name[1],
                            current_outputs, scope_name)
        layer_inputs["pad"] = inputs_name[1]
        is_padding_tensor = True
    # Get the list of inputs of the current node
@@ -1021,8 +1021,7 @@ def aten_constant_pad_nd(mapper, graph, node):
        outputs=[inputs_name[0] + "_if", output_name],
        scope_name=scope_name)
    if_layer = graph.layers[list(graph.layers.keys())[-1]]
    block = PaddleGraph(source_type="pytorch", parent_layer=if_layer)
    block.add_layer(
        "prim.sub",
        inputs={"y": inputs_name[0] + "_len"},
@@ -1054,8 +1053,7 @@ def aten_constant_pad_nd(mapper, graph, node):
        outputs=[output_name],
        scope_name=scope_name)
    if_layer.add_block(block)
    block = PaddleGraph(source_type="pytorch", parent_layer=if_layer)
    layer_inputs["input"] = inputs_name[0]
    block.add_layer(
        kernel,
@@ -1860,11 +1858,10 @@ def aten_expand_as(mapper, graph, node):
        y=paddle_dtypes.t_bool)
    graph.add_layer(
        "prim.if", {'input': inputs_name[0] + "_cond"},
-       outputs=[inputs_name[0] + "_if1"],
+       outputs=[inputs_name[0] + "_if1", inputs_name[0]],
        scope_name=scope_name)
    if_layer = graph.layers[list(graph.layers.keys())[-1]]
    block = PaddleGraph(source_type="pytorch", parent_layer=if_layer)
    block.add_layer(
        "prim.type",
        inputs={"input": inputs_name[1]},
@@ -1877,8 +1874,7 @@ def aten_expand_as(mapper, graph, node):
        scope_name=scope_name,
        dtype=inputs_name[1] + "_type")
    if_layer.add_block(block)
    block = PaddleGraph(source_type="pytorch", parent_layer=if_layer)
    if_layer.add_block(block)
    if_layer.inputs["input-0"] = inputs_name[0]
    if_layer.inputs["input-1"] = inputs_name[1]
@@ -1889,11 +1885,10 @@ def aten_expand_as(mapper, graph, node):
        scope_name=scope_name)
    graph.add_layer(
        "prim.if", {'input': inputs_name[0] + "_cond"},
-       outputs=[inputs_name[0] + "_if2"],
+       outputs=[inputs_name[0] + "_if2", output_name],
        scope_name=scope_name)
    if_layer = graph.layers[list(graph.layers.keys())[-1]]
    block = PaddleGraph(source_type="pytorch", parent_layer=if_layer)
    block.add_layer(
        "paddle.cast",
        inputs={"x": layer_outputs[0]},
@@ -1901,8 +1896,7 @@ def aten_expand_as(mapper, graph, node):
        scope_name=scope_name,
        dtype=string("bool"))
    if_layer.add_block(block)
    block = PaddleGraph(source_type="pytorch", parent_layer=if_layer)
    if_layer.add_block(block)
    if_layer.inputs["input-0"] = layer_outputs[0]
    # TODO(syf): check expand_as
@@ -2104,16 +2098,14 @@ def aten_floor(mapper, graph, node):
        outputs=[inputs_name[0] + "_if"],
        scope_name=scope_name)
    if_layer = graph.layers[list(graph.layers.keys())[-1]]
    block = PaddleGraph(source_type="pytorch", parent_layer=if_layer)
    block.add_layer(
        "paddle.floor",
        inputs=copy.deepcopy(layer_inputs),
        outputs=copy.deepcopy(layer_outputs),
        scope_name=scope_name)
    if_layer.add_block(block)
    block = PaddleGraph(source_type="pytorch", parent_layer=if_layer)
    block.add_layer(
        "prim.floor",
        inputs=copy.deepcopy(layer_inputs),
@@ -3210,16 +3202,14 @@ def aten_masked_fill_(mapper, graph, node):
        outputs=[inputs_name[2] + "_if"],
        scope_name=scope_name)
    if_layer = graph.layers[list(graph.layers.keys())[-1]]
    block = PaddleGraph(source_type="pytorch", parent_layer=if_layer)
    block.add_layer(
        "prim.equal",
        inputs={"input": inputs_name[1] + "_mask"},
        outputs=[inputs_name[2] + "_1"],
        scope_name=scope_name)
    if_layer.add_block(block)
    block = PaddleGraph(source_type="pytorch", parent_layer=if_layer)
    block.add_layer(
        "prim.mul",
        inputs={"x": inputs_name[1] + "_mask",
@@ -3322,16 +3312,14 @@ def aten_masked_fill(mapper, graph, node):
        outputs=[inputs_name[2] + "_if"],
        scope_name=scope_name)
    if_layer = graph.layers[list(graph.layers.keys())[-1]]
    block = PaddleGraph(source_type="pytorch", parent_layer=if_layer)
    block.add_layer(
        "prim.equal",
        inputs={"input": inputs_name[1] + "_mask"},
        outputs=[inputs_name[2] + "_1"],
        scope_name=scope_name)
    if_layer.add_block(block)
    block = PaddleGraph(source_type="pytorch", parent_layer=if_layer)
    block.add_layer(
        "prim.mul",
        inputs={"x": inputs_name[1] + "_mask",
@@ -5436,16 +5424,14 @@ def aten_upsample_bilinear2d(mapper, graph, node):
        outputs=[inputs_name[0] + "_if1"],
        scope_name=scope_name)
    if_layer = graph.layers[list(graph.layers.keys())[-1]]
    block = PaddleGraph(source_type="pytorch", parent_layer=if_layer)
    block.add_layer(
        "prim.var2list",
        inputs={"input": inputs_name[1]},
        outputs=[inputs_name[1]],
        scope_name=scope_name)
    if_layer.add_block(block)
    block = PaddleGraph(source_type="pytorch", parent_layer=if_layer)
    if_layer.add_block(block)
    if_layer.inputs["input-0"] = inputs_name[1]
    # Handle input 2, i.e. %5421
@@ -5516,16 +5502,14 @@ def aten_upsample_nearest2d(mapper, graph, node):
        outputs=[inputs_name[0] + "_if1"],
        scope_name=scope_name)
    if_layer = graph.layers[list(graph.layers.keys())[-1]]
    block = PaddleGraph(source_type="pytorch", parent_layer=if_layer)
    block.add_layer(
        "prim.var2list",
        inputs={"input": inputs_name[1]},
        outputs=[inputs_name[1]],
        scope_name=scope_name)
    if_layer.add_block(block)
    block = PaddleGraph(source_type="pytorch", parent_layer=if_layer)
    if_layer.add_block(block)
    if_layer.inputs["input-0"] = inputs_name[1]
    if "size" in layer_attrs and layer_attrs["size"] is None: