Commit bee47161
Authored Jan 13, 2021 by SunAhong1993

fix

Parent: 2186c6d6
Showing 6 changed files with 24 additions and 21 deletions (+24 -21)
x2paddle/core/program.py                                          +8 -8
x2paddle/op_mapper/dygraph/pytorch2paddle/aten.py                 +6 -7
x2paddle/op_mapper/dygraph/pytorch2paddle/prim2code.py            +1 -1
x2paddle/optimizer/fusion/dygraph/interpolate_bilinear_fuser.py   +1 -0
x2paddle/optimizer/optimizer.py                                   +5 -5
x2paddle/optimizer/pattern_matcher.py                             +3 -0
x2paddle/core/program.py

@@ -285,12 +285,12 @@ class PaddleGraph(object):
                 hierarchical_tree.save_source_files(save_dir)
                 self.dump_dygraph_parameter(save_dir)
             else:
-                if self.source_type == "pytorch":
-                    from x2paddle.optimizer.pytorch_code_optimizer import ModuleGraph
-                    module_graph = ModuleGraph(self)
-                    module_graph.save_source_files(save_dir)
-                    self.dump_dygraph_parameter(save_dir)
-                else:
-                    self.gen_dygraph_code(save_dir)
-                    self.dump_dygraph_parameter(save_dir)
+                # if self.source_type == "pytorch":
+                #     from x2paddle.optimizer.pytorch_code_optimizer import ModuleGraph
+                #     module_graph = ModuleGraph(self)
+                #     module_graph.save_source_files(save_dir)
+                #     self.dump_dygraph_parameter(save_dir)
+                # else:
+                self.gen_dygraph_code(save_dir)
+                self.dump_dygraph_parameter(save_dir)
         # 动转静
x2paddle/op_mapper/dygraph/pytorch2paddle/aten.py

@@ -1310,7 +1310,6 @@ def aten_dim(mapper, graph, node):
     """
     scope_name = mapper.normalize_scope_name(node)
     output_name = mapper._get_outputs_name(node)[0]
-    layer_outputs = [output_name]
     layer_inputs = {}
     inputs_name, inputs_node = mapper._get_inputs_name(node)
     # 获取当前节点输出的list

@@ -1322,9 +1321,9 @@ def aten_dim(mapper, graph, node):
     current_inputs = list(layer_inputs.values())
     graph.add_layer(
-        "prim.shape", inputs=layer_inputs, outputs=layer_outputs, scope_name=scope_name)
+        "prim.shape", inputs=layer_inputs, outputs=[output_name], scope_name=scope_name)
     graph.add_layer(
-        "prim.len", inputs={"input": output_name}, outputs=layer_outputs, scope_name=scope_name)
+        "prim.len", inputs={"input": output_name}, outputs=[output_name], scope_name=scope_name)
     return current_inputs, current_outputs

@@ -4512,10 +4511,10 @@ def aten_upsample_bilinear2d(mapper, graph, node):
                         current_outputs, scope_name)
     layer_inputs["align_corners"] = inputs_name[2]
     current_inputs.append(inputs_name[2])
-    if "size" in layer_attrs and layer_attrs["size"] is None:
-        mapper._check_input(graph, inputs_node[3], inputs_name[3],
-                            current_outputs, scope_name)
-        layer_inputs["scale_factor"] = inputs_name[3]
+    # if "size" in layer_attrs and layer_attrs["size"] is None:
+    #     mapper._check_input(graph, inputs_node[3], inputs_name[3],
+    #                         current_outputs, scope_name)
+    #     layer_inputs["scale_factor"] = inputs_name[3]
     layer_attrs["align_mode"] = 0
     layer_attrs["mode"] = string("bilinear")
     graph.add_layer(
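One possible reading of the aten_dim change above: the shared layer_outputs variable is removed and each add_layer call now receives its own fresh list literal. A minimal, illustrative-only Python sketch (hypothetical register function, not x2paddle code) of why sharing one list object across calls can be risky:

# Illustrative only: two calls that receive the same list object see each
# other's in-place mutations, whereas fresh literals stay independent.
def register(outputs):
    outputs.append("extra")   # hypothetical in-place edit by the callee
    return outputs

shared = ["out"]
register(shared)
register(shared)
print(shared)             # ['out', 'extra', 'extra'] -- mutations accumulate

print(register(["out"]))  # ['out', 'extra'] -- each fresh literal starts clean
print(register(["out"]))  # ['out', 'extra']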
x2paddle/op_mapper/dygraph/pytorch2paddle/prim2code.py

@@ -471,7 +471,7 @@ def prim_str(layer, indent=1, init_func=[], forward_func=[], layer_id=None, diff
 def prim_sub(layer, indent=1, init_func=[], forward_func=[], layer_id=None, different_attrs=None):
-    if int(get_value(layer, "alpha", different_attrs)) == 1:
+    if int(float(get_value(layer, "alpha", different_attrs))) == 1:
         line = "{} = {} - {}".format(layer.outputs[0],
                                      get_value(layer, "x", different_attrs),
                                      get_value(layer, "y", different_attrs))
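The prim_sub change wraps the alpha value in float() before int(), presumably so that float-formatted values (e.g. the string "1.0") are accepted; int() alone rejects such strings. A minimal stand-alone sketch in plain Python, independent of x2paddle:

# Plain-Python illustration of the int(float(...)) wrapping above.
alpha = "1.0"

try:
    int(alpha)                 # int() rejects a float-formatted string
except ValueError as exc:
    print("int() failed:", exc)

print(int(float(alpha)))       # float() first, then int() -> 1
print(int(float(1.0)))         # plain floats also convert cleanly -> 1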
x2paddle/optimizer/fusion/dygraph/interpolate_bilinear_fuser.py

@@ -186,6 +186,7 @@ class DygraphInterpolateBilinearFuser(FuseBase):
             inputs={
                 "input": "interpolate-input-0",
                 "size": "interpolate-input-3",
+                # "scale_factor": gen_name(21)
             },
             outputs=[gen_name(23)])
         pattern_block_block.add_layer(
x2paddle/optimizer/optimizer.py

@@ -28,11 +28,11 @@ class GraphOptimizer(object):
                 "dygraph_constant_fuse_pass",
                 "dygraph_batchnorm2d_fuse_pass",
                 "dygraph_interpolate_bilinear_fuse_pass",
-                "dygraph_fc_fuse_pass",
-                "dygraph_adaptive_pool2d_fuse_pass",
-                "dygraph_reshape_fuse_pass",
-                "dygraph_dropout_fuse_pass",
-                "dygraph_if_fuse_pass"
+                # "dygraph_fc_fuse_pass",
+                # "dygraph_adaptive_pool2d_fuse_pass",
+                # "dygraph_reshape_fuse_pass",
+                # "dygraph_dropout_fuse_pass",
+                # "dygraph_if_fuse_pass"
             ]
         elif source_frame == "caffe":
             if paddle_type == "dygraph":
x2paddle/optimizer/pattern_matcher.py

@@ -130,6 +130,8 @@ class PatternMatcher(object):
                     if is_pop:
                         subgraph_id2layers.pop(layer_id)
                     continue
+                if layer_id not in subgraph_id2layers:
+                    continue
                 # 当为控制流时的处理
                 if layer.kernel == "prim.if" or layer.kernel == "prim.loop":
                     if len(pattern_layer.blocks) != len(layer.blocks):

@@ -154,6 +156,7 @@ class PatternMatcher(object):
                     if pattern_index == 0 or is_subblock:
                         return False
                     else:
+                        print(subgraph_id2layers.keys())
                         index = list(subgraph_id2layers.keys()).index(
                             layer_id)
                         for key in list(subgraph_id2layers.keys())[
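The first pattern_matcher hunk adds a membership guard that skips layer ids no longer present in subgraph_id2layers (entries can be popped just above); the second hunk adds a debug print of the remaining keys. A minimal, self-contained sketch of the same guard pattern, using hypothetical data rather than the real matcher state:

# Hypothetical data standing in for subgraph_id2layers after some ids were popped.
subgraph_id2layers = {"2": "conv", "5": "relu"}

for layer_id in ["2", "3", "5"]:
    if layer_id not in subgraph_id2layers:
        # Without this guard, the dict lookup below would raise KeyError for "3".
        continue
    layer = subgraph_id2layers[layer_id]
    print(layer_id, layer)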