s920243400 / PaddleDetection
Forked from PaddlePaddle / PaddleDetection
Commit b7669541
Authored Jan 07, 2019 by minqiyang

Polish log

test=develop

Parent 1bfbc0d9

Showing 1 changed file with 14 additions and 16 deletions (+14, -16)

paddle/fluid/framework/ir/lock_free_optimize_pass.cc (+14, -16)
...
@@ -80,7 +80,7 @@ std::unique_ptr<ir::Graph> LockFreeOptimizePass::ApplyImpl(
     if (IsVarNameEndsWith(merged_grad_var, kGradVarSuffix) &&
         merged_grad_var->outputs.size() == 1u) {
       ir::Node* opt_node = merged_grad_var->outputs[0];
-      LOG(ERROR) << "Found opt node " << opt_node->Name();
+      VLOG(3) << "Found opt node " << opt_node->Name();
       // find the backward op connected with sum op
       for (ir::Node* unmerged_grad_var : node->inputs) {
...
@@ -88,13 +88,13 @@ std::unique_ptr<ir::Graph> LockFreeOptimizePass::ApplyImpl(
           unmerged_grad_var->inputs.size() == 1u) {
         ir::Node* backward_op = unmerged_grad_var->inputs[0];
-        LOG(ERROR) << "Found backward_op " << backward_op->Name();
+        VLOG(3) << "Found backward_op " << backward_op->Name();
         // find the forward op related to the backward op
         ir::Node* forward_op =
             FindForwardOpViaBackwardOp(graph.get(), backward_op);
-        LOG(ERROR) << "Found forward_op " << forward_op->Name();
+        VLOG(3) << "Found forward_op " << forward_op->Name();
         PADDLE_ENFORCE(forward_op);
...
@@ -114,29 +114,28 @@ std::unique_ptr<ir::Graph> LockFreeOptimizePass::ApplyImpl(
       for (Node* optimize_op : sum_op_output->outputs) {
         if (optimize_op->NodeType() == Node::Type::kOperation &&
             optimize_op->Name() == kOptimizerType) {
-          LOG(ERROR) << "remove optimize_op: " << optimize_op->Name() << "_"
-                     << optimize_op->id();
+          VLOG(3) << "remove optimize_op: " << optimize_op->Name() << "_"
+                  << optimize_op->id();
           graph->RemoveNode(optimize_op);
         }
       }
-      LOG(ERROR) << "remove sum_op_output: " << sum_op_output->Name() << "_"
-                 << sum_op_output->id();
+      VLOG(3) << "remove sum_op_output: " << sum_op_output->Name() << "_"
+              << sum_op_output->id();
       graph->RemoveNode(sum_op_output);
     }
-    LOG(ERROR) << "remove sum_op: " << sum_op->Name() << "_" << sum_op->id();
+    VLOG(3) << "remove sum_op: " << sum_op->Name() << "_" << sum_op->id();
     graph->RemoveNode(sum_op);
   }

   for (auto* node : graph->Nodes()) {
     for (Node* output_node : node->outputs) {
       if (output_node->Name() == "sgd") {
-        LOG(ERROR) << "Node link to SGD: " << node->Name() << "_" << node->id()
-                   << " --> " << output_node->Name() << "_"
-                   << output_node->id();
+        VLOG(3) << "Node link to SGD: " << node->Name() << "_" << node->id()
+                << " --> " << output_node->Name() << "_"
+                << output_node->id();
         for (Node* input_node : node->inputs) {
-          LOG(ERROR) << "SGD Input link: " << input_node->Name() << "_"
-                     << input_node->id() << " --> " << node->Name() << "_"
-                     << node->id();
+          VLOG(3) << "SGD Input link: " << input_node->Name() << "_"
+                  << input_node->id() << " --> " << node->Name() << "_"
+                  << node->id();
         }
       }
     }
   }
...
@@ -226,8 +225,7 @@ ir::Node* LockFreeOptimizePass::CreateNewSGDNode(
     }
   }

-  LOG(ERROR) << "Create new opt node" << sgd_node->Name() << "_"
-             << sgd_node->id();
+  VLOG(3) << "Create new opt node" << sgd_node->Name() << "_"
+          << sgd_node->id();
   return sgd_node;
 }
...
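Note on the change: this commit downgrades the pass's tracing output from LOG(ERROR), which is always printed, to VLOG(3), which only prints when verbose logging is enabled at level 3 or higher. Both macros come from glog, the logging library behind Paddle's LOG/VLOG. A minimal standalone sketch of the difference (assuming glog is available; the message text is illustrative, not taken from the pass):

#include <glog/logging.h>

int main(int argc, char* argv[]) {
  google::InitGoogleLogging(argv[0]);

  // Always emitted to stderr at ERROR severity, even in normal runs.
  LOG(ERROR) << "found opt node (example)";

  // Emitted only when verbosity >= 3, e.g. when launched with GLOG_v=3.
  // Silent by default, so routine graph-pass tracing stays opt-in.
  VLOG(3) << "found opt node (example)";

  return 0;
}

With this change, a normal run no longer floods stderr with pseudo-errors from the lock-free-optimize pass; setting GLOG_v=3 restores the trace when debugging.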