机器未来 / Paddle · Commit 8f962f74
Forked from PaddlePaddle / Paddle
Commit 8f962f74, authored Jan 09, 2018 by fengjiayi

Update

Parent: 32b09b51

Showing 1 changed file with 42 additions and 37 deletions (+42 −37)
paddle/operators/while_op.cc
@@ -211,59 +211,64 @@ class WhileGradOpDescMaker : public framework::SingleGradOpDescMaker {
 protected:
  std::unique_ptr<framework::OpDesc> Apply() const override {
-    auto *grad = new framework::OpDesc();
-    grad->SetType("while_grad");
-    grad->SetInput(kX, Input(kX));
+    auto *while_grad = new framework::OpDesc();
+    while_grad->SetType("while_grad");
+    while_grad->SetInput(kX, Input(kX));
+    while_grad->SetInput(kOutputs, Output(kOutputs));
+    while_grad->SetInput(kStepScopes, Output(kStepScopes));
+
+    auto *grad_block = this->grad_block_[0];
+    auto *fwd_block = grad_block->ParentBlock();
+    // auto *parent_block = fwd_block->ParentBlock();
 
     // Not all of IGs will be generated by inner gradient operators of while op.
     // Ignore IGs that is not generated by the inside block.
-    auto igs = InputGrad(kX, /*do not drop empty gradient*/ false);
-    std::unordered_set<std::string> all_outs;
-    LOG(INFO) << "FUCK1";
-    for (size_t i = 0; i < grad_block_[0]->OpSize(); ++i) {
-      for (auto &oname : grad_block_[0]->Op(i)->OutputArgumentNames()) {
-        all_outs.insert(oname);
+    std::unordered_set<std::string> inner_op_outputs;
+    for (const auto *op : grad_block->AllOps()) {
+      for (auto &oname : op->OutputArgumentNames()) {
+        inner_op_outputs.insert(oname);
       }
     }
-    LOG(INFO) << "FUCK2";
+    auto igs = InputGrad(kX, /*do not drop empty gradient*/ false);
     for (auto &each_ig : igs) {
-      if (all_outs.find(each_ig) == all_outs.end()) {
+      if (inner_op_outputs.find(each_ig) == inner_op_outputs.end()) {
         VLOG(10) << "Ignore " << each_ig;
         each_ig = framework::kEmptyVarName;
       }
     }
-    grad->SetOutput(framework::GradVarName(kX), igs);
-
-    grad->SetInput(kOutputs, Output(kOutputs));
+    while_grad->SetOutput(framework::GradVarName(kX), igs);
 
     // OG should be re-calculated by step blocks, since many outputs of while op
     // do not need to calculate gradients.
     std::unordered_set<std::string> block_ins;
-    auto *fwd_block = this->grad_block_[0]->ParentBlock();
-    std::copy(Input(kX).begin(), Input(kX).end(),
-              std::inserter(block_ins, block_ins.end()));
-    std::copy(Output(kOutputs).begin(), Output(kOutputs).end(),
-              std::inserter(block_ins, block_ins.end()));
+    for (auto &p : Input(kX)) {
+      block_ins.insert(p);
+    }
+    for (auto &o : Output(kOutputs)) {
+      block_ins.insert(o);
+    }
     std::unordered_set<std::string> extra_inputs;
-    for (size_t i = 0; i < grad_block_[0]->OpSize(); ++i) {
-      for (auto &input_name : grad_block_[0]->Op(i)->InputArgumentNames()) {
-        if (block_ins.find(input_name) != block_ins.end()) {
-          continue;
-        }
-        // If the input of Op is generated by the forward block, do not make it
-        // as input again.
-        if (fwd_block->FindVar(input_name) != nullptr) {
+    for (const auto *op : grad_block->AllOps()) {
+      for (auto &input_name : op->InputArgumentNames()) {
+        // If the input of Op has been recorded or is generated by the forward
+        // block, do not make it as input again.
+        if (block_ins.find(input_name) != block_ins.end() ||
+            fwd_block->FindVar(input_name) != nullptr) {
           continue;
         }
+        /*
+        if (parent_block->FindVarRecursive(input_name) == nullptr) {
+          VLOG(5) << "WARNING! Variable '" << input_name
+                  << "' is the input of '" << op->Type()
+                  << "'. But can not be found in any block.";
+          continue;
+        }
+        */
         extra_inputs.insert(input_name);
       }
-      for (auto &output_name : grad_block_[0]->Op(i)->OutputArgumentNames()) {
+      for (auto &output_name : op->OutputArgumentNames()) {
         block_ins.insert(output_name);
       }
     }
@@ -272,15 +277,15 @@ class WhileGradOpDescMaker : public framework::SingleGradOpDescMaker {
     extra_inputs_list.resize(extra_inputs.size());
     std::copy(extra_inputs.begin(), extra_inputs.end(),
               extra_inputs_list.begin());
-    grad->SetInput(framework::GradVarName(kOutputs), extra_inputs_list);
-    grad->SetInput(kStepScopes, Output(kStepScopes));
-    grad->SetAttrMap(this->Attrs());
-    grad->SetBlockAttr(kStepBlock, *grad_block_[0]);
+    while_grad->SetInput(framework::GradVarName(kOutputs), extra_inputs_list);
+    while_grad->SetAttrMap(this->Attrs());
+    while_grad->SetBlockAttr(kStepBlock, *grad_block);
     // record the original output gradient names, since the gradient name of
     // while operator could be renamed.
-    grad->SetAttr("original_output_grad", extra_inputs_list);
+    while_grad->SetAttr("original_output_grad", extra_inputs_list);
 
-    return std::unique_ptr<framework::OpDesc>(grad);
+    return std::unique_ptr<framework::OpDesc>(while_grad);
  }
};
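The net effect of the diff is easier to see outside the diff view: the gradient-block ops are walked via the cached grad_block pointer (AllOps()) instead of grad_block_[0]->Op(i) indexing, the stray LOG(INFO) << "FUCK*" debug lines are removed, and any input gradient (IG) that no op inside the gradient block produces is replaced with framework::kEmptyVarName so it is ignored downstream. A minimal, self-contained sketch of that IG-filtering step follows, using hypothetical stand-ins (kEmptyVarName constant, plain string vectors for op outputs) rather than Paddle's actual types:

#include <iostream>
#include <string>
#include <unordered_set>
#include <vector>

// Hypothetical stand-in for framework::kEmptyVarName.
static const std::string kEmptyVarName = "@EMPTY@";

// Replace each requested input gradient with kEmptyVarName unless some op
// inside the gradient block actually produces it -- the same filtering the
// diff above applies to the while_grad op's IGs.
void FilterUnproducedGrads(
    std::vector<std::string>* igs,
    const std::vector<std::vector<std::string>>& op_outputs) {
  // Collect every output name produced inside the (mock) gradient block.
  std::unordered_set<std::string> inner_op_outputs;
  for (const auto& outs : op_outputs) {
    for (const auto& oname : outs) {
      inner_op_outputs.insert(oname);
    }
  }
  // Blank out IGs nothing in the block generates.
  for (auto& each_ig : *igs) {
    if (inner_op_outputs.find(each_ig) == inner_op_outputs.end()) {
      each_ig = kEmptyVarName;
    }
  }
}

int main() {
  std::vector<std::string> igs = {"x@GRAD", "y@GRAD"};
  // Only x@GRAD is produced by an op inside the mock gradient block.
  FilterUnproducedGrads(&igs, {{"x@GRAD"}, {"tmp_0"}});
  for (const auto& ig : igs) std::cout << ig << "\n";  // x@GRAD, @EMPTY@
  return 0;
}

Collecting every inner output into an unordered_set first keeps the filter at O(total outputs + IGs) rather than rescanning the whole block once per gradient name.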