Crayon鑫 / Paddle (forked from PaddlePaddle / Paddle)
Commit 61a7df2e
Authored Dec 20, 2017 by fengjiayi

update

Parent: 590e6111

Showing 2 changed files with 35 additions and 13 deletions (+35 -13)
paddle/pybind/protobuf.cc          +4  -1
python/paddle/v2/fluid/backward.py +31 -12
paddle/pybind/protobuf.cc

```diff
@@ -157,7 +157,10 @@ void BindBlockDesc(py::module &m) {
       .def_property_readonly("parent", &BlockDescBind::Parent)
       .def("append_op", &BlockDescBind::AppendOp,
            py::return_value_policy::reference)
-      .def("append_allocated_op", &BlockDescBind::AppendAllocatedOp)
+      .def("append_allocated_op",
+           [](BlockDescBind &self, OpDescBind *op_desc) {
+             self.AppendAllocatedOp(std::unique_ptr<OpDescBind>(op_desc));
+           })
       .def("prepend_op", &BlockDescBind::PrependOp,
            py::return_value_policy::reference)
       .def("var",
```
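The lambda added above accepts a raw `OpDescBind*` from Python and wraps it in a `std::unique_ptr` before calling `AppendAllocatedOp`, so the block takes ownership of an op description that was allocated elsewhere. A rough pure-Python illustration of that ownership hand-off (the classes below are stand-ins, not the Paddle ones):

```python
class OpDesc(object):
    """Stand-in for an op description whose ownership can be handed off."""
    def __init__(self, op_type):
        self.type = op_type
        self.owner = None


class BlockDesc(object):
    """Stand-in block: append_allocated_op takes ownership of the OpDesc,
    mirroring the unique_ptr wrapping done by the lambda in the binding above."""
    def __init__(self):
        self.ops = []

    def append_allocated_op(self, op_desc):
        if op_desc.owner is not None:
            raise ValueError("op_desc is already owned by a block")
        op_desc.owner = self
        self.ops.append(op_desc)


block = BlockDesc()
op = OpDesc("sum")
block.append_allocated_op(op)       # ownership conceptually transfers here
print([o.type for o in block.ops])  # ['sum']
```

The point is only the contract: once appended, the op description belongs to the block, which is what the `unique_ptr` wrapping enforces on the C++ side.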
python/paddle/v2/fluid/backward.py

```diff
 from paddle.v2.fluid import framework as framework
 from . import core
 import collections
+import pdb

 __all__ = ['append_backward_ops']
```
```diff
@@ -15,7 +16,8 @@ def rename_arg(op_desc_list, old_name, new_name, begin_idx=None, end_idx=None):
         op_desc_list[i].rename_output(old_name, new_name)


-def backward_impl(block,
+def backward_impl(target,
+                  block,
                   target_block,
                   no_grad_set,
                   grad_info_map,
```
```diff
@@ -29,8 +31,8 @@ def backward_impl(block,
             sub_block_idx = each_op.block_attr("sub_block")
             sub_block = program.block(sub_block_idx)
             grad_sub_block = program.create_block(parent_idx=sub_block_idx)
-            backward_impl(sub_block, grad_sub_block, no_grad_set, grad_info_map,
-                          callback)
+            backward_impl(target, sub_block, grad_sub_block, no_grad_set,
+                          grad_info_map, callback)
             grad_sub_block_list.append(grad_sub_block)
         grad_op_desc = core.get_grad_op_desc(each_op.desc, no_grad_set[block.idx],
```
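The hunk above now threads the original `target` through the recursion: for every op that carries a `sub_block` attribute, a gradient sub-block is created and `backward_impl` descends into it before the op's own gradient descriptions are requested. A self-contained sketch of that recursion over a toy nested-block structure (all class and variable names here are stand-ins, not the fluid API):

```python
class Op(object):
    def __init__(self, op_type, sub_block=None):
        self.type = op_type
        self.sub_block = sub_block  # nested Block for ops like while/cond, else None


class Block(object):
    def __init__(self, idx, ops=None):
        self.idx = idx
        self.ops = ops or []


def backward_impl(target, block, grad_blocks):
    """Whenever an op owns a sub-block, recurse into it and record a new
    gradient sub-block for that op."""
    for op in block.ops:
        if op.sub_block is not None:
            backward_impl(target, op.sub_block, grad_blocks)
            grad_blocks.append(Block(idx=len(grad_blocks) + 1000))  # toy id scheme
        # ...the real code would now ask for this op's gradient op descriptions


main = Block(0, [Op("mul"), Op("while", sub_block=Block(1, [Op("add")]))])
grads = []
backward_impl("loss", main, grads)
print(len(grads))  # 1 gradient sub-block, created for the while op
```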
```diff
@@ -46,6 +48,7 @@ def backward_impl(block,
     for pos, op_desc in enumerate(grad_op_descs):
         for var_name in op_desc.input_arg_names():
             if len(var_inputs[var_name]) > 1:
+                pdb.set_trace()
                 pending_sum_ops.append((core.OpDesc(
                     type="sum_op", inputs=var_inputs[var_name],
```
```diff
@@ -55,7 +58,7 @@ def backward_impl(block,
         for var_name in op_desc.output_arg_names():
             if len(var_inputs[var_name]) == 0:
                 # it's the first time we get the variable
-                var_inputs[var_name] = var_name
+                var_inputs[var_name] = [var_name]
             else:
                 if len(var_inputs[var_name] == 1):
                     new_name = var_name + "@RENAME@" + \
```
```diff
@@ -73,8 +76,9 @@ def backward_impl(block,
                     var_inputs[var_name].append(new_name)
     for var_name, inputs in var_inputs.iteritems():
         if len(inputs) > 1:
-            pending_sum_ops.append((core.OpDesc(
-                type="sum_op", inputs=inputs, outputs=var_name, attrs={}),
+            pdb.set_trace()
+            pending_sum_ops.append((core.OpDesc("sum_op", {"X": inputs},
+                                                {"Out": var_name}, {}),
                                     len(grad_op_descs)))
     # TODO: remove op in no grad set
```
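Together, the last three hunks implement gradient accumulation: the first gradient op that produces a variable keeps the original name, later producers are renamed with an `@RENAME@` suffix, and a `sum_op` is queued for every variable that ends up with more than one producer. A runnable sketch of just that bookkeeping, using plain dictionaries (the `@RENAME@` suffix and the `{"X": ...}` / `{"Out": ...}` shape of the sum op are taken from the diff; everything else is a stand-in, and the real function does more, e.g. renaming arguments through `rename_arg`, which this sketch skips):

```python
from collections import defaultdict


def accumulate_grad_outputs(op_outputs):
    """op_outputs: one list of output variable names per gradient op.
    Returns the (possibly renamed) outputs plus the sum ops to append."""
    producers = defaultdict(list)  # original name -> names actually written
    renamed = []
    for outs in op_outputs:
        new_outs = []
        for name in outs:
            if len(producers[name]) == 0:
                producers[name].append(name)          # first producer keeps the name
                new_outs.append(name)
            else:
                new_name = name + "@RENAME@" + str(len(producers[name]))
                producers[name].append(new_name)      # later producers are renamed
                new_outs.append(new_name)
        renamed.append(new_outs)
    pending_sum_ops = [("sum_op", {"X": writers}, {"Out": name})
                       for name, writers in producers.items() if len(writers) > 1]
    return renamed, pending_sum_ops


outs, sums = accumulate_grad_outputs([["x@GRAD"], ["x@GRAD"], ["y@GRAD"]])
print(outs)  # [['x@GRAD'], ['x@GRAD@RENAME@1'], ['y@GRAD']]
print(sums)  # [('sum_op', {'X': ['x@GRAD', 'x@GRAD@RENAME@1']}, {'Out': 'x@GRAD'})]
```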
```diff
@@ -84,6 +88,7 @@ def backward_impl(block,
     # create new gradient variables in the target block desc
     for op_desc in grad_op_descs:
         for grad_var_name in op_desc.output_arg_names():
+            grad_var_name = grad_var_name.encode("ascii")
             if target_block.desc.has_var(grad_var_name) or grad_var_name == core.get_empty_var_name():
```
```diff
@@ -93,6 +98,16 @@ def backward_impl(block,
                 continue
             grad_info_map[grad_to_var[grad_var_name]] = (grad_var_name, target_block)
+    if target_block.idx == 0:
+        grad_target_name = (target.name + "@GRAD")
+        target_block.desc.var(grad_target_name)
+        grad_op_descs.insert(
+            0,
+            core.OpDesc(u"fill_constant", {},
+                        {u"Out": [unicode(grad_target_name, "ascii")]},
+                        {u"shape": (1),
+                         u"value": 1.0,
+                         u"dtype": core.DataType.FP32}))
     # insert backward operators to target_block
     for op_desc in grad_op_descs:
         target_block.desc.append_allocated_op(op_desc)
```
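When `target_block` is the root block (`idx == 0`), the new code above creates the `target.name + "@GRAD"` variable and inserts a `fill_constant` op at position 0 that sets it to 1.0: the backward pass has to be seeded with d(target)/d(target) = 1 before any other gradient op runs. A tiny scalar reverse-mode sketch of why the seed is 1.0 (plain Python, unrelated to the fluid op set):

```python
def backward(ops, target):
    """ops: (output, op_type, inputs) tuples, in forward order, for scalar vars."""
    grads = {target: 1.0}                 # the seed: d(target)/d(target) = 1.0
    for out, op_type, ins in reversed(ops):
        g = grads.get(out, 0.0)
        if op_type == "add":              # out = a + b   ->  da += g, db += g
            for a in ins:
                grads[a] = grads.get(a, 0.0) + g
        elif op_type == "scale":          # out = c * a   ->  da += c * g
            a, c = ins
            grads[a] = grads.get(a, 0.0) + c * g
    return grads


# loss = 3 * (x + y)
ops = [("t", "add", ["x", "y"]), ("loss", "scale", ("t", 3.0))]
print(backward(ops, "loss"))  # {'loss': 1.0, 't': 3.0, 'x': 3.0, 'y': 3.0}
```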
```diff
@@ -118,18 +133,22 @@ def append_backward_ops(loss, parameter_list=None, no_grad_set=None):
     assert isinstance(loss, framework.Variable)

     if no_grad_set is None:
+        no_grad_set = dict()
         program = loss.block.program
         assert isinstance(program, framework.Program)
-        no_grad_set = list()
         for block in program.blocks:
             assert isinstance(block, framework.Block)
+            block_no_grad_set = set()
             for var in block.vars.itervalues():
                 assert isinstance(var, framework.Variable)
                 if var.stop_gradient:
-                    no_grad_set.append(var.name)
-        no_grad_set = set(no_grad_set)
+                    block_no_grad_set.add(var.name)
+            no_grad_set[block.idx] = block_no_grad_set

-    param_grad_map = loss.block.program.append_backward(loss, no_grad_set)
+    grad_info_map = dict()
+    root_block = loss.block.program.block(0)
+    backward_impl(loss, root_block, root_block, no_grad_set, grad_info_map)
+    pdb.set_trace()
     if parameter_list is not None:
         parameters = parameter_list
     else:
```
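`append_backward_ops` now builds `no_grad_set` as a dict keyed by block index, where each value is the set of variable names in that block whose `stop_gradient` flag is set, then hands it to `backward_impl` together with a fresh `grad_info_map` and the root block. A stand-alone sketch of the per-block collection step (toy `Var`/`Block` classes, not the fluid ones):

```python
class Var(object):
    def __init__(self, name, stop_gradient=False):
        self.name = name
        self.stop_gradient = stop_gradient


class Block(object):
    def __init__(self, idx, variables):
        self.idx = idx
        self.vars = {v.name: v for v in variables}


def build_no_grad_set(blocks):
    """Map block index -> names that must not receive gradients in that block."""
    no_grad_set = dict()
    for block in blocks:
        block_no_grad_set = set()
        for var in block.vars.values():
            if var.stop_gradient:
                block_no_grad_set.add(var.name)
        no_grad_set[block.idx] = block_no_grad_set
    return no_grad_set


blocks = [Block(0, [Var("x"), Var("label", stop_gradient=True)]),
          Block(1, [Var("h")])]
print(build_no_grad_set(blocks))  # {0: {'label'}, 1: set()}
```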
```diff
@@ -137,9 +156,9 @@ def append_backward_ops(loss, parameter_list=None, no_grad_set=None):
         parameters = [param.name for param in params]
     params_and_grads = []
     for param in parameters:
-        if param not in param_grad_map:
+        if param not in grad_info_map:
             raise ValueError("param %s is not in map" % param)
-        grad_info = param_grad_map[param]
+        grad_info = grad_info_map[param]
         grad_block = loss.block.program.block(grad_info[1])
         if not grad_block.has_var(grad_info[0]):
             raise ValueError("grad block[{0}] did not have grad var {1}".format(
```