BaiXuePrincess / Paddle (forked from PaddlePaddle / Paddle)
Commit a15a3fc3

Authored Feb 23, 2019 by minqiyang

Polish code

test=develop

Parent: 9dc64edf
Showing 6 changed files with 31 additions and 36 deletions (+31 −36)
paddle/fluid/framework/block_desc.cc    +1  −1
paddle/fluid/framework/block_desc.h     +1  −1
paddle/fluid/imperative/layer.cc        +0  −27
paddle/fluid/imperative/layer.h         +25 −2
paddle/fluid/imperative/tracer.cc       +3  −3
paddle/fluid/pybind/protobuf.cc         +1  −2
paddle/fluid/framework/block_desc.cc

@@ -163,7 +163,7 @@ std::vector<OpDesc *> BlockDesc::AllOps() const {
   return res;
 }
 
-void BlockDesc::ClearBlock() {
+void BlockDesc::Clear() {
   // clear all ops
   ops_.clear();
paddle/fluid/framework/block_desc.h

@@ -97,7 +97,7 @@ class BlockDesc {
   std::vector<OpDesc *> AllOps() const;
 
-  void ClearBlock();
+  void Clear();
 
   size_t OpSize() const { return ops_.size(); }
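Both block_desc changes are a single rename: `BlockDesc::ClearBlock()` becomes `BlockDesc::Clear()`, while the body, which drops every op recorded in the block, stays the same. The sketch below is a minimal stand-in (a hypothetical `MyBlockDesc`, not Paddle's real class) showing what the renamed interface looks like to a caller:

```cpp
#include <cassert>
#include <string>
#include <vector>

// Hypothetical stand-in for a block descriptor; not Paddle's BlockDesc.
class MyBlockDesc {
 public:
  void AppendOp(const std::string &type) { ops_.push_back(type); }
  size_t OpSize() const { return ops_.size(); }

  // Renamed from ClearBlock(): drop every op recorded in the block.
  void Clear() { ops_.clear(); }

 private:
  std::vector<std::string> ops_;
};

int main() {
  MyBlockDesc block;
  block.AppendOp("mul");
  block.AppendOp("elementwise_add");
  block.Clear();  // call sites must switch from ClearBlock() to Clear()
  assert(block.OpSize() == 0);
  return 0;
}
```

Any C++ caller of the old `ClearBlock()` has to move to `Clear()`; the Python-facing name is handled separately in the protobuf.cc diff below.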
paddle/fluid/imperative/layer.cc

@@ -205,33 +205,6 @@ framework::LoDTensor& VarBase::GradValue() {
   return *(grads_->var_->GetMutable<framework::LoDTensor>());
 }
 
-void VarBase::ClearGradient() {
-  VLOG(1) << "clear gradient of " << var_desc_->Name();
-  if (grads_ && grads_->var_ && grads_->var_->IsInitialized()) {
-    auto grads_t = grads_->var_->GetMutable<framework::LoDTensor>();
-    operators::math::set_constant(
-        *(platform::DeviceContextPool::Instance().Get(
-            grads_->var_->Get<framework::LoDTensor>().place())),
-        grads_t, 0.0);
-  }
-}
-
-void VarBase::RunBackward() {
-  if (!pre_op_) return;
-
-  VLOG(3) << "start backward";
-  auto grads_t = grads_->var_->GetMutable<framework::LoDTensor>();
-  operators::math::set_constant(
-      *(platform::DeviceContextPool::Instance().Get(
-          var_->GetMutable<framework::LoDTensor>()->place())),
-      grads_t, 1.0);
-
-  PADDLE_ENFORCE(
-      grads_ ==
-      pre_op_->output_vars_[pre_op_out_name_][pre_op_out_idx_]->grads_);
-  Autograd().RunBackward(this);
-}
-
 std::map<std::string, std::vector<VarBase*>> OpBase::ApplyGrad() {
   if (grad_op_descs_.empty() && backward_id_ <= 0) {
     VLOG(3) << "op with no grad: " << op_desc_->Type();
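The two function bodies removed here are not deleted outright; they move into the class definition in layer.h (next diff). Functionally, `ClearGradient()` fills the gradient tensor with 0.0 and `RunBackward()` seeds it with 1.0 before handing control to `Autograd().RunBackward(this)`. As a rough illustration of that fill step only, here is a plain-buffer sketch; `GradBuffer` and `SetConstant` are made-up stand-ins for `framework::LoDTensor` and `operators::math::set_constant`, not Paddle APIs:

```cpp
#include <algorithm>
#include <cstdio>
#include <vector>

// Hypothetical gradient buffer; Paddle uses framework::LoDTensor instead.
struct GradBuffer {
  std::vector<float> data;
};

// Stand-in for operators::math::set_constant: fill every element with `value`.
void SetConstant(GradBuffer *grad, float value) {
  std::fill(grad->data.begin(), grad->data.end(), value);
}

int main() {
  GradBuffer grad{std::vector<float>(4, 3.14f)};
  SetConstant(&grad, 1.0f);  // RunBackward(): seed the output gradient with ones
  SetConstant(&grad, 0.0f);  // ClearGradient(): zero the accumulated gradient
  std::printf("grad[0] = %f\n", grad.data[0]);
  return 0;
}
```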
paddle/fluid/imperative/layer.h

@@ -150,9 +150,32 @@ class VarBase {
     }
   }
 
-  void RunBackward();
+  void RunBackward() {
+    if (!pre_op_) return;
 
-  void ClearGradient();
+    VLOG(3) << "start backward";
+    auto grads_t = grads_->var_->GetMutable<framework::LoDTensor>();
+    operators::math::set_constant(
+        *(platform::DeviceContextPool::Instance().Get(
+            var_->GetMutable<framework::LoDTensor>()->place())),
+        grads_t, 1.0);
+
+    PADDLE_ENFORCE(
+        grads_ ==
+        pre_op_->output_vars_[pre_op_out_name_][pre_op_out_idx_]->grads_);
+    Autograd().RunBackward(this);
+  }
+
+  void ClearGradient() {
+    VLOG(1) << "clear gradient of " << var_desc_->Name();
+    if (grads_ && grads_->var_ && grads_->var_->IsInitialized()) {
+      auto grads_t = grads_->var_->GetMutable<framework::LoDTensor>();
+      operators::math::set_constant(
+          *(platform::DeviceContextPool::Instance().Get(
+              grads_->var_->Get<framework::LoDTensor>().place())),
+          grads_t, 0.0);
+    }
+  }
 
   framework::LoDTensor& GradValue();
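This is the receiving side of the move: the declarations `void RunBackward();` and `void ClearGradient();` in `VarBase` become inline definitions in the header, so the out-of-line bodies in layer.cc can go. A toy sketch of the same refactor on a made-up `Counter` class:

```cpp
// Before: declaration in the header, definition in the .cc file.
//   struct Counter { void Reset(); int value = 0; };   // counter.h
//   void Counter::Reset() { value = 0; }               // counter.cc
//
// After: the definition moves into the class body, implicitly inline.
struct Counter {
  void Reset() { value = 0; }  // defined in the header, no out-of-line body needed
  int value = 0;
};

int main() {
  Counter c;
  c.value = 7;
  c.Reset();
  return c.value;  // 0
}
```

A member function defined inside the class body is implicitly inline, so including the header from several translation units does not produce duplicate-symbol errors.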
paddle/fluid/imperative/tracer.cc

@@ -145,7 +145,7 @@ std::set<std::string> Tracer::Trace(OpBase* op, const VarBasePtrMap& inputs,
   prepared_op.func(framework::ExecutionContext(
       prepared_op.op, scope, *prepared_op.dev_ctx, prepared_op.ctx));
 
-  std::set<std::string> grad_deps_var;
+  std::set<std::string> vars_saved_for_backward;
 
   if (!stop_gradient) {
     std::unique_ptr<std::unordered_map<std::string, std::string>> grad_to_var(

@@ -166,7 +166,7 @@ std::set<std::string> Tracer::Trace(OpBase* op, const VarBasePtrMap& inputs,
           PADDLE_ENFORCE(fwd_var_it != vars.end());
           // Forward inputs or outputs.
           grad_in_vars.push_back(fwd_var_it->second->var_);
-          grad_deps_var.insert(it.first);
+          vars_saved_for_backward.insert(it.first);
         } else {
           VarBase* var = vars[var_it->second];
           if (!var->grads_->var_->IsInitialized()) {

@@ -200,7 +200,7 @@ std::set<std::string> Tracer::Trace(OpBase* op, const VarBasePtrMap& inputs,
   }
 
   op->block_ = block;
-  return grad_deps_var;
+  return vars_saved_for_backward;
 }
 
 std::vector<VarBase*> Tracer::PyTrace(OpBase* op,
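The tracer change is purely a rename: the set that `Tracer::Trace` builds and returns is now `vars_saved_for_backward` instead of `grad_deps_var`, which states more plainly what it holds (the names of forward variables the backward pass will read). A small self-contained sketch of that collect-and-return pattern, with hypothetical names:

```cpp
#include <iostream>
#include <set>
#include <string>
#include <vector>

// Hypothetical trace step: record which forward variables must be kept for backward.
std::set<std::string> Trace(const std::vector<std::string>& inputs,
                            bool stop_gradient) {
  std::set<std::string> vars_saved_for_backward;  // renamed from grad_deps_var
  if (!stop_gradient) {
    for (const auto& name : inputs) {
      vars_saved_for_backward.insert(name);  // saved so the backward op can read it
    }
  }
  return vars_saved_for_backward;
}

int main() {
  for (const auto& name : Trace({"x", "w"}, /*stop_gradient=*/false)) {
    std::cout << name << "\n";
  }
  return 0;
}
```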
paddle/fluid/pybind/protobuf.cc

@@ -189,8 +189,7 @@ void BindBlockDesc(pybind11::module *m) {
             return self.HasVar(name);
           },
           pybind11::return_value_policy::reference)
-      .def("_clear_block",
-           [](pd::BlockDesc &self) { return self.ClearBlock(); },
+      .def("_clear_block", [](pd::BlockDesc &self) { return self.Clear(); },
           pybind11::return_value_policy::reference)
      .def("_rename_var",
          [](pd::BlockDesc &self, const pybind11::bytes &byte_name,
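On the binding side, the Python-visible method keeps its name `_clear_block` but now forwards to the renamed `Clear()`, and the two-line `.def(...)` collapses to one. A self-contained pybind11 sketch of the same pattern (module and class names here are invented, not Paddle's):

```cpp
#include <pybind11/pybind11.h>

#include <string>
#include <vector>

namespace py = pybind11;

// Hypothetical descriptor class; stands in for pd::BlockDesc.
class BlockDescLike {
 public:
  void AppendOp(const std::string &type) { ops_.push_back(type); }
  size_t OpSize() const { return ops_.size(); }
  void Clear() { ops_.clear(); }  // renamed from ClearBlock()

 private:
  std::vector<std::string> ops_;
};

PYBIND11_MODULE(example, m) {
  py::class_<BlockDescLike>(m, "BlockDesc")
      .def(py::init<>())
      .def("append_op", &BlockDescLike::AppendOp)
      .def("op_size", &BlockDescLike::OpSize)
      // The Python name stays "_clear_block"; only the C++ call behind it changes.
      .def("_clear_block",
           [](BlockDescLike &self) { return self.Clear(); });
}
```

From Python, `example.BlockDesc()._clear_block()` behaves the same before and after the rename; only the C++ member it forwards to changes.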