s920243400 / PaddleDetection (forked from PaddlePaddle / PaddleDetection, in sync with the upstream project)
Commit c3236f82
Authored Dec 02, 2018 by Xin Pan

polish

Parent: e5d64fd4
Showing 1 changed file with 5 additions and 15 deletions.

paddle/fluid/imperative/layer.cc (+5, −15), viewed at c3236f82:
@@ -44,16 +44,12 @@ class Autograd {
  public:
   explicit Autograd(framework::Scope* scope) : scope_(scope) {}
 
-  void RunBackward(VarBase* var, framework::Variable* grad) {
-    if (!var->pre_op_) {
-      var->ApplyGrad(scope_, grad);
-      return;
-    }
+  void RunBackward(VarBase* var) {
     PADDLE_ENFORCE(var->pre_op_->op_desc_);
     // TODO(panyx0718): Only create vars that "require_grad"
     std::vector<Variable*> op_grads =
         CreateOpGrads(var->pre_op_->output_vars_->size());
-    op_grads[var->pre_op_out_idx_] = grad;
+    op_grads[var->pre_op_out_idx_] = var->grads_;
 
     std::deque<std::pair<OpBase*, std::vector<Variable*>>> ready;
     ready.push_back(std::make_pair(var->pre_op_, op_grads));
@@ -238,8 +234,6 @@ std::vector<Variable*> OpBase::ApplyGrad(framework::Scope* scope) {
     framework::Variable* var = scope->FindVar(outvar);
     LOG(ERROR) << "apply grad " << outvar << " with origin "
                << origin_var;
-    // TODO(panyx0718): Accumulate.
-    // origin_in_var->grads_ = var;
     origin_in_var->ApplyGrad(scope, var);
     ret[i] = var;
     // TODO(panyx0718): There might be 2 var with the same name. We
@@ -254,15 +248,11 @@ std::vector<Variable*> OpBase::ApplyGrad(framework::Scope* scope) {
 }
 
 void VarBase::RunBackward(framework::Scope* scope) {
-  // TODO(panyx0718): Might not be 0th, need to detect.
-  grads_ = CreateVariable(pre_op_->grad_op_desc_->InputArgumentNames()[0],
+  grads_ = CreateVariable(framework::GradVarName(var_desc_->Name()),
                           var_->Get<framework::LoDTensor>().dims(), 1.0, scope,
                           false);
-
-  framework::Variable* grad =
-      CreateVariable("init@imperative_grad",
-                     var_->Get<framework::LoDTensor>().dims(), 1.0, scope);
-  Autograd(scope).RunBackward(this, grad);
+  if (!pre_op_) return;
+  Autograd(scope).RunBackward(this);
 }
 
 }  // namespace imperative
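Taken together, the hunks move gradient seeding out of the Autograd driver and into the variable itself: VarBase::RunBackward now creates its own grads_ variable (named via framework::GradVarName) and returns early when there is no pre_op_, so Autograd::RunBackward no longer needs an explicit grad argument or the "init@imperative_grad" variable. Below is a minimal, self-contained sketch of that call pattern; it is not Paddle code, and every type and member in it is a simplified stand-in used only for illustration.

// Sketch of the post-commit pattern: the variable owns its seed gradient,
// and the backward driver reads it from the variable instead of receiving it.
#include <iostream>

struct VarBase {
  float grad = 0.f;         // plays the role of VarBase::grads_ in the diff
  bool has_pre_op = false;  // plays the role of pre_op_ != nullptr
  void RunBackward();       // defined below, after Autograd
};

struct Autograd {
  // New-style driver: no explicit grad parameter, reads var->grad instead.
  void RunBackward(VarBase* var) {
    std::cout << "backward from seed grad = " << var->grad << "\n";
    // ... a real driver would walk the producing ops and accumulate gradients ...
  }
};

void VarBase::RunBackward() {
  grad = 1.f;               // the variable seeds its own gradient
  if (!has_pre_op) return;  // leaf with no producer: nothing to backpropagate
  Autograd().RunBackward(this);
}

int main() {
  VarBase loss;
  loss.has_pre_op = true;
  loss.RunBackward();       // caller no longer builds an init-grad variable
  return 0;
}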