Commit edb22c2f
Authored:  Nov 10, 2017 by Yu Yang
Committed: Nov 10, 2017 by Yang Yang(Tony)
Parent:    2378679a

Add Scope::Rename (#5534)

It is useful in the gradient phase of an operator with a block.
Showing 3 changed files with 34 additions and 17 deletions (+34 -17):

  paddle/framework/scope.cc          +18   -0
  paddle/framework/scope.h            +8   -1
  paddle/operators/recurrent_op.cc    +8  -16
paddle/framework/scope.cc
@@ -98,5 +98,23 @@ void Scope::DeleteScope(Scope* scope) {
   delete scope;
 }
 
+void Scope::Rename(const std::string& origin_name,
+                   const std::string& new_name) const {
+  auto origin_it = vars_.find(origin_name);
+  PADDLE_ENFORCE(origin_it != vars_.end(),
+                 "Cannot find original variable with name %s", origin_name);
+  auto new_it = vars_.find(new_name);
+  PADDLE_ENFORCE(new_it == vars_.end(),
+                 "The variable with name %s is already in the scope", new_name);
+  vars_[new_name] = origin_it->second;
+  vars_.erase(origin_it);
+}
+
+std::string Scope::Rename(const std::string& origin_name) const {
+  auto var_name = string::Sprintf("%p.%d", this, vars_.size());
+  Rename(origin_name, var_name);
+  return var_name;
+}
+
 }  // namespace framework
 }  // namespace paddle
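
For orientation, a hedged usage sketch of the two overloads added above; the variable name "w@GRAD" and the wrapping function are illustrative, not part of the commit:

#include <string>

#include "paddle/framework/scope.h"

// Illustrative fragment only (not from the commit).
void RenameRoundTrip(paddle::framework::Scope* scope) {
  scope->Var("w@GRAD");  // ensure the variable exists in this scope

  // Overload 2: move the variable to a generated unique name
  // ("%p.%d" from the scope pointer and current map size) and return it.
  std::string tmp = scope->Rename("w@GRAD");

  // Overload 1: move it back under an explicit name; PADDLE_ENFORCE fails
  // if the source name is missing or the target name is already taken.
  scope->Rename(tmp, "w@GRAD");
}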
paddle/framework/scope.h
@@ -68,11 +68,18 @@ class Scope {
   // enumerate all the variables current contains.
   std::vector<std::string> GetAllNames(bool recursive = false) const;
 
+  // Rename variable to a new name
+  void Rename(const std::string& origin_name,
+              const std::string& new_name) const;
+
+  // Rename variable to a new name and return the new name
+  std::string Rename(const std::string& origin_name) const;
+
  private:
   // Call Scope::NewScope for a sub-scope.
   explicit Scope(Scope const* parent) : parent_(parent) {}
 
-  std::unordered_map<std::string, Variable*> vars_;
+  mutable std::unordered_map<std::string, Variable*> vars_;
   mutable std::list<Scope*> kids_;
   Scope const* parent_{nullptr};
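
Note the one-line change at the bottom: vars_ becomes mutable. That is what lets both Rename overloads be declared const: renaming rewires the name-to-variable map without changing the set of variables the scope owns. A minimal standalone sketch of the same pattern, with generic names rather than Paddle's:

#include <string>
#include <unordered_map>

class NameTable {
 public:
  // Declared const because the stored values are untouched; only the key
  // under which one value is filed changes.
  void Rename(const std::string& from, const std::string& to) const {
    auto it = names_.find(from);
    // The real code enforces both conditions with PADDLE_ENFORCE.
    if (it == names_.end() || names_.count(to) != 0) return;
    names_[to] = it->second;
    names_.erase(it);
  }

 private:
  // mutable permits mutation from const member functions.
  mutable std::unordered_map<std::string, int> names_;
};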
paddle/operators/recurrent_op.cc
@@ -387,8 +387,8 @@ class RecurrentGradOp : public RecurrentBase {
       auto& p_names = Inputs(kParameters);
       PADDLE_ENFORCE_EQ(pg_names.size(), p_names.size());
 
-      for (size_t prog_id = 0; prog_id < pg_names.size(); ++prog_id) {
-        auto inside_grad_name = framework::GradVarName(p_names[prog_id]);
+      for (size_t param_id = 0; param_id < pg_names.size(); ++param_id) {
+        auto inside_grad_name = framework::GradVarName(p_names[param_id]);
 
         // If does not compute gradient of that variable inside rnn, just
         // continue
@@ -406,27 +406,19 @@ class RecurrentGradOp : public RecurrentBase {
           attrs["value"] = 0.0f;
 
           auto zero_op = framework::OpRegistry::CreateOp(
-              "fill_constant", {}, {{"Out", {pg_names[prog_id]}}}, attrs);
+              "fill_constant", {}, {{"Out", {pg_names[param_id]}}}, attrs);
           zero_op->Run(scope, dev_ctx);
         }
 
+        auto new_inside_name = cur_scope.Rename(inside_grad_name);
         // sum gradient
-        auto* outside_var = scope.FindVar(pg_names[prog_id]);
-        PADDLE_ENFORCE(outside_var != nullptr);
-        auto& outside_tensor = *outside_var->GetMutable<framework::LoDTensor>();
-
-        std::string result_var_name;
-        auto* local_result_var = cur_scope.Var(&result_var_name);
-        auto& local_result_tensor =
-            *local_result_var->GetMutable<framework::LoDTensor>();
-
-        local_result_tensor.ShareDataWith(outside_tensor);
-
         auto sum_op = framework::OpRegistry::CreateOp(
-            "sum", {{"X", {result_var_name, inside_grad_name}}},
-            {{"Out", {result_var_name}}}, {});
+            "sum", {{"X", {pg_names[param_id], new_inside_name}}},
+            {{"Out", {pg_names[param_id]}}}, {});
         sum_op->Run(cur_scope, dev_ctx);
+        cur_scope.Rename(new_inside_name, inside_grad_name);
       }
     }
     VLOG(5) << "Accumulate Parameter finished ";
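
Why the rename dance? Reading the diff, the step gradient inside the RNN's scope carries the same name as the accumulated gradient in the enclosing scope, so the inner variable shadows the outer one during name lookup. Renaming the inner gradient aside lets the sum op resolve pg_names[param_id] in the parent scope and accumulate in place (replacing the old temporary-buffer-plus-ShareDataWith approach); the name is then restored for the next iteration. A minimal sketch of that shadowing logic, with doubles standing in for tensors and all names invented for illustration:

#include <iostream>
#include <map>
#include <string>

// Toy stand-in for Scope: name lookup falls through to the parent scope.
struct MiniScope {
  std::map<std::string, double> vars;
  MiniScope* parent = nullptr;

  double* Find(const std::string& name) {
    auto it = vars.find(name);
    if (it != vars.end()) return &it->second;
    return parent ? parent->Find(name) : nullptr;
  }

  std::string Rename(const std::string& name) {  // mirrors the new overload
    std::string tmp = "tmp." + std::to_string(vars.size());
    vars.emplace(tmp, vars.at(name));
    vars.erase(name);
    return tmp;
  }
};

int main() {
  MiniScope outer, inner;
  inner.parent = &outer;
  outer.vars["w@GRAD"] = 1.5;  // gradient accumulated across earlier steps
  inner.vars["w@GRAD"] = 2.0;  // this step's gradient shadows the outer one

  auto tmp = inner.Rename("w@GRAD");            // step 1: move inner grad aside
  *inner.Find("w@GRAD") += inner.vars.at(tmp);  // step 2: lookup now reaches the
                                                // outer var; accumulate in place
  inner.vars["w@GRAD"] = inner.vars.at(tmp);    // step 3: restore the original
  inner.vars.erase(tmp);                        // name for the next iteration

  std::cout << outer.vars["w@GRAD"] << "\n";    // prints 3.5
}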
登录