s920243400 / PaddleDetection
Forked from PaddlePaddle / PaddleDetection (in sync with the upstream project)
Commit aea5ccca
Authored on Dec 19, 2017 by Yang Yang

revise typo

Parent: 973aec2c
Showing 2 changed files with 18 additions and 18 deletions (+18 -18)

paddle/operators/recurrent_op.cc    +9 -9
paddle/operators/while_op.cc        +9 -9
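Despite the terse message, the change is a pure identifier rename in both operators: call sites that used kParallelScopes and kParallelBlock now use kStepScopes and kStepBlock, while the key strings the constants hold ("step_scopes" and "StepScopes" for the scopes, "sub_block" for the step block) are untouched, so attributes and outputs are still registered and looked up under the same names. A standalone sketch of that lookup-by-string pattern (illustrative only, not PaddlePaddle code; the registry map and its contents below are made up):

// Illustrative only: the framework resolves attributes and outputs by key
// string, so renaming the C++ constant that holds the key changes nothing at
// runtime as long as the string itself stays the same.
#include <iostream>
#include <map>
#include <string>

constexpr char kStepScopes[] = "step_scopes";  // same key string as before the rename

int main() {
  // Hypothetical stand-in for the framework's attribute/output registry.
  std::map<std::string, int> registry{{"step_scopes", 42}};
  std::cout << registry.at(kStepScopes) << "\n";  // still finds the same entry
  return 0;
}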
paddle/operators/recurrent_op.cc
@@ -22,7 +22,7 @@ constexpr char kInputs[] = "inputs";
 constexpr char kInitialStates[] = "initial_states";
 constexpr char kParameters[] = "parameters";
 constexpr char kOutputs[] = "outputs";
-constexpr char kParallelScopes[] = "step_scopes";
+constexpr char kStepScopes[] = "step_scopes";
 constexpr char kExStates[] = "ex_states";
 constexpr char kStates[] = "states";
 constexpr char kStepBlock[] = "sub_block";
@@ -234,7 +234,7 @@ class RecurrentOp : public RecurrentBase {
     auto reverse = Attr<bool>(kReverse);

     framework::Executor executor(dev_ctx);
-    auto *block = Attr<framework::BlockDescBind *>(kParallelBlock);
+    auto *block = Attr<framework::BlockDescBind *>(kStepBlock);
     auto *program = block->Program();

     for (size_t i = 0; i < seq_len; ++i) {
@@ -295,7 +295,7 @@ class RecurrentOp : public RecurrentBase {
  private:
   StepScopes CreateStepScopes(const framework::Scope &scope,
                               size_t seq_len) const {
-    auto *var = scope.FindVar(Output(kParallelScopes));
+    auto *var = scope.FindVar(Output(kStepScopes));
     PADDLE_ENFORCE(var != nullptr);
     return StepScopes(scope, var->GetMutable<StepScopeVar>(),
                       Attr<bool>(kIsTrain), seq_len);
@@ -317,7 +317,7 @@ class RecurrentGradOp : public RecurrentBase {
     auto reverse = Attr<bool>(kReverse);

     framework::Executor executor(dev_ctx);
-    auto *block = Attr<framework::BlockDescBind *>(kParallelBlock);
+    auto *block = Attr<framework::BlockDescBind *>(kStepBlock);
     auto *program = block->Program();

     for (size_t step_id = 0; step_id < seq_len; ++step_id) {
@@ -465,7 +465,7 @@ class RecurrentGradOp : public RecurrentBase {
  private:
   StepScopes CreateStepScopes(const framework::Scope &scope,
                               size_t seq_len) const {
-    auto *var = scope.FindVar(Input(kParallelScopes));
+    auto *var = scope.FindVar(Input(kStepScopes));
     PADDLE_ENFORCE(var != nullptr);
     return StepScopes(scope, var->GetMutable<StepScopeVar>(),
                       Attr<bool>(kIsTrain), seq_len, true /*is_backward*/);
@@ -510,7 +510,7 @@ class RecurrentOpProtoMaker : public framework::OpProtoAndCheckerMaker {
     AddOutput(kOutputs,
               "The output sequence of RNN. The sequence length must be same.")
         .AsDuplicable();
-    AddOutput(kParallelScopes,
+    AddOutput(kStepScopes,
               "StepScopes contain all local variables in each time step.");
     AddAttr<std::vector<std::string>>(kExStates,
                                       string::Sprintf(
@@ -523,7 +523,7 @@ The ex-state means the state value in the ex-timestep or the previous time step
                                       string::Sprintf(
                                           "The state variable names. [%s, %s, %s] must be the same order",
                                           kExStates, kStates, kInitStateGrads));
-    AddAttr<framework::BlockDescBind *>(kParallelBlock,
+    AddAttr<framework::BlockDescBind *>(kStepBlock,
                                         "The step block inside RNN");
     AddAttr<bool>(kReverse, R"DOC(Calculate RNN reversely or not.
 By default reverse=False
@@ -576,7 +576,7 @@ class RecurrentGradOpDescMaker : public framework::SingleGradOpDescMaker {
     }

     for (auto &output_param : this->OutputNames()) {
-      if (output_param == kParallelScopes) {
+      if (output_param == kStepScopes) {
         grad->SetInput(output_param, this->Output(output_param));
         grad->SetInput(framework::GradVarName(output_param),
                        this->Output(output_param));
@@ -587,7 +587,7 @@ class RecurrentGradOpDescMaker : public framework::SingleGradOpDescMaker {
       }
     }

     grad->SetAttrMap(this->Attrs());
-    grad->SetBlockAttr(kParallelBlock, *grad_block_[0]);
+    grad->SetBlockAttr(kStepBlock, *grad_block_[0]);
     return std::unique_ptr<framework::OpDescBind>(grad);
   }
paddle/operators/while_op.cc
@@ -27,7 +27,7 @@ using LoDTensor = framework::LoDTensor;
 constexpr char kStepBlock[] = "sub_block";
 constexpr char kCondition[] = "Condition";
-constexpr char kParallelScopes[] = "StepScopes";
+constexpr char kStepScopes[] = "StepScopes";
 constexpr char kParameters[] = "X";
 constexpr char kParamGrads[] = "X@GRAD";
 constexpr char kOutputs[] = "Out";
@@ -46,11 +46,11 @@ class WhileOp : public framework::OperatorBase {
     PADDLE_ENFORCE_EQ(cond.dims(), paddle::framework::make_ddim({1}));

     framework::Executor executor(dev_ctx);
-    auto *block = Attr<framework::BlockDescBind *>(kParallelBlock);
+    auto *block = Attr<framework::BlockDescBind *>(kStepBlock);
     auto *program = block->Program();

     auto step_scopes =
-        scope.FindVar(Output(kParallelScopes))->GetMutable<StepScopeVar>();
+        scope.FindVar(Output(kStepScopes))->GetMutable<StepScopeVar>();

     while (cond.data<bool>()[0]) {
       auto &current_scope = scope.NewScope();
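The hunk above also shows the execution shape of WhileOp: it keeps running the step block, opening a fresh scope per iteration, for as long as the condition tensor holds true. A rough standalone sketch of that loop (the Scope struct, step counter, and termination rule below are illustrative stand-ins, not Paddle types):

// Illustrative only: loop shape of the WhileOp Run() shown above. A plain bool
// stands in for cond.data<bool>()[0] and a vector of toy scopes stands in for
// the StepScopeVar output; nothing here is PaddlePaddle API.
#include <iostream>
#include <vector>

struct Scope { int step_id; };  // toy stand-in for framework::Scope

int main() {
  std::vector<Scope> step_scopes;  // plays the role of Output(kStepScopes)
  bool cond = true;
  int step = 0;
  while (cond) {
    step_scopes.push_back(Scope{step});  // analogue of scope.NewScope() per step
    // ... the step block would execute here against step_scopes.back() ...
    cond = (++step < 3);  // the real op re-reads the condition tensor each pass
  }
  std::cout << "executed " << step_scopes.size() << " steps\n";
  return 0;
}

Saving one scope per step is what later allows the gradient op, further down in this file, to replay those steps.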
@@ -78,11 +78,11 @@ class WhileOpMaker : public framework::OpProtoAndCheckerMaker {
"A set of variables, which will be assigned with values "
"generated by the operators inside the block of While Op."
)
.
AsDuplicable
();
AddOutput
(
k
Parallel
Scopes
,
AddOutput
(
k
Step
Scopes
,
"(StepScopeVar) A vector of local scope, which size equals the "
"step number of While Op. The i'th scope storages temporary "
"variables generated in the i'th step."
);
AddAttr
<
framework
::
BlockDescBind
*>
(
k
Parallel
Block
,
AddAttr
<
framework
::
BlockDescBind
*>
(
k
Step
Block
,
"The step block inside WhileOp"
);
AddComment
(
R"DOC(
)DOC"
);
...
...
@@ -99,11 +99,11 @@ class WhileGradOp : public framework::OperatorBase {
   void Run(const framework::Scope &scope,
            const platform::DeviceContext &dev_ctx) const override {
     framework::Executor executor(dev_ctx);
-    auto *block = Attr<framework::BlockDescBind *>(kParallelBlock);
+    auto *block = Attr<framework::BlockDescBind *>(kStepBlock);
     auto *program = block->Program();

     auto *step_scopes =
-        scope.FindVar(Input(kParallelScopes))->GetMutable<StepScopeVar>();
+        scope.FindVar(Input(kStepScopes))->GetMutable<StepScopeVar>();

     auto outside_og_names = Inputs(framework::GradVarName(kOutputs));
     auto inside_og_names =
@@ -272,9 +272,9 @@ class WhileGradOpDescMaker : public framework::SingleGradOpDescMaker {
     std::copy(extra_inputs.begin(), extra_inputs.end(),
               extra_inputs_list.begin());
     grad->SetInput(framework::GradVarName(kOutputs), extra_inputs_list);
-    grad->SetInput(kParallelScopes, Output(kParallelScopes));
+    grad->SetInput(kStepScopes, Output(kStepScopes));
     grad->SetAttrMap(this->Attrs());
-    grad->SetBlockAttr(kParallelBlock, *grad_block_[0]);
+    grad->SetBlockAttr(kStepBlock, *grad_block_[0]);
     // record the original output gradient names, since the gradient name of
     // while operator could be renamed.
     grad->SetAttr("original_output_grad", extra_inputs_list);
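The final hunk also makes the data flow between the two ops explicit: the step scopes produced by the forward WhileOp (Output(kStepScopes)) are wired in as an input of the gradient op (grad->SetInput(kStepScopes, ...)), so the backward pass can revisit the per-step variables recorded during the forward pass. A minimal sketch of that idea under toy types (illustrative only, not the grad-maker API):

// Illustrative only: the forward pass records per-step state, the backward
// pass walks the same record newest-first. The vector below is a toy stand-in
// for the step scopes handed from WhileOp to WhileGradOp.
#include <iostream>
#include <vector>

int main() {
  std::vector<double> step_scopes;                  // filled by the "forward" loop
  for (int i = 0; i < 3; ++i) step_scopes.push_back(1.5 * i);

  for (auto it = step_scopes.rbegin(); it != step_scopes.rend(); ++it)
    std::cout << "backward step reuses state " << *it << "\n";  // "backward" loop
  return 0;
}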