From dafd449c68c23c642ba117a55135c823c6594772 Mon Sep 17 00:00:00 2001
From: fengjiayi
Date: Thu, 14 Dec 2017 15:43:14 +0800
Subject: [PATCH] Unify `step_block` and `block` to `sub_block`

---
 paddle/framework/backward.cc             | 4 ++--
 paddle/operators/conditional_block_op.cc | 8 ++++----
 paddle/operators/recurrent_op.cc         | 2 +-
 paddle/operators/while_op.cc             | 2 +-
 python/paddle/v2/fluid/layers.py         | 6 +++---
 5 files changed, 11 insertions(+), 11 deletions(-)

diff --git a/paddle/framework/backward.cc b/paddle/framework/backward.cc
index a17036c65..faf6e60cb 100644
--- a/paddle/framework/backward.cc
+++ b/paddle/framework/backward.cc
@@ -430,14 +430,14 @@ std::vector> MakeBlockBackward(
     std::vector> op_grads;
 
     if ((*it)->Type() == "recurrent" || (*it)->Type() == "while") {
-      int step_block_idx = (*it)->GetBlockAttr("step_block");
+      int step_block_idx = (*it)->GetBlockAttr("sub_block");
       BlockDescBind* backward_block = CreateStepBlock(
           program_desc, no_grad_vars, grad_to_var, step_block_idx);
       op_grads = MakeOpGrad(*it, no_grad_vars, grad_to_var, {backward_block});
     } else if ((*it)->Type() == "conditional_block") {
       BlockDescBind* backward_block =
           CreateStepBlock(program_desc, no_grad_vars, grad_to_var,
-                          (*it)->GetBlockAttr("block"));
+                          (*it)->GetBlockAttr("sub_block"));
       op_grads = MakeOpGrad(*it, no_grad_vars, grad_to_var, {backward_block});
     } else {
       op_grads = MakeOpGrad(*it, no_grad_vars, grad_to_var);
diff --git a/paddle/operators/conditional_block_op.cc b/paddle/operators/conditional_block_op.cc
index 03c58a7ea..6f2ef9174 100644
--- a/paddle/operators/conditional_block_op.cc
+++ b/paddle/operators/conditional_block_op.cc
@@ -65,7 +65,7 @@ class ConditionalBlockOp : public ConditionalOp {
     scopes->front() = &scope.NewScope();
     auto &cur_scope = *scopes->front();
 
-    auto *block = Attr("block");
+    auto *block = Attr("sub_block");
     framework::Executor exec(dev_ctx);
     exec.Run(*block->Program(), &cur_scope, block->ID(), false);
   }
@@ -88,7 +88,7 @@ class ConditionalBlockOpProtoMaker : public framework::OpProtoAndCheckerMaker {
              "unify the conditional block, rnn and while op, the type of "
              "scope is std::vector");
     AddAttr(
-        "block", "The step block of conditional block operator");
+        "sub_block", "The step block of conditional block operator");
     AddComment(R"DOC(Conditional block operator
 
 Run the sub-block if X is not empty. Params is the other inputs and Out is the
@@ -117,7 +117,7 @@ class ConditionalBlockGradOp : public ConditionalOp {
     auto &scopes = scope_var->Get>();
     framework::Scope &cur_scope = *scopes[0];
 
-    auto *block = Attr("block");
+    auto *block = Attr("sub_block");
     framework::Executor exec(dev_ctx);
     exec.Run(*block->Program(), &cur_scope, block->ID(), false);
 
@@ -181,7 +181,7 @@ class ConditionalBlockGradMaker : public framework::SingleGradOpDescMaker {
     grad_op->SetInput("Scope", Output("Scope"));
     grad_op->SetOutput(framework::GradVarName("X"), InputGrad("X"));
     grad_op->SetOutput(framework::GradVarName("Params"), InputGrad("Params"));
-    grad_op->SetBlockAttr("block", *this->grad_block_[0]);
+    grad_op->SetBlockAttr("sub_block", *this->grad_block_[0]);
     return std::unique_ptr(grad_op);
   }
 };
diff --git a/paddle/operators/recurrent_op.cc b/paddle/operators/recurrent_op.cc
index 29f916364..232d926f7 100644
--- a/paddle/operators/recurrent_op.cc
+++ b/paddle/operators/recurrent_op.cc
@@ -25,7 +25,7 @@ constexpr char kOutputs[] = "outputs";
 constexpr char kStepScopes[] = "step_scopes";
 constexpr char kExStates[] = "ex_states";
 constexpr char kStates[] = "states";
-constexpr char kStepBlock[] = "step_block";
+constexpr char kStepBlock[] = "sub_block";
 constexpr char kReverse[] = "reverse";
 constexpr char kIsTrain[] = "is_train";
 #define GRAD_SUFFIX "@GRAD"
diff --git a/paddle/operators/while_op.cc b/paddle/operators/while_op.cc
index b8e44bcc5..9a092a570 100644
--- a/paddle/operators/while_op.cc
+++ b/paddle/operators/while_op.cc
@@ -25,7 +25,7 @@ namespace operators {
 using StepScopeVar = std::vector;
 using LoDTensor = framework::LoDTensor;
 
-constexpr char kStepBlock[] = "step_block";
+constexpr char kStepBlock[] = "sub_block";
 constexpr char kCondition[] = "Condition";
 constexpr char kStepScopes[] = "StepScopes";
 constexpr char kParameters[] = "X";
diff --git a/python/paddle/v2/fluid/layers.py b/python/paddle/v2/fluid/layers.py
index f67d6d08c..2781017ec 100644
--- a/python/paddle/v2/fluid/layers.py
+++ b/python/paddle/v2/fluid/layers.py
@@ -1130,7 +1130,7 @@ class StaticRNN(object):
             attrs={
                 'ex_states': pre_memories,
                 'states': memories,
-                'step_block': rnn_block
+                'sub_block': rnn_block
             })
 
 
@@ -1207,7 +1207,7 @@ class While(object):
             },
             outputs={'Out': out_vars,
                      'StepScopes': [step_scope]},
-            attrs={'step_block': while_block})
+            attrs={'sub_block': while_block})
 
 
 def lstm(x,
@@ -1671,7 +1671,7 @@ class ConditionalBlock(object):
             },
             outputs={'Out': out_list,
                      'Scope': [step_scope]},
-            attrs={'block': inside_block})
+            attrs={'sub_block': inside_block})
 
 
 class IfElseBlockGuard(object):
--
GitLab
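
For context, the rename is purely an attribute-key change on the control-flow operators; the Python layer call sites only swap the key they pass to append_op. A minimal sketch of the pattern after this patch, assuming a block/op setup like the one in layers.py above (parent_block, cond, x_vars, out_vars, step_scope and while_block are illustrative placeholders, not names introduced by the patch):

    # The body of every control-flow op is now attached under 'sub_block'
    # (previously 'step_block' for while/recurrent and 'block' for
    # conditional_block).
    parent_block.append_op(
        type='while',
        inputs={'X': x_vars,
                'Condition': [cond]},
        outputs={'Out': out_vars,
                 'StepScopes': [step_scope]},
        attrs={'sub_block': while_block})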