diff --git a/paddle/framework/backward.cc b/paddle/framework/backward.cc
index a17036c6527da3a4a32f021a57542b6b6d68a395..faf6e60cbd1bcda9864c12696b336998ea7606b7 100644
--- a/paddle/framework/backward.cc
+++ b/paddle/framework/backward.cc
@@ -430,14 +430,14 @@ std::vector<std::unique_ptr<OpDescBind>> MakeBlockBackward(
     std::vector<std::unique_ptr<OpDescBind>> op_grads;
 
     if ((*it)->Type() == "recurrent" || (*it)->Type() == "while") {
-      int step_block_idx = (*it)->GetBlockAttr("step_block");
+      int step_block_idx = (*it)->GetBlockAttr("sub_block");
       BlockDescBind* backward_block = CreateStepBlock(
           program_desc, no_grad_vars, grad_to_var, step_block_idx);
       op_grads = MakeOpGrad(*it, no_grad_vars, grad_to_var, {backward_block});
     } else if ((*it)->Type() == "conditional_block") {
       BlockDescBind* backward_block =
           CreateStepBlock(program_desc, no_grad_vars, grad_to_var,
-                          (*it)->GetBlockAttr("block"));
+                          (*it)->GetBlockAttr("sub_block"));
       op_grads = MakeOpGrad(*it, no_grad_vars, grad_to_var, {backward_block});
     } else {
       op_grads = MakeOpGrad(*it, no_grad_vars, grad_to_var);
diff --git a/paddle/operators/conditional_block_op.cc b/paddle/operators/conditional_block_op.cc
index 03c58a7eab8b2071a3a0b75ac0c665e32ef39876..6f2ef9174e84a0c0ae096956c04039435e6583c6 100644
--- a/paddle/operators/conditional_block_op.cc
+++ b/paddle/operators/conditional_block_op.cc
@@ -65,7 +65,7 @@ class ConditionalBlockOp : public ConditionalOp {
       scopes->front() = &scope.NewScope();
       auto &cur_scope = *scopes->front();
 
-      auto *block = Attr<framework::BlockDescBind *>("block");
+      auto *block = Attr<framework::BlockDescBind *>("sub_block");
       framework::Executor exec(dev_ctx);
       exec.Run(*block->Program(), &cur_scope, block->ID(), false);
     }
@@ -88,7 +88,7 @@ class ConditionalBlockOpProtoMaker : public framework::OpProtoAndCheckerMaker {
              "unify the conditional block, rnn and while op, the type of "
              "scope is std::vector<Scope*>");
     AddAttr<framework::BlockDescBind *>(
-        "block", "The step block of conditional block operator");
+        "sub_block", "The step block of conditional block operator");
     AddComment(R"DOC(Conditional block operator
 
 Run the sub-block if X is not empty. Params is the other inputs and Out is the
@@ -117,7 +117,7 @@ class ConditionalBlockGradOp : public ConditionalOp {
       auto &scopes = scope_var->Get<std::vector<framework::Scope *>>();
       framework::Scope &cur_scope = *scopes[0];
 
-      auto *block = Attr<framework::BlockDescBind *>("block");
+      auto *block = Attr<framework::BlockDescBind *>("sub_block");
       framework::Executor exec(dev_ctx);
       exec.Run(*block->Program(), &cur_scope, block->ID(), false);
 
@@ -181,7 +181,7 @@ class ConditionalBlockGradMaker : public framework::SingleGradOpDescMaker {
     grad_op->SetInput("Scope", Output("Scope"));
     grad_op->SetOutput(framework::GradVarName("X"), InputGrad("X"));
     grad_op->SetOutput(framework::GradVarName("Params"), InputGrad("Params"));
-    grad_op->SetBlockAttr("block", *this->grad_block_[0]);
+    grad_op->SetBlockAttr("sub_block", *this->grad_block_[0]);
     return std::unique_ptr<framework::OpDescBind>(grad_op);
   }
 };
diff --git a/paddle/operators/recurrent_op.cc b/paddle/operators/recurrent_op.cc
index 29f91636438449f90ea3ffee8adc21595aabe202..232d926f7b975c3b8ebecad983d0f1cc54b9486f 100644
--- a/paddle/operators/recurrent_op.cc
+++ b/paddle/operators/recurrent_op.cc
@@ -25,7 +25,7 @@ constexpr char kOutputs[] = "outputs";
 constexpr char kStepScopes[] = "step_scopes";
 constexpr char kExStates[] = "ex_states";
 constexpr char kStates[] = "states";
-constexpr char kStepBlock[] = "step_block";
+constexpr char kStepBlock[] = "sub_block";
 constexpr char kReverse[] = "reverse";
 constexpr char kIsTrain[] = "is_train";
 #define GRAD_SUFFIX "@GRAD"
diff --git a/paddle/operators/while_op.cc b/paddle/operators/while_op.cc
index b8e44bcc5a99380fdf08cc2819b20045695eaf87..9a092a570ff1f3f529413cd44dff55147adbaadc 100644
--- a/paddle/operators/while_op.cc
+++ b/paddle/operators/while_op.cc
@@ -25,7 +25,7 @@ namespace operators {
 using StepScopeVar = std::vector<framework::Scope *>;
 using LoDTensor = framework::LoDTensor;
 
-constexpr char kStepBlock[] = "step_block";
+constexpr char kStepBlock[] = "sub_block";
 constexpr char kCondition[] = "Condition";
 constexpr char kStepScopes[] = "StepScopes";
 constexpr char kParameters[] = "X";
diff --git a/python/paddle/v2/fluid/layers.py b/python/paddle/v2/fluid/layers.py
index f67d6d08c7557d939f280d19c5b86914885490bd..2781017ec4b05e7c6d51a455b8d16620678fb44d 100644
--- a/python/paddle/v2/fluid/layers.py
+++ b/python/paddle/v2/fluid/layers.py
@@ -1130,7 +1130,7 @@ class StaticRNN(object):
             attrs={
                 'ex_states': pre_memories,
                 'states': memories,
-                'step_block': rnn_block
+                'sub_block': rnn_block
             })
 
 
@@ -1207,7 +1207,7 @@ class While(object):
             },
             outputs={'Out': out_vars,
                      'StepScopes': [step_scope]},
-            attrs={'step_block': while_block})
+            attrs={'sub_block': while_block})
 
 
 def lstm(x,
@@ -1671,7 +1671,7 @@ class ConditionalBlock(object):
             },
             outputs={'Out': out_list,
                      'Scope': [step_scope]},
-            attrs={'block': inside_block})
+            attrs={'sub_block': inside_block})
 
 
 class IfElseBlockGuard(object):