From cde542e6524d8fd084983e20e0051a3caf22f6b1 Mon Sep 17 00:00:00 2001
From: qiaolongfei
Date: Mon, 2 Oct 2017 14:51:15 -0700
Subject: [PATCH] optimize auto

---
 paddle/operators/recurrent_op.cc           |  6 +++---
 paddle/operators/rnn/recurrent_op_utils.cc | 10 +++++-----
 2 files changed, 8 insertions(+), 8 deletions(-)

diff --git a/paddle/operators/recurrent_op.cc b/paddle/operators/recurrent_op.cc
index bcd6a3410a..04c4c24951 100644
--- a/paddle/operators/recurrent_op.cc
+++ b/paddle/operators/recurrent_op.cc
@@ -53,9 +53,9 @@ void RecurrentAlgorithm::CreateScopes(const Scope& scope,
                                       size_t seq_len) const {
   // TODO(superjom) Only two scopes are needed for inference, this case will be
   // supported later.
-  auto step_scopes_var = scope.FindVar(arg_->step_scopes);
+  auto* step_scopes_var = scope.FindVar(arg_->step_scopes);
   PADDLE_ENFORCE(step_scopes_var != nullptr, "");
-  auto step_scopes = step_scopes_var->GetMutable<std::vector<Scope*>>();
+  auto* step_scopes = step_scopes_var->GetMutable<std::vector<Scope*>>();
 
   // Now all variables in scope must be created outside of op.
   PADDLE_ENFORCE_NOT_NULL(stepnet_);
@@ -148,7 +148,7 @@ void RecurrentGradientAlgorithm::Run(
   auto* input0 = scope.FindVar(arg_->inlinks[0]);
   PADDLE_ENFORCE_NOT_NULL(input0);
   size_t seq_len = input0->GetMutable<LoDTensor>()->dims()[0];
-  auto step_scopes = GetStepScopes(scope);
+  auto& step_scopes = GetStepScopes(scope);
   rnn::SegmentInputs(step_scopes, arg_->inlinks, seq_len);
   for (int step_id = seq_len - 1; step_id >= 0; --step_id) {
     if (step_id != seq_len - 1) {
diff --git a/paddle/operators/rnn/recurrent_op_utils.cc b/paddle/operators/rnn/recurrent_op_utils.cc
index a37d21d480..ef317a71f1 100644
--- a/paddle/operators/rnn/recurrent_op_utils.cc
+++ b/paddle/operators/rnn/recurrent_op_utils.cc
@@ -92,8 +92,8 @@ void LinkMemories(const std::vector<Scope*>& scopes,
   auto* scope = scopes[step_id];
   auto* linked_scope = scopes[step_id + offset];
   for (auto& attr : memories) {
-    auto mem = scope->FindVar(attr.pre_var)->GetMutable<LoDTensor>();
-    auto linked_mem = linked_scope->FindVar(attr.var)->GetMutable<LoDTensor>();
+    auto* mem = scope->FindVar(attr.pre_var)->GetMutable<LoDTensor>();
+    auto* linked_mem = linked_scope->FindVar(attr.var)->GetMutable<LoDTensor>();
     mem->Resize(linked_mem->dims());
     mem->ShareDataWith(*linked_mem);
   }
@@ -106,11 +106,11 @@ void InitArgument(const ArgumentName& name, Argument* arg,
   arg->inlinks = op.Inputs(name.inlinks);
   arg->outlinks = op.Outputs(name.outlinks);
 
-  auto boot_memories =
+  auto& boot_memories =
       is_grad ? op.Outputs(name.boot_memories) : op.Inputs(name.boot_memories);
   // attributes
-  auto memories = op.Attr<std::vector<std::string>>(name.memories);
-  auto pre_memories = op.Attr<std::vector<std::string>>(name.pre_memories);
+  auto& memories = op.Attr<std::vector<std::string>>(name.memories);
+  auto& pre_memories = op.Attr<std::vector<std::string>>(name.pre_memories);
 
   PADDLE_ENFORCE(memories.size() == boot_memories.size(),
                  "the size of memories, boot_memories don't match:%d,%d",
-- 
GitLab
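
Note (reviewer context, not part of the patch): the changes above replace plain `auto` with `auto*` or `auto&` wherever the initializer is a pointer or a reference. Plain `auto` deduces a value type, so binding it to an accessor that returns a reference (such as GetStepScopes() or Attr<...>() here) silently copies the container on every call; `auto&` preserves the reference, and `auto*` documents pointer-ness at the call site. Below is a minimal standalone sketch of that difference; all names in it are illustrative stand-ins, not PaddlePaddle code.

#include <iostream>
#include <string>
#include <vector>

// Stand-in for an accessor that returns a reference into long-lived state,
// in the spirit of GetStepScopes(scope) in the patch above.
std::vector<std::string> g_step_scopes = {"step_0", "step_1"};
const std::vector<std::string>& GetScopes() { return g_step_scopes; }

int main() {
  auto copied = GetScopes();    // deduces std::vector<std::string>: a full copy
  auto& aliased = GetScopes();  // deduces const std::vector<std::string>&: no copy

  g_step_scopes.push_back("step_2");
  std::cout << copied.size() << "\n";   // 2 -- the copy is already stale
  std::cout << aliased.size() << "\n";  // 3 -- the reference tracks the source

  auto* ptr = &g_step_scopes;  // deduces the same type as plain auto would here,
                               // but the '*' makes pointer-ness explicit
  std::cout << ptr->size() << "\n";  // 3
  return 0;
}

For the `auto*` changes the deduced type (and generated code) is unchanged; the gain is readability, since the declaration no longer compiles if the initializer stops being a pointer. For the `auto&` changes the gain is also performance: the Attr<std::vector<std::string>> and GetStepScopes results are no longer copied.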