Commit cde542e6 authored by: Q qiaolongfei

optimize auto

Parent 6b051b65
@@ -53,9 +53,9 @@ void RecurrentAlgorithm::CreateScopes(const Scope& scope,
                                       size_t seq_len) const {
   // TODO(superjom) Only two scopes are needed for inference, this case will be
   // supported later.
-  auto step_scopes_var = scope.FindVar(arg_->step_scopes);
+  auto* step_scopes_var = scope.FindVar(arg_->step_scopes);
   PADDLE_ENFORCE(step_scopes_var != nullptr, "");
-  auto step_scopes = step_scopes_var->GetMutable<std::vector<Scope*>>();
+  auto* step_scopes = step_scopes_var->GetMutable<std::vector<Scope*>>();
   // Now all variables in scope must be created outside of op.
   PADDLE_ENFORCE_NOT_NULL(stepnet_);
@@ -148,7 +148,7 @@ void RecurrentGradientAlgorithm::Run(
   auto* input0 = scope.FindVar(arg_->inlinks[0]);
   PADDLE_ENFORCE_NOT_NULL(input0);
   size_t seq_len = input0->GetMutable<LoDTensor>()->dims()[0];
-  auto step_scopes = GetStepScopes(scope);
+  auto& step_scopes = GetStepScopes(scope);
   rnn::SegmentInputs(step_scopes, arg_->inlinks, seq_len);
   for (int step_id = seq_len - 1; step_id >= 0; --step_id) {
     if (step_id != seq_len - 1) {
...
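Where the initializer is already a pointer (`FindVar` and `GetMutable` both return pointers), `auto` and `auto*` deduce the identical type, so the `auto*` hunks above change nothing at runtime; `auto*` simply makes the pointer-ness visible at the declaration and turns any future non-pointer initializer into a compile error. A minimal sketch of that property, using simplified stand-in types rather than the real Paddle `Scope`/`Variable` API:

```cpp
// Sketch only: `Scope`/`Variable` here are toy stand-ins, not Paddle's classes.
#include <cassert>
#include <string>
#include <type_traits>
#include <unordered_map>

struct Variable {};

struct Scope {
  std::unordered_map<std::string, Variable*> vars;
  // Returns nullptr when the name is absent, matching the pattern in the diff.
  Variable* FindVar(const std::string& name) const {
    auto it = vars.find(name);
    return it == vars.end() ? nullptr : it->second;
  }
};

int main() {
  Variable v;
  Scope scope;
  scope.vars["step_scopes"] = &v;

  auto implicit = scope.FindVar("step_scopes");       // deduced as Variable*
  auto* explicit_ptr = scope.FindVar("step_scopes");  // same type, intent stated
  static_assert(std::is_same<decltype(implicit), decltype(explicit_ptr)>::value,
                "auto and auto* deduce the same type for a pointer initializer");
  assert(implicit == explicit_ptr);
  return 0;
}
```

The `auto&` change to `GetStepScopes(scope)` is a different case, reference binding rather than pointer deduction; the copy-avoidance angle behind it is sketched after the second file's hunks below.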
@@ -92,8 +92,8 @@ void LinkMemories(const std::vector<Scope*>& scopes,
   auto* scope = scopes[step_id];
   auto* linked_scope = scopes[step_id + offset];
   for (auto& attr : memories) {
-    auto mem = scope->FindVar(attr.pre_var)->GetMutable<LoDTensor>();
-    auto linked_mem = linked_scope->FindVar(attr.var)->GetMutable<LoDTensor>();
+    auto* mem = scope->FindVar(attr.pre_var)->GetMutable<LoDTensor>();
+    auto* linked_mem = linked_scope->FindVar(attr.var)->GetMutable<LoDTensor>();
     mem->Resize(linked_mem->dims());
     mem->ShareDataWith<float>(*linked_mem);
   }
@@ -106,11 +106,11 @@ void InitArgument(const ArgumentName& name, Argument* arg,
   arg->inlinks = op.Inputs(name.inlinks);
   arg->outlinks = op.Outputs(name.outlinks);
-  auto boot_memories =
+  auto& boot_memories =
       is_grad ? op.Outputs(name.boot_memories) : op.Inputs(name.boot_memories);
   // attributes
-  auto memories = op.Attr<std::vector<std::string>>(name.memories);
-  auto pre_memories = op.Attr<std::vector<std::string>>(name.pre_memories);
+  auto& memories = op.Attr<std::vector<std::string>>(name.memories);
+  auto& pre_memories = op.Attr<std::vector<std::string>>(name.pre_memories);
   PADDLE_ENFORCE(memories.size() == boot_memories.size(),
                  "the size of memories, boot_memories don't match:%d,%d",
...
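The `auto&` hunks are more than a style tweak: assuming `op.Attr<...>()`, `op.Inputs()`/`op.Outputs()`, and `GetStepScopes()` return (const) references, plain `auto` deduces the value type and copies each container on every call, while `auto&` binds to the stored object with no copy. A minimal sketch under that assumption (the `Op` type below is a simplified stand-in, not the real operator API):

```cpp
// Sketch only: `Op` is a toy stand-in; it assumes the getter returns a
// const reference, which is what makes `auto` vs. `auto&` observable.
#include <cassert>
#include <string>
#include <vector>

struct Op {
  std::vector<std::string> memories{"h@mem", "c@mem"};
  const std::vector<std::string>& Attr(const std::string& /*name*/) const {
    return memories;
  }
};

int main() {
  Op op;
  auto copied = op.Attr("memories");    // deduces std::vector<std::string>: deep copy
  auto& aliased = op.Attr("memories");  // deduces const std::vector<std::string>&: no copy
  assert(&aliased == &op.memories);     // refers to the stored vector
  assert(&copied != &op.memories);      // independent object
  return 0;
}
```

Since the hunks only read through the new references (`size()` checks, segmenting inputs), binding instead of copying cannot change behavior here; it just drops redundant vector copies from `InitArgument` and the gradient `Run` path.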