From edb22c2f0c10bd8e70e3e917a6e2c10a2ab044b3 Mon Sep 17 00:00:00 2001
From: Yu Yang
Date: Fri, 10 Nov 2017 10:54:21 -0800
Subject: [PATCH] Add Scope::Rename (#5534)

it is useful in gradient phase of an operator with block
---
 paddle/framework/scope.cc        | 18 ++++++++++++++++++
 paddle/framework/scope.h         |  9 ++++++++-
 paddle/operators/recurrent_op.cc | 24 ++++++++----------------
 3 files changed, 34 insertions(+), 17 deletions(-)

diff --git a/paddle/framework/scope.cc b/paddle/framework/scope.cc
index fb2c69105..9428b8a07 100644
--- a/paddle/framework/scope.cc
+++ b/paddle/framework/scope.cc
@@ -98,5 +98,23 @@ void Scope::DeleteScope(Scope* scope) {
   delete scope;
 }
 
+void Scope::Rename(const std::string& origin_name,
+                   const std::string& new_name) const {
+  auto origin_it = vars_.find(origin_name);
+  PADDLE_ENFORCE(origin_it != vars_.end(),
+                 "Cannot find original variable with name %s", origin_name);
+  auto new_it = vars_.find(new_name);
+  PADDLE_ENFORCE(new_it == vars_.end(),
+                 "The variable with name %s is already in the scope", new_name);
+  vars_[new_name] = origin_it->second;
+  vars_.erase(origin_it);
+}
+
+std::string Scope::Rename(const std::string& origin_name) const {
+  auto var_name = string::Sprintf("%p.%d", this, vars_.size());
+  Rename(origin_name, var_name);
+  return var_name;
+}
+
 }  // namespace framework
 }  // namespace paddle
diff --git a/paddle/framework/scope.h b/paddle/framework/scope.h
index fb6609493..c2aafb6ad 100644
--- a/paddle/framework/scope.h
+++ b/paddle/framework/scope.h
@@ -68,11 +68,18 @@ class Scope {
   // enumerate all the variables current contains.
   std::vector<std::string> GetAllNames(bool recursive = false) const;
 
+  // Rename variable to a new name
+  void Rename(const std::string& origin_name,
+              const std::string& new_name) const;
+
+  // Rename variable to a new name and return the new name
+  std::string Rename(const std::string& origin_name) const;
+
  private:
   // Call Scope::NewScope for a sub-scope.
   explicit Scope(Scope const* parent) : parent_(parent) {}
 
-  std::unordered_map<std::string, Variable*> vars_;
+  mutable std::unordered_map<std::string, Variable*> vars_;
   mutable std::list<Scope*> kids_;
   Scope const* parent_{nullptr};
 
diff --git a/paddle/operators/recurrent_op.cc b/paddle/operators/recurrent_op.cc
index b0e87b705..0075ccd24 100644
--- a/paddle/operators/recurrent_op.cc
+++ b/paddle/operators/recurrent_op.cc
@@ -387,8 +387,8 @@ class RecurrentGradOp : public RecurrentBase {
       auto &p_names = Inputs(kParameters);
       PADDLE_ENFORCE_EQ(pg_names.size(), p_names.size());
 
-      for (size_t prog_id = 0; prog_id < pg_names.size(); ++prog_id) {
-        auto inside_grad_name = framework::GradVarName(p_names[prog_id]);
+      for (size_t param_id = 0; param_id < pg_names.size(); ++param_id) {
+        auto inside_grad_name = framework::GradVarName(p_names[param_id]);
 
         // If does not compute gradient of that variable inside rnn, just
         // continue
@@ -406,27 +406,19 @@ class RecurrentGradOp : public RecurrentBase {
          attrs["value"] = 0.0f;
 
          auto zero_op = framework::OpRegistry::CreateOp(
-              "fill_constant", {}, {{"Out", {pg_names[prog_id]}}}, attrs);
+              "fill_constant", {}, {{"Out", {pg_names[param_id]}}}, attrs);
          zero_op->Run(scope, dev_ctx);
        }
 
+        auto new_inside_name = cur_scope.Rename(inside_grad_name);
         // sum gradient
-        auto *outside_var = scope.FindVar(pg_names[prog_id]);
-        PADDLE_ENFORCE(outside_var != nullptr);
-        auto &outside_tensor =
-            *outside_var->GetMutable<framework::LoDTensor>();
-
-        std::string result_var_name;
-        auto *local_result_var = cur_scope.Var(&result_var_name);
-        auto &local_result_tensor =
-            *local_result_var->GetMutable<framework::LoDTensor>();
-
-        local_result_tensor.ShareDataWith(outside_tensor);
         auto sum_op = framework::OpRegistry::CreateOp(
-            "sum", {{"X", {result_var_name, inside_grad_name}}},
-            {{"Out", {result_var_name}}}, {});
+            "sum", {{"X", {pg_names[param_id], new_inside_name}}},
+            {{"Out", {pg_names[param_id]}}}, {});
         sum_op->Run(cur_scope, dev_ctx);
+
+        cur_scope.Rename(new_inside_name, inside_grad_name);
       }
     }
     VLOG(5) << "Accumulate Parameter finished ";
--
GitLab
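
Below is a minimal, self-contained sketch of the semantics this patch gives Scope::Rename. MiniScope and Variable are illustrative stand-ins, not Paddle's framework::Scope and framework::Variable; it uses plain asserts instead of PADDLE_ENFORCE and a simple "@TMP@<n>" temporary name instead of the "%p.%d" format used in the patch. The point it demonstrates is that renaming only re-keys the entry in the name-to-variable map, so the variable itself (and any tensor it holds) is untouched.

// Sketch only: stand-in types, not Paddle's Scope/Variable.
#include <cassert>
#include <iostream>
#include <memory>
#include <string>
#include <unordered_map>

struct Variable {};  // opaque payload standing in for framework::Variable

class MiniScope {
 public:
  // Create (or fetch) a variable under `name`.
  Variable* Var(const std::string& name) {
    auto& slot = vars_[name];
    if (!slot) slot.reset(new Variable);
    return slot.get();
  }

  Variable* FindVar(const std::string& name) const {
    auto it = vars_.find(name);
    return it == vars_.end() ? nullptr : it->second.get();
  }

  // Re-key an existing entry; the Variable is moved, never copied.
  void Rename(const std::string& origin_name, const std::string& new_name) {
    auto origin_it = vars_.find(origin_name);
    assert(origin_it != vars_.end() && "original variable must exist");
    assert(vars_.count(new_name) == 0 && "new name must be unused");
    // Take ownership out, erase the old key, then insert under the new key.
    std::unique_ptr<Variable> holder = std::move(origin_it->second);
    vars_.erase(origin_it);
    vars_[new_name] = std::move(holder);
  }

  // Rename to a generated temporary name and return that name.
  std::string Rename(const std::string& origin_name) {
    std::string tmp = "@TMP@" + std::to_string(counter_++);
    Rename(origin_name, tmp);
    return tmp;
  }

 private:
  std::unordered_map<std::string, std::unique_ptr<Variable>> vars_;
  int counter_{0};
};

int main() {
  MiniScope scope;
  Variable* v = scope.Var("x@GRAD");
  std::string tmp = scope.Rename("x@GRAD");    // e.g. "@TMP@0"
  assert(scope.FindVar("x@GRAD") == nullptr);  // the old name is gone
  assert(scope.FindVar(tmp) == v);             // same variable, new key
  std::cout << "temporary name: " << tmp << "\n";
  return 0;
}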
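
The recurrent_op.cc hunks replace the earlier copy-based accumulation (FindVar on the outer scope plus ShareDataWith into a fresh local variable) with a rename, sum, rename-back sequence: presumably the step scope's gradient is moved aside under a generated name so it no longer shadows the parameter-gradient variable, a "sum" op then accumulates both into that variable under its public name, and the temporary is renamed back so the block still finds GradVarName(param). The sketch below models only that control flow; the Scopes struct, Find, RenameToTemp, RenameBack, the float-valued variables, and the "W@GRAD" name are assumptions standing in for Paddle's scope hierarchy and its real "sum" operator.

// Control-flow sketch of the accumulation pattern, not Paddle code.
// A "variable" is a single float; the step scope shadows the parent scope
// on lookup, roughly like Paddle's scope hierarchy.
#include <cassert>
#include <iostream>
#include <string>
#include <unordered_map>

struct Scopes {
  std::unordered_map<std::string, float> parent;  // outer gradient scope
  std::unordered_map<std::string, float> step;    // current time-step scope

  // Name lookup: the step scope shadows the parent scope.
  float& Find(const std::string& name) {
    auto it = step.find(name);
    return it != step.end() ? it->second : parent.at(name);
  }

  // Analogue of cur_scope.Rename(name): move aside under a temporary key.
  std::string RenameToTemp(const std::string& name) {
    assert(step.count(name) == 1);
    std::string tmp = "@TMP@" + name;
    step[tmp] = step.at(name);
    step.erase(name);
    return tmp;
  }

  // Analogue of cur_scope.Rename(tmp, name): restore the original key.
  void RenameBack(const std::string& tmp, const std::string& name) {
    step[name] = step.at(tmp);
    step.erase(tmp);
  }
};

int main() {
  Scopes s;
  const std::string grad = "W@GRAD";  // same name inside and outside the block
  s.parent[grad] = 0.0f;              // running parameter-gradient sum

  for (float step_grad : {1.5f, 2.5f}) {  // two pretend time steps
    s.step[grad] = step_grad;             // gradient produced by this step

    // Without the rename, `grad` in the step scope would shadow the outer
    // accumulator and the sum would never reach the parent scope.
    std::string tmp = s.RenameToTemp(grad);
    s.Find(grad) += s.Find(tmp);  // the "sum" op: Out = W@GRAD, X = {W@GRAD, tmp}
    s.RenameBack(tmp, grad);

    s.step.clear();  // the step scope is discarded before the next step
  }
  std::cout << s.parent.at(grad) << "\n";  // prints 4
  return 0;
}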