diff --git a/doc/design/block.md b/doc/design/block.md
index 9c812732d6ead76eb3aa2d1b617449c96807f21a..8f53f8d83cba598502193711efd4d47a3f85034b 100644
--- a/doc/design/block.md
+++ b/doc/design/block.md
@@ -243,7 +243,7 @@ class SymbolTable {
   // TODO determine whether name is generated by python or C++.
   // Currently assume that a unique name will be generated by C++ if the
   // argument name is left default.
-  VarDesc* NewVar(const string& name="");
+  VarDesc* GetOrCreateVar(const string& name="");
 
   // find a VarDesc by name, if recursive is true, find parent's SymbolTable
   // recursively.
diff --git a/doc/design/scope.md b/doc/design/scope.md
index b1f9bb4378eb5ec6926f1e53f7c1f4fd5674064c..6a1a32a63e39a98976a5d561334f5bec71b9c2e8 100644
--- a/doc/design/scope.md
+++ b/doc/design/scope.md
@@ -37,7 +37,7 @@ Scope is an association of a name to variable. All variables belong to `Scope`.
 ```cpp
 class Scope {
  public:
-  Variable* NewVar(const std::string& name);
+  Variable* GetOrCreateVar(const std::string& name);
   const Variable* FindVar(const std::string& name) const;
 
  private:
@@ -98,7 +98,7 @@ class Scope {
   Variable* FindVar(const std::string& name) const;
 
   // return if already contains same name variable.
-  Variable* NewVar(const std::string& name);
+  Variable* GetOrCreateVar(const std::string& name);
 
  private:
   std::shared_ptr<Scope> parent_;
@@ -107,7 +107,7 @@ class Scope {
 ```
 
 ## Only scope can create a variable
 
-To ensure `only scope can create a variable`, we should mark `Variable`'s constructor as a private member function, and Scope is a friend class of Variable. And then only `NewVar` can construct `Variable`.
+To ensure `only scope can create a variable`, we should mark `Variable`'s constructor as a private member function and make `Scope` a friend class of `Variable`. Then only `GetOrCreateVar` can construct a `Variable`.
 
 ## When scope destroyed, all variables inside this scope should be destroyed together
@@ -121,4 +121,4 @@ Also, as the parent scope is a `shared_ptr`, we can only `Create()` a scope shar
 
 ## Orthogonal interface
 
-`FindVar` will return `nullptr` when `name` is not found. It can be used as `Contains` method. `NewVar` will return an `Error` when there is a name conflict locally. Combine `FindVar` and `NewVar`, we can implement `NewVar` easily.
+`FindVar` will return `nullptr` when `name` is not found, so it can double as a `Contains` method. `GetOrCreateVar` will return the existing variable when there is a local name conflict, and create a fresh one otherwise. Combining `FindVar` with variable construction, we can implement `GetOrCreateVar` easily.
diff --git a/doc/design/tensor_array.md b/doc/design/tensor_array.md
index 8378e97bf7cfaae54c36b1b92e202b16e4fe1e28..662d1850b287c346e96fcb160b734cc0bf7f9804 100644
--- a/doc/design/tensor_array.md
+++ b/doc/design/tensor_array.md
@@ -161,7 +161,7 @@ class TensorArray:
         @name: str
             the name of the variable to output.
         '''
-        tensor = NewVar(name)
+        tensor = GetOrCreateVar(name)
         tensor_array_stack(self.name, tensor)
         return tensor
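The `scope.md` hunks above describe two pieces of the design in prose: the friend-class rule (only `Scope` may construct a `Variable`) and the orthogonal `FindVar`/`GetOrCreateVar` interface. A minimal, self-contained sketch of both, using toy types rather than the real framework classes:

```cpp
#include <memory>
#include <string>
#include <unordered_map>

class Scope;

class Variable {
 private:
  Variable() = default;  // private ctor: only a friend can construct one
  friend class Scope;    // Scope is the sole factory for Variable
};

class Scope {
 public:
  // FindVar doubles as the `Contains` check: nullptr means "not present".
  Variable* FindVar(const std::string& name) const {
    auto it = vars_.find(name);
    return it == vars_.end() ? nullptr : it->second.get();
  }

  // Get-or-create: return the existing variable on a local name conflict,
  // otherwise construct a fresh one (legal here because Scope is a friend
  // of Variable).
  Variable* GetOrCreateVar(const std::string& name) {
    if (Variable* existing = FindVar(name)) return existing;
    auto* v = new Variable();
    vars_[name].reset(v);
    return v;
  }

 private:
  std::unordered_map<std::string, std::unique_ptr<Variable>> vars_;
};
```

`GetOrCreateVar` here is literally `FindVar` plus a private construction, which is the composition the "Orthogonal interface" section describes.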
diff --git a/paddle/framework/block_desc.cc b/paddle/framework/block_desc.cc
index 509aa235d3ee226adef15f08f5785866700499f1..1e580a045ac09b131bdbf770971adaa0992688f7 100644
--- a/paddle/framework/block_desc.cc
+++ b/paddle/framework/block_desc.cc
@@ -18,7 +18,7 @@ limitations under the License. */
 namespace paddle {
 namespace framework {
 
-VarDescBind *BlockDescBind::NewVar(const std::string &name) {
+VarDescBind *BlockDescBind::GetOrCreateVar(const std::string &name) {
   need_update_ = true;
   auto it = vars_.find(name);
   PADDLE_ENFORCE(it == vars_.end(), "Duplicated variable %s", name);
diff --git a/paddle/framework/executor.cc b/paddle/framework/executor.cc
index c388b2198e4fbf75d6584d710e00d3deca93eb51..f888fb73ca06466d3d5cc2fb2675e18c163183f7 100644
--- a/paddle/framework/executor.cc
+++ b/paddle/framework/executor.cc
@@ -66,7 +66,7 @@ void Executor::Run(const ProgramDesc& pdesc, Scope* scope, int block_id) {
 
   // Instantiate all the vars in the global scope
   for (auto& var : block.vars()) {
-    scope->NewVar(var.name());
+    scope->GetOrCreateVar(var.name());
   }
 
   Scope& local_scope = scope->NewScope();
@@ -78,7 +78,7 @@ void Executor::Run(const ProgramDesc& pdesc, Scope* scope, int block_id) {
     for (auto& var : block.ops(i).outputs()) {
       for (auto& argu : var.arguments()) {
         if (local_scope.FindVar(argu) == nullptr) {
-          local_scope.NewVar(argu);
+          local_scope.GetOrCreateVar(argu);
         }
       }
     }
diff --git a/paddle/framework/executor_test.cc b/paddle/framework/executor_test.cc
index 7f6d8fe6a4aec9fdc39b4ffc0837a03e355ec937..e9b0712706d187472a006fe1da52a092c59ab580 100644
--- a/paddle/framework/executor_test.cc
+++ b/paddle/framework/executor_test.cc
@@ -34,7 +34,7 @@ void AddOp(const std::string& type, const VariableNameMap& inputs,
   // insert output
   for (auto kv : outputs) {
     for (auto v : kv.second) {
-      auto var = block->NewVar(v);
+      auto var = block->GetOrCreateVar(v);
       var->SetDataType(paddle::framework::DataType::FP32);
     }
   }
diff --git a/paddle/framework/scope.cc b/paddle/framework/scope.cc
index 5821bac928ed898971d61a3e2a86f59155d76991..df0d169038ab2a35cf4bebcc8d88f6a4f946c92c 100644
--- a/paddle/framework/scope.cc
+++ b/paddle/framework/scope.cc
@@ -31,7 +31,7 @@ Scope& Scope::NewScope() const {
   return *kids_.back();
 }
 
-Variable* Scope::NewVar(const std::string& name) {
+Variable* Scope::GetOrCreateVar(const std::string& name) {
   auto iter = vars_.find(name);
   if (iter != vars_.end()) {
     return iter->second;
   }
@@ -42,8 +42,8 @@ Variable* Scope::NewVar(const std::string& name) {
   return v;
 }
 
-Variable* Scope::NewVar() {
-  return NewVar(string::Sprintf("%p.%d", this, vars_.size()));
+Variable* Scope::GetOrCreateVar() {
+  return GetOrCreateVar(string::Sprintf("%p.%d", this, vars_.size()));
 }
 
 Variable* Scope::FindVar(const std::string& name) const {
@@ -71,8 +71,8 @@ framework::Scope& GetGlobalScope() {
   static std::unique_ptr<framework::Scope> g_scope{nullptr};
   std::call_once(feed_variable_flag, [&]() {
     g_scope.reset(new framework::Scope());
-    g_scope->NewVar("feed_value");
-    g_scope->NewVar("fetch_value");
+    g_scope->GetOrCreateVar("feed_value");
+    g_scope->GetOrCreateVar("fetch_value");
   });
   return *(g_scope.get());
 }
diff --git a/paddle/framework/scope.h b/paddle/framework/scope.h
index a8cfb107c25ccd62039db7349cc1c1dbff772f39..5cc1700651db272b0f05381df8e8692f2bff84d6 100644
--- a/paddle/framework/scope.h
+++ b/paddle/framework/scope.h
@@ -45,10 +45,10 @@ class Scope {
   Scope& NewScope() const;
 
   /// Create a variable with given name if it doesn't exist.
-  Variable* NewVar(const std::string& name);
+  Variable* GetOrCreateVar(const std::string& name);
 
   /// Create a variable with a scope-unique name.
-  Variable* NewVar();
+  Variable* GetOrCreateVar();
 
   /// Find a variable in the scope or any of its ancestors. Returns
   /// nullptr if cannot find.
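The `scope.cc` hunks above pin down the renamed method's contract: a repeated name is a lookup rather than an error, and the zero-argument overload manufactures a scope-unique name. A short usage sketch, assuming the headers and namespaces of this tree:

```cpp
#include "paddle/framework/scope.h"

void Demo() {
  paddle::framework::Scope scope;

  // The first call constructs "w"; the second call with the same name
  // hits the early `vars_.find(name)` return, so both pointers alias
  // one variable.
  auto* a = scope.GetOrCreateVar("w");
  auto* b = scope.GetOrCreateVar("w");
  // a == b

  // The zero-argument overload builds a scope-unique name from the
  // scope address and the current variable count ("%p.%d").
  auto* anon = scope.GetOrCreateVar();
  (void)a; (void)b; (void)anon;
}
```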
diff --git a/paddle/operators/cond_op.cc b/paddle/operators/cond_op.cc
index 2737104a205cbc1e18ce4a3a45592a416d38a874..e82d643ef9b7684c89b84d8033835fdc324ec525 100644
--- a/paddle/operators/cond_op.cc
+++ b/paddle/operators/cond_op.cc
@@ -134,7 +134,7 @@ void CondOp::PrepareDataForSubnet(
   for (int i = 0; i < BRANCH_NUM; ++i) {
     for (auto& output : (*sub_net_op_[i]).Outputs()) {
       for (auto& var_name : output.second) {
-        sub_scopes[i]->NewVar(var_name);
+        sub_scopes[i]->GetOrCreateVar(var_name);
       }
     }
   }
diff --git a/paddle/operators/dynamic_recurrent_op.cc b/paddle/operators/dynamic_recurrent_op.cc
index b919aef8fb62e5b2331c2d842556e0642ea6b095..8a8a623f284f54981b043e50e476e9205d4e179e 100644
--- a/paddle/operators/dynamic_recurrent_op.cc
+++ b/paddle/operators/dynamic_recurrent_op.cc
@@ -29,7 +29,7 @@ namespace detail {
 inline void CreateVariables(Scope& scope,
                             const std::vector<std::string>& var_names) {
   for (const auto& name : var_names) {
-    scope.NewVar(name);
+    scope.GetOrCreateVar(name);
   }
 }
 
@@ -112,7 +112,7 @@ void DynamicRecurrentOp::WriteStepInputs() const {
       auto& step_scope = cache_.GetScope(step);
       Variable* var = step_scope.FindVar(item.first);
       if (var == nullptr) {
-        var = step_scope.NewVar(item.first);
+        var = step_scope.GetOrCreateVar(item.first);
       }
       var->GetMutable<LoDTensor>()->ShareDataWith(tensor);
     }
@@ -125,7 +125,7 @@ void DynamicRecurrentOp::WriteStepOutputs() const {
   for (auto& item : step_outputs_) {
     auto* var = scope.FindVar(item.first);
     if (var == nullptr) {
-      var = scope.NewVar(item.first);
+      var = scope.GetOrCreateVar(item.first);
     }
     auto* tensor = var->GetMutable<LoDTensor>();
     item.second.WriteShared(step, *tensor);
diff --git a/paddle/operators/dynamic_recurrent_op_test.cc b/paddle/operators/dynamic_recurrent_op_test.cc
index 675a7890f3fa6bb7ab9dbbdb04894b2557214a8a..3ab5958835e15724446eb926338bb63bcd3d2bb4 100644
--- a/paddle/operators/dynamic_recurrent_op_test.cc
+++ b/paddle/operators/dynamic_recurrent_op_test.cc
@@ -36,7 +36,7 @@ void OpDescNewVar(const std::string& param_name,
 // create a LoD tensor in scope with specific dims
 LoDTensor* CreateVar(Scope& scope, std::string name, framework::DDim dims,
                      const platform::Place& place) {
-  auto* var = scope.NewVar(name);
+  auto* var = scope.GetOrCreateVar(name);
   auto* tensor = var->GetMutable<LoDTensor>();
   tensor->Resize(dims);
   tensor->mutable_data<float>(place);
@@ -85,7 +85,7 @@ class DynamicRecurrentOpTestHelper : public ::testing::Test {
   void CreateGlobalVariables() {
     platform::CPUPlace place;
-    scope.NewVar("step_scopes");
+    scope.GetOrCreateVar("step_scopes");
     CreateVar(scope, "boot_mem", framework::make_ddim({10, 20}), place);
     // auto* out0 =
     CreateVar(scope, "out0", framework::make_ddim({10, 20}), place);
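One consequence of the get-or-create contract for the step-scope code above: the `FindVar`-then-create guard in `WriteStepInputs` and `WriteStepOutputs` is equivalent to a single call. A sketch of the collapsed form, using a hypothetical `StepTensor` helper and assuming `Scope::GetOrCreateVar` keeps the lookup-on-hit behavior shown in `scope.cc`:

```cpp
#include <string>

#include "paddle/framework/lod_tensor.h"
#include "paddle/framework/scope.h"

namespace f = paddle::framework;

// Fetch a step variable's tensor, creating the variable on first use.
// Equivalent to the guarded form used above:
//   Variable* var = step_scope.FindVar(name);
//   if (var == nullptr) var = step_scope.GetOrCreateVar(name);
f::LoDTensor* StepTensor(f::Scope& step_scope, const std::string& name) {
  return step_scope.GetOrCreateVar(name)->GetMutable<f::LoDTensor>();
}
```

Keeping the explicit guard is harmless; it merely repeats the lookup that `GetOrCreateVar` already performs.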
diff --git a/paddle/operators/recurrent_op.cc b/paddle/operators/recurrent_op.cc
index 00647f55f79d54602f8e755dba059dfaacc9f41e..261c7b8ca0cf5e0c1b36f74bd9841d8da4d574a4 100644
--- a/paddle/operators/recurrent_op.cc
+++ b/paddle/operators/recurrent_op.cc
@@ -70,14 +70,14 @@ void RecurrentAlgorithm::CreateScopes(const Scope& scope,
       // the weight are located in parent scope
       for (auto& var_name : input.second) {
         if (!step_scope.FindVar(var_name)) {
-          step_scope.NewVar(var_name)->GetMutable<LoDTensor>();
+          step_scope.GetOrCreateVar(var_name)->GetMutable<LoDTensor>();
         }
       }
     }
     // create stepnet's outputs
     for (const auto& output : (*stepnet_)->Outputs()) {
       for (auto& var_name : output.second) {
-        step_scope.NewVar(var_name);
+        step_scope.GetOrCreateVar(var_name);
       }
     }
     step_scopes->emplace_back(&step_scope);
@@ -87,7 +87,8 @@ void RecurrentAlgorithm::InitMemories(Scope* step_scope) const {
   for (auto& attr : arg_->memories) {
-    auto* pre_mem = step_scope->NewVar(attr.pre_var)->GetMutable<LoDTensor>();
+    auto* pre_mem =
+        step_scope->GetOrCreateVar(attr.pre_var)->GetMutable<LoDTensor>();
     PADDLE_ENFORCE(step_scope->FindVar(attr.boot_var) != nullptr,
                    "memory [%s]'s boot variable [%s] not exists", attr.var,
                    attr.boot_var);
@@ -167,9 +168,10 @@ void RecurrentGradientAlgorithm::LinkBootMemoryGradients(
                    "memory variable [%s] does not exists", attr.var);
     PADDLE_ENFORCE(step_scope->FindVar(attr.boot_var) != nullptr,
                    "boot variable [%s] does not exists", attr.boot_var);
-    auto* mem_grad = step_scope->NewVar(attr.var)->GetMutable<LoDTensor>();
+    auto* mem_grad =
+        step_scope->GetOrCreateVar(attr.var)->GetMutable<LoDTensor>();
     auto* boot_mem_grad =
-        step_scope->NewVar(attr.boot_var)->GetMutable<LoDTensor>();
+        step_scope->GetOrCreateVar(attr.boot_var)->GetMutable<LoDTensor>();
     boot_mem_grad->Resize(mem_grad->dims());
     boot_mem_grad->ShareDataWith(*mem_grad);
   }
diff --git a/paddle/operators/rnn/recurrent_op_utils.cc b/paddle/operators/rnn/recurrent_op_utils.cc
index d264664a99e2af88fc2c35f50476ed4722a9eea0..1d5f2d73abf8633871084398fbab322aa665f2c2 100644
--- a/paddle/operators/rnn/recurrent_op_utils.cc
+++ b/paddle/operators/rnn/recurrent_op_utils.cc
@@ -40,7 +40,7 @@ void SegmentInputs(const std::vector<Scope*>& step_scopes,
     f::DDim step_dims = slice_ddim(dims, 1, dims.size());
     for (size_t j = 0; j < seq_len; j++) {
       Tensor* step_input =
-          step_scopes[j]->NewVar(inlinks[i])->GetMutable<Tensor>();
+          step_scopes[j]->GetOrCreateVar(inlinks[i])->GetMutable<Tensor>();
       // The input of operators of each step is Tensor here.
       // Maybe need to modify Slice function.
       *step_input = input->Slice(j, j + 1);
diff --git a/paddle/pybind/protobuf.cc b/paddle/pybind/protobuf.cc
index 0e4bbe8415fd86ab29c6809e7652dc581b4e6004..1c4f87d634387ecede79381a874bfbd9de321c49 100644
--- a/paddle/pybind/protobuf.cc
+++ b/paddle/pybind/protobuf.cc
@@ -137,7 +137,7 @@ void BindBlockDesc(py::module &m) {
       .def("new_var",
            [](BlockDescBind &self, py::bytes byte_name) {
              std::string name = byte_name;
-             return self.NewVar(name);
+             return self.GetOrCreateVar(name);
            },
            py::return_value_policy::reference)
       .def("var",
diff --git a/paddle/pybind/pybind.cc b/paddle/pybind/pybind.cc
index 0f6e3101e26c5ac249664ce8badc10adc939305f..c32b31eae470afb18f9ee4f9086a0c5075121e6c 100644
--- a/paddle/pybind/pybind.cc
+++ b/paddle/pybind/pybind.cc
@@ -165,7 +165,7 @@ All parameter, weight, gradient are variables in Paddle.
   py::class_<Scope>(m, "Scope", "")
       .def("new_var",
            [](Scope &self, const std::string &name) -> Variable * {
-             return self.NewVar(name);
+             return self.GetOrCreateVar(name);
            },
            py::return_value_policy::reference)
       .def("find_var", &Scope::FindVar, py::return_value_policy::reference)
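The two pybind hunks keep the Python-visible method name `new_var` while only the C++ symbol behind it changes, so Python callers are unaffected by the rename. A self-contained pybind11 sketch of that forwarding pattern, with toy stand-ins for the framework types and a hypothetical module name (the real bindings live in `paddle/pybind/`):

```cpp
#include <pybind11/pybind11.h>

#include <string>
#include <unordered_map>

namespace py = pybind11;

// Toy stand-ins for the bound framework types.
struct Variable {};

struct Scope {
  // Get-or-create: default-constructs on first use, reuses afterwards.
  // (unordered_map guarantees element pointers stay valid across rehash.)
  Variable* GetOrCreateVar(const std::string& name) { return &vars_[name]; }
  std::unordered_map<std::string, Variable> vars_;
};

PYBIND11_MODULE(demo, m) {  // "demo" is a placeholder module name
  py::class_<Variable>(m, "Variable");
  py::class_<Scope>(m, "Scope")
      .def(py::init<>())
      // The exported name stays "new_var"; only the C++ method it
      // forwards to is renamed.
      .def("new_var",
           [](Scope &self, const std::string &name) -> Variable * {
             return self.GetOrCreateVar(name);
           },
           py::return_value_policy::reference);
}
```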