From a0631364a53ceee7d9eadcc70cf250e42ace9ca8 Mon Sep 17 00:00:00 2001
From: xiongkun
Date: Tue, 30 Nov 2021 09:59:27 +0800
Subject: [PATCH] Fix test calc gradient (#37672)

* add scope_guard

* 1. fix control flow cases 2. fix calc_gradient

---
 .../new_executor/interpretercore_util.cc   | 28 ++++++++++++++++---
 .../new_executor/standalone_executor.cc    |  4 ++-
 .../tests/unittests/test_calc_gradient.py  | 26 +++++++++--------
 3 files changed, 41 insertions(+), 17 deletions(-)

diff --git a/paddle/fluid/framework/new_executor/interpretercore_util.cc b/paddle/fluid/framework/new_executor/interpretercore_util.cc
index 98799e049d..0501522a7a 100644
--- a/paddle/fluid/framework/new_executor/interpretercore_util.cc
+++ b/paddle/fluid/framework/new_executor/interpretercore_util.cc
@@ -16,6 +16,9 @@
 #include "paddle/fluid/framework/executor_gc_helper.h"
 #include "paddle/fluid/framework/new_executor/data_transfer.h"
+#include "paddle/fluid/operators/controlflow/conditional_block_op_helper.h"
+#include "paddle/fluid/operators/controlflow/recurrent_op_helper.h"
+#include "paddle/fluid/operators/controlflow/while_op_helper.h"
 
 namespace paddle {
 namespace framework {
@@ -127,6 +130,9 @@ void build_variable_scope(const framework::BlockDesc& block,
   for (auto& var_desc : block.AllVars()) {
     auto var_name = var_desc->Name();
+    // TODO(xiongkun): users may create a variable whose name already exists;
+    // under such circumstances we should raise an error. Currently we can't
+    // get the var_desc of startup_program, so leave it for later.
     if (var_name == framework::kEmptyVarName) {
       continue;
     }
@@ -149,7 +155,7 @@ void build_variable_scope(const framework::BlockDesc& block,
 }
 
 void create_all_ops(const framework::BlockDesc& block,
-                    std::vector<std::shared_ptr<OperatorBase>>* ops) {
+                    std::vector<std::unique_ptr<OperatorBase>>* ops) {
   for (auto& op : block.AllOps()) {
     VLOG(3) << "CreateOp from : " << op->Type();
@@ -164,7 +170,7 @@ void create_all_ops(const framework::BlockDesc& block,
     }
     auto op_base =
         info.Creator()(op->Type(), inputs_names, outputs_names, op_attr_map);
-    ops->emplace_back(std::shared_ptr<OperatorBase>(op_base));
+    ops->emplace_back(std::unique_ptr<OperatorBase>(op_base));
   }
 }
@@ -260,10 +266,24 @@ void build_op_func_list(const platform::Place& place,
   Scope* local_scope = use_local_scope ? var_scope->GetMutableLocalScope()
                                        : var_scope->GetMutableScope();
   auto& all_op_kernels = OperatorWithKernel::AllOpKernels();
+  std::vector<std::unique_ptr<OperatorBase>>
+      ops_unique;  // its elements will be moved to vec_func_list
+  // Step 1: create all ops for current block.
+  create_all_ops(block, &ops_unique);
+  // If gc is enabled and block size > 1
+  const ProgramDesc& main_program = *block.Program();
+  operators::PrepareSafeEagerDeletionOnConditionalOpAndConditionalGradOp(
+      main_program, block.ID(), ops_unique);
+  operators::PrepareSafeEagerDeletionOnWhileOpAndWhileGradOp(
+      main_program, block.ID(), ops_unique);
+  operators::PrepareSafeEagerDeletionOnRecurrentOpAndRecurrentGradOp(
+      main_program, block.ID(), ops_unique);
+
   std::vector<std::shared_ptr<OperatorBase>>
       ops;  // its elements will be moved to vec_func_list
-  // Step 1: create all ops for current block.
-  create_all_ops(block, &ops);
+  for (auto& op_unique : ops_unique) {
+    ops.emplace_back(std::move(op_unique));
+  }
   auto unused_var_map = get_unused_vars(block, ops);
 
   for (size_t i = 0; i < ops.size(); ++i) {
diff --git a/paddle/fluid/framework/new_executor/standalone_executor.cc b/paddle/fluid/framework/new_executor/standalone_executor.cc
index 51885543d1..50770b6c4a 100644
--- a/paddle/fluid/framework/new_executor/standalone_executor.cc
+++ b/paddle/fluid/framework/new_executor/standalone_executor.cc
@@ -33,6 +33,7 @@ StandaloneExecutor::StandaloneExecutor(const platform::Place& place,
   if (scope) {
     auto name_list = scope->LocalVarNames();
     for (auto name : name_list) {
+      VLOG(4) << "Sync Variable from variable scope: " << name;
       auto v = scope->Var(name);
       if (!global_scope_.HasVar(name)) {
         global_scope_.AddVar(name, *v);
@@ -87,8 +88,9 @@ void StandaloneExecutor::BuildVariableScope(const framework::ProgramDesc& pdesc,
     if (var->Name() == framework::kEmptyVarName) {
       continue;
     }
 
-    if (!var_scope->HasVar(var->Name())) {
+    VLOG(4) << "Create variable from startup_prog: "
+            << var->Proto()->SerializeAsString();
     var_scope->AddVar(var->Name(), var);
   }
 }
diff --git a/python/paddle/fluid/tests/unittests/test_calc_gradient.py b/python/paddle/fluid/tests/unittests/test_calc_gradient.py
index fdfaf6a311..339a66b062 100644
--- a/python/paddle/fluid/tests/unittests/test_calc_gradient.py
+++ b/python/paddle/fluid/tests/unittests/test_calc_gradient.py
@@ -14,6 +14,7 @@
 
 from __future__ import print_function
 
+import paddle
 import unittest
 import numpy as np
 import paddle.fluid as fluid
@@ -83,19 +84,20 @@ class TestDoubleGrad(unittest.TestCase):
 
 class TestGradientWithPrune(unittest.TestCase):
     def test_prune(self):
-        x = fluid.data(name='x', shape=[3], dtype='float32')
-        x.stop_gradient = False
-        x1, x2, x3 = fluid.layers.split(x, dim=0, num_or_sections=3)
-        y = x1 * 2
-        x1_grad = fluid.gradients(y, x)
+        with paddle.fluid.scope_guard(paddle.static.Scope()):
+            x = fluid.data(name='x', shape=[3], dtype='float32')
+            x.stop_gradient = False
+            x1, x2, x3 = fluid.layers.split(x, dim=0, num_or_sections=3)
+            y = x1 * 2
+            x1_grad = fluid.gradients(y, x)
 
-        exe = fluid.Executor(fluid.CPUPlace())
-        main = fluid.default_main_program()
-        exe.run(fluid.default_startup_program())
-        out = exe.run(main,
-                      feed={'x': np.ones([3]).astype('float32')},
-                      fetch_list=[x1_grad])
-        self.assertTrue(np.array_equal(out[0], [2., 0., 0.]))
+            exe = fluid.Executor(fluid.CPUPlace())
+            main = fluid.default_main_program()
+            exe.run(fluid.default_startup_program())
+            out = exe.run(main,
+                          feed={'x': np.ones([3]).astype('float32')},
+                          fetch_list=[x1_grad])
+            self.assertTrue(np.array_equal(out[0], [2., 0., 0.]))
 
 
 if __name__ == "__main__":
--
GitLab
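Note (not part of the patch): the updated test_prune wraps program construction and
execution in fluid.scope_guard(paddle.static.Scope()), so the variables it creates
live in a private Scope rather than the global scope that later tests would otherwise
inherit. Below is a minimal sketch of that pattern, assuming Paddle 2.x static-graph
mode; the names and the tiny y = x * 2 program are illustrative only.

    # Minimal sketch: run a static-graph program inside its own Scope so that
    # its variables do not leak into the scope shared by other code.
    import numpy as np
    import paddle
    import paddle.fluid as fluid

    paddle.enable_static()

    with fluid.scope_guard(paddle.static.Scope()):
        x = fluid.data(name='x', shape=[3], dtype='float32')
        x.stop_gradient = False
        y = x * 2                       # toy program, illustrative only
        x_grad = fluid.gradients(y, x)  # gradient of y w.r.t. x

        exe = fluid.Executor(fluid.CPUPlace())
        exe.run(fluid.default_startup_program())
        out = exe.run(fluid.default_main_program(),
                      feed={'x': np.ones([3]).astype('float32')},
                      fetch_list=[x_grad])
        print(out[0])  # expect [2., 2., 2.], since dy/dx = 2 elementwise

Everything executed inside the with block reads and writes the temporary Scope; once
the block exits, the previous global scope is restored, which is what keeps this test
from interfering with others run by the same process.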