Commit d7383c6d authored by Qiao Longfei, committed by Yu Yang

create grad_var when running Backward pass (#4796)

* add target to Backward; generate vars in the block when calling backward

* modify backward_test

* fix executor_test

* set var desc default type to LOD_TENSOR

* update backward_test

* insert loss at the top level of backward

* create grad vars for all blocks in the current program

* optimize code

* only create vars for newly created blocks during backward
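
Taken together, the commit changes the public entry point: callers now name the target variable whose gradient seeds the backward pass, and `AppendBackward` inserts a `fill_constant` op that writes 1.0 into `target@GRAD` ahead of the generated gradient ops, then creates any gradient variables the block is missing. A minimal sketch of the new call pattern, modeled on the updated `backward_test.cc` in the diff below (the `f::` alias and the single-block program are assumptions carried over from that test file, not a standalone build):

```cpp
#include "paddle/framework/backward.h"
#include "paddle/framework/block_desc.h"
#include "paddle/framework/op_desc.h"
#include "paddle/framework/var_desc.h"

namespace f = paddle::framework;

// Sketch only: mirrors TEST(Backward, simple_single_op) as updated below.
void SimpleBackwardSketch() {
  f::ProgramDescBind program;
  f::BlockDescBind *block = program.Block(0);

  f::OpDescBind *op = block->AppendOp();
  op->SetType("rowwise_add");
  op->SetInput("X", {"x"});
  op->SetInput("b", {"b"});
  op->SetOutput("Out", {"out"});

  // New in this commit: backward is generated with respect to a target var.
  auto target = f::VarDescBind("out");
  f::AppendBackward(program, target, /*no_grad_vars=*/{});

  // The block now holds [rowwise_add, fill_constant, rowwise_add_grad]:
  // fill_constant seeds out@GRAD with 1.0, and the missing grad vars
  // (x@GRAD, b@GRAD) were created on the block by CreateGradVarInBlock.
}
```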
Parent dbb60572
@@ -44,7 +44,7 @@ cc_test(backward_test SRCS backward_test.cc DEPS backward recurrent_op device_co
 cc_library(executor SRCS executor.cc DEPS op_registry device_context scope framework_proto backward)
 set(EXECUTOR_TEST_OP elementwise_add_op gaussian_random_op feed_op fetch_op
-    mul_op sum_op squared_l2_distance_op fill_constant_op sgd_op)
+    mul_op sum_op squared_l2_distance_op fill_constant_op sgd_op mean_op)
 if(WITH_GPU)
   nv_test(executor_test SRCS executor_test.cc DEPS executor ${EXECUTOR_TEST_OP})
 else()
......
@@ -273,6 +273,21 @@ static bool AllGradInSet(const std::vector<std::string>& names,
   return true;
 }
 
+static void CreateGradVarInBlock(BlockDescBind* block_desc,
+                                 size_t grad_op_start_index) {
+  auto ops = block_desc->AllOps();
+  for (size_t op_index = grad_op_start_index; op_index < ops.size();
+       ++op_index) {
+    for (const auto& output : ops[op_index]->Outputs()) {
+      for (const auto& real_output : output.second) {
+        if (!block_desc->HasVar(real_output)) {
+          block_desc->NewVar(real_output);
+        }
+      }
+    }
+  }
+}
+
 std::vector<std::unique_ptr<OpDescBind>> MakeOpGrad(
     const std::unique_ptr<OpDescBind>& op_desc,
     std::unordered_set<std::string>* no_grad_vars,
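
For readers skimming the hunk above: `CreateGradVarInBlock` simply walks every op from `grad_op_start_index` onward and registers each output name the block does not yet know as a new variable. A self-contained toy model of that walk (the `ToyOp`/`ToyBlock` types are invented here purely for illustration; the real code iterates `OpDescBind::Outputs()` on a `BlockDescBind`):

```cpp
#include <cstddef>
#include <iostream>
#include <map>
#include <set>
#include <string>
#include <vector>

// Invented stand-ins for OpDescBind / BlockDescBind.
struct ToyOp {
  std::string type;
  std::map<std::string, std::vector<std::string>> outputs;  // slot -> names
};
struct ToyBlock {
  std::set<std::string> vars;
  std::vector<ToyOp> ops;
};

// Same shape as CreateGradVarInBlock: create a var for every output of
// every op at or after grad_op_start_index, skipping ones that exist.
void CreateGradVars(ToyBlock* block, size_t grad_op_start_index) {
  for (size_t i = grad_op_start_index; i < block->ops.size(); ++i) {
    for (const auto& slot : block->ops[i].outputs) {
      for (const auto& name : slot.second) {
        block->vars.insert(name);  // std::set::insert is a no-op for known vars
      }
    }
  }
}

int main() {
  ToyBlock block;
  block.vars = {"x", "w", "out"};
  block.ops = {{"mul", {{"Out", {"out"}}}},
               {"fill_constant", {{"Out", {"out@GRAD"}}}},
               {"mul_grad", {{"Out", {"x@GRAD", "w@GRAD"}}}}};
  CreateGradVars(&block, /*grad_op_start_index=*/1);  // forward ops end at 1
  for (const auto& v : block.vars) std::cout << v << "\n";
  // Prints out, out@GRAD, w, w@GRAD, x, x@GRAD: the grad vars now exist.
}
```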
@@ -326,15 +341,16 @@ std::vector<std::unique_ptr<OpDescBind>> MakeBlockBackward(
   std::unordered_map<std::string, std::vector<size_t>> dup_out_ops;
   size_t grad_desc_idx = 0;
   std::vector<std::unique_ptr<OpDescBind>> backward_descs;
+
   for (auto it = op_descs.rbegin(); it != op_descs.rend(); ++it) {
     std::vector<std::unique_ptr<OpDescBind>> op_grads =
         MakeOpGrad(*it, no_grad_vars, grad_to_var);
     if ((*it)->Type() == "recurrent") {
       PADDLE_ENFORCE_EQ(
-          op_grads.size(), size_t(1),
+          op_grads.size(), static_cast<size_t>(1),
           "rnn_op's gradient process should contain only one op.");
-      int step_block_idx = (*it)->GetBlockAttr("stop_block");
+      int step_block_idx = (*it)->GetBlockAttr("step_block");
       auto backward_block_op_descs = MakeBlockBackward(
           program_desc, step_block_idx, no_grad_vars, grad_to_var);
       BlockDescBind* backward_block = program_desc.AppendBlock(*cur_block);
@@ -380,10 +396,11 @@ std::vector<std::unique_ptr<OpDescBind>> MakeBlockBackward(
     backward_descs.insert(backward_descs.begin() + p.first + 1,
                           std::move(p.second));
   }
+
   return backward_descs;
 }
 
-void AppendBackward(ProgramDescBind& program_desc,
+void AppendBackward(ProgramDescBind& program_desc, const VarDescBind& target,
                     const std::unordered_set<std::string>& no_grad_vars) {
   std::unordered_set<std::string> no_grad_var_names;
   no_grad_var_names.reserve(no_grad_vars.size() + 1);
@@ -391,13 +408,34 @@ void AppendBackward(ProgramDescBind& program_desc,
   for (auto& name : no_grad_vars) {
     no_grad_var_names.insert(GradVarName(name));
   }
 
   const int root_block_idx = 0;
+  auto root_block = program_desc.Block(root_block_idx);
+  auto& all_ops = root_block->ops_;
+
+  // insert fill one op for target
+  std::string fill_one_op_out = GradVarName(target.Name());
+  std::unique_ptr<OpDescBind> fill_one_op(
+      new OpDescBind("fill_constant", {}, {{"Out", {fill_one_op_out}}},
+                     {{"shape", std::vector<int>{1}},
+                      {"value", static_cast<float>(1.0)},
+                      {"dataType", framework::DataType::FP32}}));
+  all_ops.push_back(std::move(fill_one_op));
+
+  size_t forward_op_num = all_ops.size();
+  size_t forward_block_num = program_desc.Size();
   std::unordered_map<std::string, std::string> grad_to_var;
   auto backward_op_descs = MakeBlockBackward(program_desc, root_block_idx,
                                              &no_grad_var_names, &grad_to_var);
-  auto& forw_op_descs = program_desc.Block(root_block_idx)->ops_;
   for (auto& ptr : backward_op_descs) {
-    forw_op_descs.push_back(std::move(ptr));
+    all_ops.push_back(std::move(ptr));
   }
+  root_block->NewVar(fill_one_op_out);
+
+  // create grad_var for all blocks in this program
+  CreateGradVarInBlock(root_block, forward_op_num);
+  for (size_t block_index = forward_block_num;
+       block_index < program_desc.Size(); ++block_index) {
+    CreateGradVarInBlock(program_desc.Block(block_index), 0);
+  }
 }
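
The `fill_one_op` above is the analytic seed of reverse-mode differentiation. For a scalar target t, backpropagation starts from the identity ∂t/∂t = 1 (exactly what the shape-{1} `fill_constant` writes into `target@GRAD`), and every other gradient follows by the chain rule. A sketch of the recursion the generated grad ops implement (notation is ours, not from the source):

```latex
\frac{\partial t}{\partial t} = 1, \qquad
\frac{\partial t}{\partial x} \;=\; \sum_{\text{ops } o\,:\; x \,\mapsto\, y_o}
\frac{\partial t}{\partial y_o}\cdot\frac{\partial y_o}{\partial x}
```

The sum over consumers is what the inserted `sum` ops realize when one forward variable feeds several ops, as the `shared_var` test below checks.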
......
@@ -29,7 +29,7 @@ extern std::unique_ptr<OperatorBase> Backward(
 // TODO(jiayi): Add target as parameter and generate backward op
 // according to target.
-void AppendBackward(ProgramDescBind& program_desc,
+void AppendBackward(ProgramDescBind& program_desc, const VarDescBind& target,
                     const std::unordered_set<std::string>& no_grad_vars);
 
 }  // namespace framework
......
@@ -18,6 +18,7 @@
 #include "paddle/framework/block_desc.h"
 #include "paddle/framework/op_desc.h"
 #include "paddle/framework/op_registry.h"
+#include "paddle/framework/var_desc.h"
 #include "paddle/operators/net_op.h"
 
 namespace paddle {
@@ -468,10 +469,14 @@ TEST(Backward, simple_single_op) {
   op->SetInput("b", {"b"});
   op->SetOutput("Out", {"out"});
 
-  AppendBackward(program, {});
+  auto target = f::VarDescBind("out");
+  AppendBackward(program, target, {});
 
-  ASSERT_EQ(block->AllOps().size(), 2UL);
-  f::OpDescBind *grad_op = block->AllOps()[1];
+  ASSERT_EQ(block->AllOps().size(), 3UL);
+  f::OpDescBind *fill_op = block->AllOps()[1];
+  EXPECT_EQ(fill_op->Type(), "fill_constant");
+
+  f::OpDescBind *grad_op = block->AllOps()[2];
   EXPECT_EQ(grad_op->Type(), "rowwise_add_grad");
   ASSERT_EQ(grad_op->InputNames().size(), 1UL);
   ASSERT_EQ(grad_op->OutputNames().size(), 2UL);
@@ -494,13 +499,17 @@ TEST(Backward, default_attribute) {
   op->SetOutput("Out", {"out"});
   op->CheckAttrs();
 
-  AppendBackward(program, {});
+  auto target = f::VarDescBind("out");
+  AppendBackward(program, target, {});
 
-  ASSERT_EQ(block->AllOps().size(), 2UL);
+  ASSERT_EQ(block->AllOps().size(), 3UL);
   EXPECT_EQ(boost::get<int>(op->GetAttr("x_num_col_dims")), 1);
   EXPECT_EQ(boost::get<int>(op->GetAttr("y_num_col_dims")), 1);
 
-  f::OpDescBind *grad_op = block->AllOps()[1];
+  f::OpDescBind *fill_op = block->AllOps()[1];
+  EXPECT_EQ(fill_op->Type(), "fill_constant");
+
+  f::OpDescBind *grad_op = block->AllOps()[2];
   ASSERT_EQ(grad_op->Type(), "mul_grad");
   EXPECT_EQ(boost::get<int>(grad_op->GetAttr("x_num_col_dims")), 1);
   EXPECT_EQ(boost::get<int>(grad_op->GetAttr("y_num_col_dims")), 1);
@@ -528,10 +537,15 @@ TEST(Backward, simple_mult_op) {
   op3->SetInput("b", {"b3"});
   op3->SetOutput("Out", {"out3"});
 
-  AppendBackward(program, {});
+  auto target = f::VarDescBind("out3");
+  size_t forward_len = block->AllOps().size();
+  AppendBackward(program, target, {});
 
-  ASSERT_EQ(block->AllOps().size(), 6UL);
-  f::OpDescBind *grad_op1 = block->AllOps()[5];
+  ASSERT_EQ(block->AllOps().size(), 6UL + 1);
+  f::OpDescBind *fill_op = block->AllOps()[forward_len];
+  EXPECT_EQ(fill_op->Type(), "fill_constant");
+
+  f::OpDescBind *grad_op1 = block->AllOps()[6];
   EXPECT_EQ(grad_op1->Type(), "rowwise_add_grad");
   ASSERT_EQ(grad_op1->InputNames().size(), 1UL);
   ASSERT_EQ(grad_op1->OutputNames().size(), 2UL);
@@ -542,7 +556,7 @@ TEST(Backward, simple_mult_op) {
   EXPECT_EQ(grad_op1->Output(f::GradVarName("b")),
             std::vector<std::string>({f::GradVarName("b1")}));
 
-  f::OpDescBind *grad_op2 = block->AllOps()[4];
+  f::OpDescBind *grad_op2 = block->AllOps()[5];
   EXPECT_EQ(grad_op2->Type(), "mul_grad");
   ASSERT_EQ(grad_op2->InputNames().size(), 4UL);
   ASSERT_EQ(grad_op2->OutputNames().size(), 2UL);
@@ -556,7 +570,7 @@ TEST(Backward, simple_mult_op) {
   EXPECT_EQ(grad_op2->Output(f::GradVarName("Y")),
             std::vector<std::string>({f::GradVarName("y2")}));
 
-  f::OpDescBind *grad_op3 = block->AllOps()[3];
+  f::OpDescBind *grad_op3 = block->AllOps()[4];
   EXPECT_EQ(grad_op3->Type(), "rowwise_add_grad");
   ASSERT_EQ(grad_op3->InputNames().size(), 1UL);
   ASSERT_EQ(grad_op3->OutputNames().size(), 2UL);
@@ -596,10 +610,15 @@ TEST(Backward, intermedia_var_no_grad) {
   op4->SetInput("Y", {"out3"});
   op4->SetOutput("Out", {"out4"});
 
-  AppendBackward(program, {"out3"});
+  auto target = f::VarDescBind("out4");
+  size_t forward_len = block->AllOps().size();
+  AppendBackward(program, target, {"out3"});
 
-  ASSERT_EQ(block->AllOps().size(), 6UL);
-  f::OpDescBind *grad_op1 = block->AllOps()[5];
+  ASSERT_EQ(block->AllOps().size(), 7UL);
+  f::OpDescBind *fill_op = block->AllOps()[forward_len];
+  EXPECT_EQ(fill_op->Type(), "fill_constant");
+
+  f::OpDescBind *grad_op1 = block->AllOps()[6];
   EXPECT_EQ(grad_op1->Type(), "rowwise_add_grad");
   ASSERT_EQ(grad_op1->InputNames().size(), 1UL);
   ASSERT_EQ(grad_op1->OutputNames().size(), 2UL);
@@ -610,7 +629,7 @@ TEST(Backward, intermedia_var_no_grad) {
   EXPECT_EQ(grad_op1->Output(f::GradVarName("b")),
             std::vector<std::string>({f::GradVarName("b1")}));
 
-  f::OpDescBind *grad_op4 = block->AllOps()[4];
+  f::OpDescBind *grad_op4 = block->AllOps()[5];
   EXPECT_EQ(grad_op4->Type(), "mul_grad");
   ASSERT_EQ(grad_op4->InputNames().size(), 4UL);
   ASSERT_EQ(grad_op4->OutputNames().size(), 2UL);
@@ -642,10 +661,15 @@ TEST(Backward, var_no_grad) {
   op2->SetOutput("Y", {"y2"});
   op2->SetOutput("Z", {"z2"});
 
-  AppendBackward(program, {"z1"});
+  auto target = f::VarDescBind("z2");
+  size_t forward_len = block->AllOps().size();
+  AppendBackward(program, target, {"z1"});
 
-  ASSERT_EQ(block->AllOps().size(), 5UL);
-  f::OpDescBind *grad_op2 = block->AllOps()[2];
+  ASSERT_EQ(block->AllOps().size(), 6UL);
+  f::OpDescBind *fill_op = block->AllOps()[forward_len];
+  EXPECT_EQ(fill_op->Type(), "fill_constant");
+
+  f::OpDescBind *grad_op2 = block->AllOps()[3];
   ASSERT_EQ(grad_op2->Type(), "mult_in_out_grad");
   ASSERT_EQ(grad_op2->InputNames().size(), 6UL);
   ASSERT_EQ(grad_op2->OutputNames().size(), 2UL);
@@ -661,7 +685,7 @@ TEST(Backward, var_no_grad) {
             std::vector<std::string>({f::GradVarName("y1")}));
   EXPECT_EQ(grad_op2->Output(f::GradVarName("H")), std::vector<std::string>());
 
-  f::OpDescBind *fill_zero_op = block->AllOps()[3];
+  f::OpDescBind *fill_zero_op = block->AllOps()[4];
   ASSERT_EQ(fill_zero_op->Type(), "fill_zeros_like");
   ASSERT_EQ(fill_zero_op->InputNames().size(), 1UL);
   ASSERT_EQ(fill_zero_op->OutputNames().size(), 1UL);
@@ -669,7 +693,7 @@ TEST(Backward, var_no_grad) {
   EXPECT_EQ(fill_zero_op->Output("Y"),
             std::vector<std::string>({std::string("z1") + f::kZeroVarSuffix}));
 
-  f::OpDescBind *grad_op1 = block->AllOps()[4];
+  f::OpDescBind *grad_op1 = block->AllOps()[5];
   ASSERT_EQ(grad_op1->Type(), "mult_in_out_grad");
   ASSERT_EQ(grad_op1->InputNames().size(), 6UL);
   ASSERT_EQ(grad_op1->OutputNames().size(), 2UL);
@@ -709,10 +733,15 @@ TEST(Backward, shared_var) {
   op3->SetInput("b", {"b3"});
   op3->SetOutput("Out", {"out3"});
 
-  AppendBackward(program, {});
+  auto target = f::VarDescBind("out3");
+  size_t forward_len = block->AllOps().size();
+  AppendBackward(program, target, {});
 
-  ASSERT_EQ(block->AllOps().size(), 7UL);
-  f::OpDescBind *grad_op3 = block->AllOps()[3];
+  ASSERT_EQ(block->AllOps().size(), 8UL);
+  f::OpDescBind *fill_op = block->AllOps()[forward_len];
+  EXPECT_EQ(fill_op->Type(), "fill_constant");
+
+  f::OpDescBind *grad_op3 = block->AllOps()[4];
   ASSERT_EQ(grad_op3->Type(), "rowwise_add_grad");
   ASSERT_EQ(grad_op3->InputNames().size(), 1UL);
   ASSERT_EQ(grad_op3->OutputNames().size(), 2UL);
@@ -723,7 +752,7 @@ TEST(Backward, shared_var) {
   EXPECT_EQ(grad_op3->Output(f::GradVarName("b")),
             std::vector<std::string>({f::GradVarName("b3")}));
 
-  f::OpDescBind *grad_op4 = block->AllOps()[4];
+  f::OpDescBind *grad_op4 = block->AllOps()[5];
   ASSERT_EQ(grad_op4->Type(), "mul_grad");
   ASSERT_EQ(grad_op4->InputNames().size(), 4UL);
   ASSERT_EQ(grad_op4->OutputNames().size(), 2UL);
@@ -737,7 +766,7 @@ TEST(Backward, shared_var) {
   EXPECT_EQ(grad_op4->Output(f::GradVarName("Y")),
             std::vector<std::string>({f::GradVarName("y2")}));
 
-  f::OpDescBind *sum_op = block->AllOps()[5];
+  f::OpDescBind *sum_op = block->AllOps()[6];
   ASSERT_EQ(sum_op->Type(), "sum");
   ASSERT_EQ(sum_op->InputNames().size(), 1UL);
   ASSERT_EQ(sum_op->OutputNames().size(), 1UL);
@@ -747,7 +776,7 @@ TEST(Backward, shared_var) {
   EXPECT_EQ(sum_op->Output("Out"),
             std::vector<std::string>({f::GradVarName("out1")}));
 
-  f::OpDescBind *grad_op1 = block->AllOps()[6];
+  f::OpDescBind *grad_op1 = block->AllOps()[7];
   ASSERT_EQ(grad_op1->Type(), "rowwise_add_grad");
   ASSERT_EQ(grad_op1->InputNames().size(), 1UL);
   ASSERT_EQ(grad_op1->OutputNames().size(), 2UL);
@@ -769,7 +798,11 @@ TEST(Backward, half_backward) {
   op1->SetInput("Y", {"b"});
   op1->SetOutput("Out", {"out"});
 
-  AppendBackward(program, {"b"});
+  auto target = f::VarDescBind("out");
+  size_t forward_len = block->AllOps().size();
+  AppendBackward(program, target, {"b"});
+  f::OpDescBind *fill_op = block->AllOps()[forward_len];
+  EXPECT_EQ(fill_op->Type(), "fill_constant");
   auto ops = block->AllOps();
-  ASSERT_EQ(2UL, ops.size());
-}
\ No newline at end of file
+  ASSERT_EQ(3UL, ops.size());
+}
@@ -39,7 +39,7 @@ class BlockDescBind {
       std::unordered_map<std::string, std::string> *grad_to_var);
 
   friend void AppendBackward(
-      ProgramDescBind &program_desc,
+      ProgramDescBind &program_desc, const VarDescBind &target,
       const std::unordered_set<std::string> &no_grad_vars);
 
   BlockDescBind(ProgramDescBind *prog, BlockDesc *desc)
......
@@ -34,6 +34,7 @@ USE_OP(mul);
 USE_OP(sum);
 USE_OP(squared_l2_distance);
 USE_OP(fill_constant);
+USE_OP(mean);
 USE_OP(sgd);
 
 using namespace paddle::platform;
@@ -45,9 +46,10 @@ void AddOp(const std::string& type, const VariableNameMap& inputs,
   // insert output
   for (auto kv : outputs) {
     for (auto v : kv.second) {
-      auto var = block->NewVar(v);
-      var->SetType(VarDesc::LOD_TENSOR);
-      var->SetDataType(paddle::framework::DataType::FP32);
+      if (!block->HasVar(v)) {
+        auto var = block->NewVar(v);
+        var->SetDataType(paddle::framework::DataType::FP32);
+      }
     }
   }
 
@@ -147,12 +149,12 @@ class ExecutorTesterRandom : public ::testing::Test {
     AddOp("squared_l2_distance", {{"X", {"a"}}, {"Y", {"a_out"}}},
           {{"Out", {"l2_distance"}}, {"sub_result", {"l2_distance_sub"}}}, {},
           root_block);
+    AddOp("mean", {{"X", {"l2_distance"}}}, {{"Out", {"mean_out"}}}, {},
+          root_block);
 
     // backward
-    AddOp("fill_constant", {}, {{"Out", {"l2_distance@GRAD"}}},
-          {{"shape", std::vector<int>{batch_size, 1}}, {"value", float(1.0)}},
-          root_block);
-    AppendBackward(program, {});
+    auto target = VarDescBind("mean_out");
+    AppendBackward(program, target, {});
 
     // update
     AddOp("fill_constant", {}, {{"Out", {"learning_rate"}}},
@@ -328,4 +330,4 @@ int main(int argc, char** argv) {
   return RUN_ALL_TESTS();
 }
 
-#endif
\ No newline at end of file
+#endif
@@ -54,7 +54,10 @@ inline void VectorToRepeated(const std::vector<bool> &vec,
 
 class VarDescBind {
  public:
-  explicit VarDescBind(const std::string &name) { desc_.set_name(name); }
+  explicit VarDescBind(const std::string &name) {
+    desc_.set_name(name);
+    desc_.set_type(VarDesc::LOD_TENSOR);
+  }
 
   VarDesc *Proto() { return &desc_; }
@@ -118,9 +118,9 @@ void BindProgramDesc(py::module &m) {
       .def("append_block", &ProgramDescBind::AppendBlock,
            py::return_value_policy::reference)
       .def("append_backward",
-           [](ProgramDescBind &program_desc,
+           [](ProgramDescBind &program_desc, const VarDescBind &target,
               const std::unordered_set<std::string> &no_grad_vars) {
-             AppendBackward(program_desc, no_grad_vars);
+             AppendBackward(program_desc, target, no_grad_vars);
            })
       .def("block", &ProgramDescBind::Block, py::return_value_policy::reference)
       .def("num_blocks", &ProgramDescBind::Size)
......
@@ -51,11 +51,14 @@ class TestProgram(unittest.TestCase):
         sum_op_desc.set_input("Y", ["b1"])
         sum_op_desc.set_output("Out", ["out2"])
 
+        target = block.new_var("out2")
+
         expect_ops = [
-            "mul", "elementwise_add", "elementwise_add_grad", "mul_grad"
+            "mul", "elementwise_add", "fill_constant", "elementwise_add_grad",
+            "mul_grad"
         ]
         actual_ops = []
-        prog.append_backward(set())
+        prog.append_backward(target, set())
         for op in block.all_ops():
             actual_ops.append(op.type())
         print(actual_ops)
......