Commit 5bd14f6e authored by Dong Zhihong

Merge remote-tracking branch 'origin/develop' into fix/scope

@@ -44,7 +44,7 @@ cc_test(backward_test SRCS backward_test.cc DEPS backward recurrent_op device_co
cc_library(executor SRCS executor.cc DEPS op_registry device_context scope framework_proto backward)
set(EXECUTOR_TEST_OP elementwise_add_op gaussian_random_op feed_op fetch_op
- mul_op sum_op squared_l2_distance_op fill_constant_op sgd_op)
+ mul_op sum_op squared_l2_distance_op fill_constant_op sgd_op mean_op)
if(WITH_GPU)
nv_test(executor_test SRCS executor_test.cc DEPS executor ${EXECUTOR_TEST_OP})
else()
......
@@ -273,6 +273,21 @@ static bool AllGradInSet(const std::vector<std::string>& names,
return true;
}
static void CreateGradVarInBlock(BlockDescBind* block_desc,
size_t grad_op_start_index) {
auto ops = block_desc->AllOps();
for (size_t op_index = grad_op_start_index; op_index < ops.size();
++op_index) {
for (const auto& output : ops[op_index]->Outputs()) {
for (const auto& real_output : output.second) {
if (!block_desc->HasVar(real_output)) {
block_desc->Var(real_output);
}
}
}
}
}
std::vector<std::unique_ptr<OpDescBind>> MakeOpGrad(
const std::unique_ptr<OpDescBind>& op_desc,
std::unordered_set<std::string>* no_grad_vars,
@@ -326,15 +341,16 @@ std::vector<std::unique_ptr<OpDescBind>> MakeBlockBackward(
std::unordered_map<std::string, std::vector<size_t>> dup_out_ops;
size_t grad_desc_idx = 0;
std::vector<std::unique_ptr<OpDescBind>> backward_descs;
for (auto it = op_descs.rbegin(); it != op_descs.rend(); ++it) {
std::vector<std::unique_ptr<OpDescBind>> op_grads =
MakeOpGrad(*it, no_grad_vars, grad_to_var);
if ((*it)->Type() == "recurrent") {
PADDLE_ENFORCE_EQ(
- op_grads.size(), size_t(1),
+ op_grads.size(), static_cast<size_t>(1),
"rnn_op's gradient process should contain only one op.");
- int step_block_idx = (*it)->GetBlockAttr("stop_block");
+ int step_block_idx = (*it)->GetBlockAttr("step_block");
auto backward_block_op_descs = MakeBlockBackward(
program_desc, step_block_idx, no_grad_vars, grad_to_var);
BlockDescBind* backward_block = program_desc.AppendBlock(*cur_block);
@@ -380,10 +396,11 @@ std::vector<std::unique_ptr<OpDescBind>> MakeBlockBackward(
backward_descs.insert(backward_descs.begin() + p.first + 1,
std::move(p.second));
}
return backward_descs;
}
- void AppendBackward(ProgramDescBind& program_desc,
+ void AppendBackward(ProgramDescBind& program_desc, const VarDescBind& target,
const std::unordered_set<std::string>& no_grad_vars) {
std::unordered_set<std::string> no_grad_var_names;
no_grad_var_names.reserve(no_grad_vars.size() + 1);
@@ -391,13 +408,34 @@ void AppendBackward(ProgramDescBind& program_desc,
for (auto& name : no_grad_vars) {
no_grad_var_names.insert(GradVarName(name));
}
const int root_block_idx = 0;
auto root_block = program_desc.Block(root_block_idx);
auto& all_ops = root_block->ops_;
// insert fill one op for target
std::string fill_one_op_out = GradVarName(target.Name());
std::unique_ptr<OpDescBind> fill_one_op(
new OpDescBind("fill_constant", {}, {{"Out", {fill_one_op_out}}},
{{"shape", std::vector<int>{1}},
{"value", static_cast<float>(1.0)},
{"dataType", framework::DataType::FP32}}));
all_ops.push_back(std::move(fill_one_op));
size_t forward_op_num = all_ops.size();
size_t forward_block_num = program_desc.Size();
std::unordered_map<std::string, std::string> grad_to_var;
auto backward_op_descs = MakeBlockBackward(program_desc, root_block_idx,
&no_grad_var_names, &grad_to_var);
- auto& forw_op_descs = program_desc.Block(root_block_idx)->ops_;
for (auto& ptr : backward_op_descs) {
- forw_op_descs.push_back(std::move(ptr));
+ all_ops.push_back(std::move(ptr));
}
root_block->Var(fill_one_op_out);
// create grad_var for all blocks in this program
CreateGradVarInBlock(root_block, forward_op_num);
for (size_t block_index = forward_block_num;
block_index < program_desc.Size(); ++block_index) {
CreateGradVarInBlock(program_desc.Block(block_index), 0);
}
}
......
@@ -29,7 +29,7 @@ extern std::unique_ptr<OperatorBase> Backward(
// TODO(jiayi): Add target as parameter and generate backward op
// according to target.
- void AppendBackward(ProgramDescBind& program_desc,
+ void AppendBackward(ProgramDescBind& program_desc, const VarDescBind& target,
const std::unordered_set<std::string>& no_grad_vars);
}  // namespace framework
......
@@ -18,6 +18,7 @@
#include "paddle/framework/block_desc.h"
#include "paddle/framework/op_desc.h"
#include "paddle/framework/op_registry.h"
#include "paddle/framework/var_desc.h"
#include "paddle/operators/net_op.h"
namespace paddle {
@@ -468,10 +469,14 @@ TEST(Backward, simple_single_op) {
op->SetInput("b", {"b"});
op->SetOutput("Out", {"out"});
- AppendBackward(program, {});
+ auto target = f::VarDescBind("out");
AppendBackward(program, target, {});
- ASSERT_EQ(block->AllOps().size(), 2UL);
+ ASSERT_EQ(block->AllOps().size(), 3UL);
- f::OpDescBind *grad_op = block->AllOps()[1];
+ f::OpDescBind *fill_op = block->AllOps()[1];
EXPECT_EQ(fill_op->Type(), "fill_constant");
f::OpDescBind *grad_op = block->AllOps()[2];
EXPECT_EQ(grad_op->Type(), "rowwise_add_grad");
ASSERT_EQ(grad_op->InputNames().size(), 1UL);
ASSERT_EQ(grad_op->OutputNames().size(), 2UL);
@@ -494,13 +499,17 @@ TEST(Backward, default_attribute) {
op->SetOutput("Out", {"out"});
op->CheckAttrs();
- AppendBackward(program, {});
+ auto target = f::VarDescBind("out");
AppendBackward(program, target, {});
- ASSERT_EQ(block->AllOps().size(), 2UL);
+ ASSERT_EQ(block->AllOps().size(), 3UL);
EXPECT_EQ(boost::get<int>(op->GetAttr("x_num_col_dims")), 1);
EXPECT_EQ(boost::get<int>(op->GetAttr("y_num_col_dims")), 1);
- f::OpDescBind *grad_op = block->AllOps()[1];
+ f::OpDescBind *fill_op = block->AllOps()[1];
EXPECT_EQ(fill_op->Type(), "fill_constant");
f::OpDescBind *grad_op = block->AllOps()[2];
ASSERT_EQ(grad_op->Type(), "mul_grad");
EXPECT_EQ(boost::get<int>(grad_op->GetAttr("x_num_col_dims")), 1);
EXPECT_EQ(boost::get<int>(grad_op->GetAttr("y_num_col_dims")), 1);
@@ -528,10 +537,15 @@ TEST(Backward, simple_mult_op) {
op3->SetInput("b", {"b3"});
op3->SetOutput("Out", {"out3"});
- AppendBackward(program, {});
+ auto target = f::VarDescBind("out3");
size_t forward_len = block->AllOps().size();
AppendBackward(program, target, {});
- ASSERT_EQ(block->AllOps().size(), 6UL);
+ ASSERT_EQ(block->AllOps().size(), 6UL + 1);
- f::OpDescBind *grad_op1 = block->AllOps()[5];
+ f::OpDescBind *fill_op = block->AllOps()[forward_len];
EXPECT_EQ(fill_op->Type(), "fill_constant");
f::OpDescBind *grad_op1 = block->AllOps()[6];
EXPECT_EQ(grad_op1->Type(), "rowwise_add_grad");
ASSERT_EQ(grad_op1->InputNames().size(), 1UL);
ASSERT_EQ(grad_op1->OutputNames().size(), 2UL);
@@ -542,7 +556,7 @@ TEST(Backward, simple_mult_op) {
EXPECT_EQ(grad_op1->Output(f::GradVarName("b")),
std::vector<std::string>({f::GradVarName("b1")}));
- f::OpDescBind *grad_op2 = block->AllOps()[4];
+ f::OpDescBind *grad_op2 = block->AllOps()[5];
EXPECT_EQ(grad_op2->Type(), "mul_grad");
ASSERT_EQ(grad_op2->InputNames().size(), 4UL);
ASSERT_EQ(grad_op2->OutputNames().size(), 2UL);
@@ -556,7 +570,7 @@ TEST(Backward, simple_mult_op) {
EXPECT_EQ(grad_op2->Output(f::GradVarName("Y")),
std::vector<std::string>({f::GradVarName("y2")}));
- f::OpDescBind *grad_op3 = block->AllOps()[3];
+ f::OpDescBind *grad_op3 = block->AllOps()[4];
EXPECT_EQ(grad_op3->Type(), "rowwise_add_grad");
ASSERT_EQ(grad_op3->InputNames().size(), 1UL);
ASSERT_EQ(grad_op3->OutputNames().size(), 2UL);
@@ -596,10 +610,15 @@ TEST(Backward, intermedia_var_no_grad) {
op4->SetInput("Y", {"out3"});
op4->SetOutput("Out", {"out4"});
- AppendBackward(program, {"out3"});
+ auto target = f::VarDescBind("out4");
size_t forward_len = block->AllOps().size();
AppendBackward(program, target, {"out3"});
- ASSERT_EQ(block->AllOps().size(), 6UL);
+ ASSERT_EQ(block->AllOps().size(), 7UL);
- f::OpDescBind *grad_op1 = block->AllOps()[5];
+ f::OpDescBind *fill_op = block->AllOps()[forward_len];
EXPECT_EQ(fill_op->Type(), "fill_constant");
f::OpDescBind *grad_op1 = block->AllOps()[6];
EXPECT_EQ(grad_op1->Type(), "rowwise_add_grad");
ASSERT_EQ(grad_op1->InputNames().size(), 1UL);
ASSERT_EQ(grad_op1->OutputNames().size(), 2UL);
@@ -610,7 +629,7 @@ TEST(Backward, intermedia_var_no_grad) {
EXPECT_EQ(grad_op1->Output(f::GradVarName("b")),
std::vector<std::string>({f::GradVarName("b1")}));
- f::OpDescBind *grad_op4 = block->AllOps()[4];
+ f::OpDescBind *grad_op4 = block->AllOps()[5];
EXPECT_EQ(grad_op4->Type(), "mul_grad");
ASSERT_EQ(grad_op4->InputNames().size(), 4UL);
ASSERT_EQ(grad_op4->OutputNames().size(), 2UL);
@@ -642,10 +661,15 @@ TEST(Backward, var_no_grad) {
op2->SetOutput("Y", {"y2"});
op2->SetOutput("Z", {"z2"});
- AppendBackward(program, {"z1"});
+ auto target = f::VarDescBind("z2");
size_t forward_len = block->AllOps().size();
AppendBackward(program, target, {"z1"});
- ASSERT_EQ(block->AllOps().size(), 5UL);
+ ASSERT_EQ(block->AllOps().size(), 6UL);
- f::OpDescBind *grad_op2 = block->AllOps()[2];
+ f::OpDescBind *fill_op = block->AllOps()[forward_len];
EXPECT_EQ(fill_op->Type(), "fill_constant");
f::OpDescBind *grad_op2 = block->AllOps()[3];
ASSERT_EQ(grad_op2->Type(), "mult_in_out_grad");
ASSERT_EQ(grad_op2->InputNames().size(), 6UL);
ASSERT_EQ(grad_op2->OutputNames().size(), 2UL);
@@ -661,7 +685,7 @@ TEST(Backward, var_no_grad) {
std::vector<std::string>({f::GradVarName("y1")}));
EXPECT_EQ(grad_op2->Output(f::GradVarName("H")), std::vector<std::string>());
- f::OpDescBind *fill_zero_op = block->AllOps()[3];
+ f::OpDescBind *fill_zero_op = block->AllOps()[4];
ASSERT_EQ(fill_zero_op->Type(), "fill_zeros_like");
ASSERT_EQ(fill_zero_op->InputNames().size(), 1UL);
ASSERT_EQ(fill_zero_op->OutputNames().size(), 1UL);
@@ -669,7 +693,7 @@ TEST(Backward, var_no_grad) {
EXPECT_EQ(fill_zero_op->Output("Y"),
std::vector<std::string>({std::string("z1") + f::kZeroVarSuffix}));
- f::OpDescBind *grad_op1 = block->AllOps()[4];
+ f::OpDescBind *grad_op1 = block->AllOps()[5];
ASSERT_EQ(grad_op1->Type(), "mult_in_out_grad");
ASSERT_EQ(grad_op1->InputNames().size(), 6UL);
ASSERT_EQ(grad_op1->OutputNames().size(), 2UL);
@@ -709,10 +733,15 @@ TEST(Backward, shared_var) {
op3->SetInput("b", {"b3"});
op3->SetOutput("Out", {"out3"});
- AppendBackward(program, {});
+ auto target = f::VarDescBind("out3");
size_t forward_len = block->AllOps().size();
AppendBackward(program, target, {});
- ASSERT_EQ(block->AllOps().size(), 7UL);
+ ASSERT_EQ(block->AllOps().size(), 8UL);
- f::OpDescBind *grad_op3 = block->AllOps()[3];
+ f::OpDescBind *fill_op = block->AllOps()[forward_len];
EXPECT_EQ(fill_op->Type(), "fill_constant");
f::OpDescBind *grad_op3 = block->AllOps()[4];
ASSERT_EQ(grad_op3->Type(), "rowwise_add_grad");
ASSERT_EQ(grad_op3->InputNames().size(), 1UL);
ASSERT_EQ(grad_op3->OutputNames().size(), 2UL);
@@ -723,7 +752,7 @@ TEST(Backward, shared_var) {
EXPECT_EQ(grad_op3->Output(f::GradVarName("b")),
std::vector<std::string>({f::GradVarName("b3")}));
- f::OpDescBind *grad_op4 = block->AllOps()[4];
+ f::OpDescBind *grad_op4 = block->AllOps()[5];
ASSERT_EQ(grad_op4->Type(), "mul_grad");
ASSERT_EQ(grad_op4->InputNames().size(), 4UL);
ASSERT_EQ(grad_op4->OutputNames().size(), 2UL);
@@ -737,7 +766,7 @@ TEST(Backward, shared_var) {
EXPECT_EQ(grad_op4->Output(f::GradVarName("Y")),
std::vector<std::string>({f::GradVarName("y2")}));
- f::OpDescBind *sum_op = block->AllOps()[5];
+ f::OpDescBind *sum_op = block->AllOps()[6];
ASSERT_EQ(sum_op->Type(), "sum");
ASSERT_EQ(sum_op->InputNames().size(), 1UL);
ASSERT_EQ(sum_op->OutputNames().size(), 1UL);
@@ -747,7 +776,7 @@ TEST(Backward, shared_var) {
EXPECT_EQ(sum_op->Output("Out"),
std::vector<std::string>({f::GradVarName("out1")}));
- f::OpDescBind *grad_op1 = block->AllOps()[6];
+ f::OpDescBind *grad_op1 = block->AllOps()[7];
ASSERT_EQ(grad_op1->Type(), "rowwise_add_grad");
ASSERT_EQ(grad_op1->InputNames().size(), 1UL);
ASSERT_EQ(grad_op1->OutputNames().size(), 2UL);
@@ -769,7 +798,11 @@ TEST(Backward, half_backward) {
op1->SetInput("Y", {"b"});
op1->SetOutput("Out", {"out"});
- AppendBackward(program, {"b"});
+ auto target = f::VarDescBind("out");
size_t forward_len = block->AllOps().size();
AppendBackward(program, target, {"b"});
f::OpDescBind *fill_op = block->AllOps()[forward_len];
EXPECT_EQ(fill_op->Type(), "fill_constant");
auto ops = block->AllOps();
- ASSERT_EQ(2UL, ops.size());
+ ASSERT_EQ(3UL, ops.size());
}
\ No newline at end of file
@@ -39,7 +39,7 @@ class BlockDescBind {
std::unordered_map<std::string, std::string> *grad_to_var);
friend void AppendBackward(
- ProgramDescBind &program_desc,
+ ProgramDescBind &program_desc, const VarDescBind &target,
const std::unordered_set<std::string> &no_grad_vars);
BlockDescBind(ProgramDescBind *prog, BlockDesc *desc)
......
@@ -34,6 +34,7 @@ USE_OP(mul);
USE_OP(sum);
USE_OP(squared_l2_distance);
USE_OP(fill_constant);
USE_OP(mean);
USE_OP(sgd);
using namespace paddle::platform;
@@ -45,9 +46,16 @@ void AddOp(const std::string& type, const VariableNameMap& inputs,
// insert output
for (auto kv : outputs) {
for (auto v : kv.second) {
- auto var = block->Var(v);
- var->SetType(VarDesc::LOD_TENSOR);
- var->SetDataType(paddle::framework::DataType::FP32);
+ // <<<<<<< HEAD
+ // auto var = block->Var(v);
+ // var->SetType(VarDesc::LOD_TENSOR);
// var->SetDataType(paddle::framework::DataType::FP32);
// =======
if (!block->HasVar(v)) {
auto var = block->Var(v);
var->SetDataType(paddle::framework::DataType::FP32);
}
// >>>>>>> origin/develop
}
}
@@ -147,12 +155,12 @@ class ExecutorTesterRandom : public ::testing::Test {
AddOp("squared_l2_distance", {{"X", {"a"}}, {"Y", {"a_out"}}},
{{"Out", {"l2_distance"}}, {"sub_result", {"l2_distance_sub"}}}, {},
root_block);
AddOp("mean", {{"X", {"l2_distance"}}}, {{"Out", {"mean_out"}}}, {},
root_block);
// backward
- AddOp("fill_constant", {}, {{"Out", {"l2_distance@GRAD"}}},
-       {{"shape", std::vector<int>{batch_size, 1}}, {"value", float(1.0)}},
-       root_block);
- AppendBackward(program, {});
+ auto target = VarDescBind("mean_out");
+ AppendBackward(program, target, {});
// update
AddOp("fill_constant", {}, {{"Out", {"learning_rate"}}},
......
@@ -54,7 +54,10 @@ inline void VectorToRepeated(const std::vector<bool> &vec,
class VarDescBind {
 public:
- explicit VarDescBind(const std::string &name) { desc_.set_name(name); }
+ explicit VarDescBind(const std::string &name) {
desc_.set_name(name);
desc_.set_type(VarDesc::LOD_TENSOR);
}
VarDesc *Proto() { return &desc_; }
......
@@ -118,9 +118,9 @@ void BindProgramDesc(py::module &m) {
.def("append_block", &ProgramDescBind::AppendBlock,
py::return_value_policy::reference)
.def("append_backward",
- [](ProgramDescBind &program_desc,
+ [](ProgramDescBind &program_desc, const VarDescBind &target,
const std::unordered_set<std::string> &no_grad_vars) {
- AppendBackward(program_desc, no_grad_vars);
+ AppendBackward(program_desc, target, no_grad_vars);
})
.def("block", &ProgramDescBind::Block, py::return_value_policy::reference)
.def("num_blocks", &ProgramDescBind::Size)
......
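A minimal usage sketch of the re-exported Python binding, following the pattern used in test_program.py at the end of this diff (program and block construction elided; `prog` and `block` are assumed to be a bound ProgramDesc and its root block):

# Sketch only: assumes `prog` and `block` were set up as in test_program.py below.
target = block.new_var("out2")           # the variable to differentiate
prog.append_backward(target, set())      # second argument is the no-grad-var set
# A fill_constant op that writes 1.0 into the target's gradient ("out2@GRAD")
# now precedes the generated *_grad ops:
print([op.type() for op in block.all_ops()])
# e.g. ['mul', 'elementwise_add', 'fill_constant', 'elementwise_add_grad', 'mul_grad']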
@@ -176,6 +176,18 @@ class Operator(object):
proto = OpProtoHolder.instance().get_op_proto(type)
if inputs is not None:
given = set()
need = set()
for n in inputs:
given.add(n)
for m in proto.inputs:
need.add(m.name)
if not given == need:
raise ValueError(
"Incorrect setting for input(s) of operator \"%s\". Need: [%s] Given: [%s]"
% (type, ", ".join(str(e) for e in need), ", ".join(
str(e) for e in given)))
for in_proto in proto.inputs:
in_argus = inputs[in_proto.name]
if not isinstance(in_argus, list):
@@ -190,6 +202,18 @@ class Operator(object):
self.desc.set_input(in_proto.name, in_argu_names)
if outputs is not None:
given = set()
need = set()
for n in outputs:
given.add(n)
for m in proto.outputs:
need.add(m.name)
if not given == need:
raise ValueError(
"Incorrect setting for output(s) of operator \"%s\". Need: [%s] Given: [%s]"
% (type, ", ".join(str(e) for e in need), ", ".join(
str(e) for e in given)))
for out_proto in proto.outputs:
out_argus = outputs[out_proto.name]
if not isinstance(out_argus, list):
......
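A hedged sketch of the argument-completeness check that the framework.py hunk above adds to Operator.__init__: the keys supplied by the caller must exactly match the argument names declared in the operator's proto (the example names are illustrative, taken from the "mul" op used elsewhere in this diff):

# Sketch of the check added above, not a new API.
need = set(m.name for m in proto.inputs)   # e.g. {"X", "Y"} for a "mul" op
given = set(inputs)                        # e.g. {"X"} if "Y" was forgotten
if given != need:
    raise ValueError(
        'Incorrect setting for input(s) of operator "%s". Need: [%s] Given: [%s]'
        % (type, ", ".join(need), ", ".join(given)))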
@@ -51,11 +51,14 @@ class TestProgram(unittest.TestCase):
sum_op_desc.set_input("Y", ["b1"])
sum_op_desc.set_output("Out", ["out2"])
target = block.new_var("out2")
expect_ops = [
- "mul", "elementwise_add", "elementwise_add_grad", "mul_grad"
+ "mul", "elementwise_add", "fill_constant", "elementwise_add_grad",
"mul_grad"
]
actual_ops = []
- prog.append_backward(set())
+ prog.append_backward(target, set())
for op in block.all_ops():
actual_ops.append(op.type())
print(actual_ops)
......