Unverified commit 95924686, authored by Yiqun Liu, committed by GitHub

Fix gcc4.9 (#6442)

* Fix compiling errors under gcc 4.9; a minimal sketch of the workaround pattern is shown below.

* Refine the check of cxx compiler flags in api/CMakeLists.txt.
Parent: 74ead9dc
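Most of the C++ changes in this commit replace a bare `{}` argument with an explicitly typed empty object (`AttributeMap{}`, `VariableNameMap{}`, or `std::unordered_set<std::string>{}`), because gcc 4.9 failed to compile these call sites as written while newer compilers accept them. The snippet below is a minimal, self-contained sketch of that pattern; `CreateOp` and the two map aliases are illustrative stand-ins, not Paddle's real declarations.

```cpp
// A minimal sketch of the workaround pattern used throughout this commit.
// NOTE: CreateOp, VariableNameMap and AttributeMap below are illustrative
// stand-ins, not Paddle's real declarations.
#include <string>
#include <unordered_map>
#include <vector>

using VariableNameMap =
    std::unordered_map<std::string, std::vector<std::string>>;
using AttributeMap = std::unordered_map<std::string, float>;  // placeholder

// Stand-in for an operator-creating function such as OpRegistry::CreateOp.
void CreateOp(const std::string& type, const VariableNameMap& inputs,
              const VariableNameMap& outputs, const AttributeMap& attrs) {
  // The real registry constructs an operator from these arguments.
}

int main() {
  // Old call sites passed a bare `{}` for the attribute map, e.g.
  //   CreateOp("sum", {{"X", {"x0"}}}, {{"Out", {"out"}}}, {});
  // Newer compilers accept this, but gcc 4.9 failed on the equivalent
  // calls in Paddle's code.

  // The commit spells out the empty object's type instead:
  CreateOp("sum", {{"X", {"x0"}}}, {{"Out", {"out"}}}, AttributeMap{});
  return 0;
}
```

The same substitution repeats in every hunk below; the spelled-out type simply matches whichever parameter the `{}` used to fill (`AttributeMap`, `VariableNameMap`, or `std::unordered_set<std::string>`).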
@@ -25,8 +25,18 @@ FILE(GLOB PY_PADDLE_PYTHON_FILES ${PADDLE_SOURCE_DIR}/paddle/py_paddle/*.py)
 SET_SOURCE_FILES_PROPERTIES(Paddle.i PROPERTIES CPLUSPLUS ON)
+SET(SWIG_NEED_FLAGS
+  -ftls-model=global-dynamic
+  -Wno-parentheses-equality
+  -Wno-self-assign
+  -Wno-maybe-uninitialized
+  -Wno-missing-field-initializers)
+FOREACH(flag ${SWIG_NEED_FLAGS})
+  safe_set_cxxflag(SWIG_CXX_FLAGS ${flag})
+ENDFOREACH()
 SET(CMAKE_SWIG_OUTDIR ${CMAKE_CURRENT_BINARY_DIR})
-SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-parentheses-equality -Wno-missing-field-initializers -Wno-self-assign -ftls-model=global-dynamic")
+SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${SWIG_CXX_FLAGS}")
 SET(SWIG_MODULE_swig_paddle_EXTRA_DEPS
     paddle_parameter
...
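The CMake change above implements the second bullet of the commit message: instead of appending warning-suppression flags unconditionally, each flag in `SWIG_NEED_FLAGS` is passed through `safe_set_cxxflag`, which adds a flag to `SWIG_CXX_FLAGS` only when the active compiler supports it (for example, `-Wno-maybe-uninitialized` is a gcc warning flag, while `-Wno-self-assign` and `-Wno-parentheses-equality` are clang flags). A minimal sketch of such a guard, written against CMake's standard `CheckCXXCompilerFlag` module, is shown below; Paddle's actual helper macro may differ in detail.

```cmake
# A minimal sketch of a guarded flag helper in the spirit of safe_set_cxxflag.
# NOTE: this is an illustrative reimplementation, not Paddle's actual macro.
include(CheckCXXCompilerFlag)

macro(demo_safe_set_cxxflag flag_var flag)
  # Derive a valid cache-variable name from the flag, e.g.
  # "-Wno-self-assign" -> "CXX_SUPPORTS__Wno_self_assign".
  string(REGEX REPLACE "[-=+]" "_" flag_name ${flag})
  check_cxx_compiler_flag(${flag} CXX_SUPPORTS_${flag_name})
  if(CXX_SUPPORTS_${flag_name})
    set(${flag_var} "${${flag_var}} ${flag}")
  endif()
endmacro()

# Usage mirroring the diff above:
# foreach(flag ${SWIG_NEED_FLAGS})
#   demo_safe_set_cxxflag(SWIG_CXX_FLAGS ${flag})
# endforeach()
```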
@@ -190,8 +190,9 @@ static std::unique_ptr<OperatorBase> BackwardRecursive(
     // collect all the offset for each alias,
     // insert a sum operator to add all aliases to output
     insert_position.push_back(
-        {dup_op.back(), OpRegistry::CreateOp("sum", {{"X", dup_outputs}},
-                                             {{"Out", {name}}}, {})});
+        {dup_op.back(),
+         OpRegistry::CreateOp("sum", {{"X", dup_outputs}}, {{"Out", {name}}},
+                              AttributeMap{})});
   }
   // make sure the inserted `sum` ops follow the BFS order.
@@ -216,7 +217,8 @@ static std::unique_ptr<OperatorBase> BackwardRecursive(
       // If part of input gradient of that operator is not calculated, fill
       // zero variables to that input gradient.
       net->AppendOp(OpRegistry::CreateOp("fill_zeros_like", {{"X", {prefix}}},
-                                         {{"Y", {grad_input}}}, {}));
+                                         {{"Y", {grad_input}}},
+                                         AttributeMap{}));
     }
     return false;
   });
@@ -392,8 +394,9 @@ std::vector<std::unique_ptr<OpDescBind>> MakeOpGrad(
           0, in_name.size() - sizeof(kGradVarSuffix) / sizeof(char) + 1);
       std::string new_name = prefix + kZeroVarSuffix;
       desc->Rename(in_name, new_name);
-      std::unique_ptr<OpDescBind> fill_zeros_op(new OpDescBind(
-          "fill_zeros_like", {{"X", {prefix}}}, {{"Y", {new_name}}}, {}));
+      std::unique_ptr<OpDescBind> fill_zeros_op(
+          new OpDescBind("fill_zeros_like", {{"X", {prefix}}},
+                         {{"Y", {new_name}}}, AttributeMap{}));
       pending_fill_zeros_ops.push_back(std::move(fill_zeros_op));
     }
   }
@@ -483,8 +486,9 @@ std::vector<std::unique_ptr<OpDescBind>> MakeBlockBackward(
         sum_op_inputs.emplace_back(new_name);
         next_g_name = sum_op_inputs.back();
       }
-      std::unique_ptr<OpDescBind> sum_op(new OpDescBind(
-          "sum", {{"X", sum_op_inputs}}, {{"Out", {out_name}}}, {}));
+      std::unique_ptr<OpDescBind> sum_op(
+          new OpDescBind("sum", {{"X", sum_op_inputs}}, {{"Out", {out_name}}},
+                         AttributeMap{}));
       pending_sum_ops.push_back({dup_op.back(), std::move(sum_op)});
     }
   }
...
@@ -106,15 +106,15 @@ class FcOp : public operators::NetOp {
   FcOp(const std::string &type, const VariableNameMap &inputs,
        const VariableNameMap &outputs, const AttributeMap &attrs)
       : NetOp(type, inputs, outputs, attrs) {
-    AppendOp(OpRegistry::CreateOp("mul",
-                                  {{"X", {Input("X")}}, {"Y", {Input("W")}}},
-                                  {{"Out", {Output("mul_result")}}}, {}));
+    AppendOp(OpRegistry::CreateOp(
+        "mul", {{"X", {Input("X")}}, {"Y", {Input("W")}}},
+        {{"Out", {Output("mul_result")}}}, AttributeMap{}));
     auto input_b = Inputs("b");
     std::string before_act = "mul_result";
     if (input_b.size() != 0) {
       AppendOp(OpRegistry::CreateOp(
           "rowwise_add", {{"X", {Output("mul_result")}}, {"b", {input_b[0]}}},
-          {{"Out", {Output("add_result")}}}, {}));
+          {{"Out", {Output("add_result")}}}, AttributeMap{}));
       before_act = "add_result";
     } else {
       auto out_varname = Output("add_result");
@@ -124,7 +124,7 @@ class FcOp : public operators::NetOp {
     }
     AppendOp(OpRegistry::CreateOp("sigmoid", {{"X", {Output(before_act)}}},
-                                  {{"Out", {Output("Out")}}}, {}));
+                                  {{"Out", {Output("Out")}}}, AttributeMap{}));
     CompleteAddOp(false);
   }
 };
@@ -278,8 +278,9 @@ REGISTER_OPERATOR(scale, f::NoneOp);
 REGISTER_OP_CPU_KERNEL(scale, f::NoneKernel<paddle::platform::CPUPlace, float>);
 TEST(Backward, simple_op_not_need_grad) {
-  auto fwd = f::OpRegistry::CreateOp(
-      "rowwise_add", {{"X", {"x"}}, {"b", {"b"}}}, {{"Out", {"out"}}}, {});
+  auto fwd =
+      f::OpRegistry::CreateOp("rowwise_add", {{"X", {"x"}}, {"b", {"b"}}},
+                              {{"Out", {"out"}}}, f::AttributeMap{});
   ASSERT_NE(fwd, nullptr);
   auto gop = f::Backward(*fwd, {"x"});
   ASSERT_EQ(gop->Output(f::GradVarName("X")), f::kEmptyVarName);
@@ -296,9 +297,10 @@ TEST(Backward, net_fc_backward_normal) {
       {{"mul_result", {"mul_res"}},
        {"add_result", {"add_re"}},
        {"Out", {"out"}}},
-      {});
+      f::AttributeMap{});
   ASSERT_NE(fwd, nullptr);
-  std::shared_ptr<f::OperatorBase> gop = f::Backward(*fwd, {});
+  std::shared_ptr<f::OperatorBase> gop =
+      f::Backward(*fwd, std::unordered_set<std::string>{});
   ASSERT_TRUE(gop->IsNetOp());
   auto net = static_cast<ops::NetOp *>(gop.get());
@@ -322,9 +324,10 @@ TEST(Backward, net_fc_backward_not_have_b) {
       {{"mul_result", {"mul_res"}},
        {"add_result", {"add_res"}},
        {"Out", {"tmp"}}},
-      {});
+      f::AttributeMap{});
   ASSERT_NE(fwd, nullptr);
-  std::shared_ptr<f::OperatorBase> gop = f::Backward(*fwd, {});
+  std::shared_ptr<f::OperatorBase> gop =
+      f::Backward(*fwd, std::unordered_set<std::string>{});
   ASSERT_TRUE(gop->IsNetOp());
   auto net = static_cast<ops::NetOp *>(gop.get());
@@ -346,13 +349,13 @@ TEST(Backward, net_input_of_network_not_need_grad) {
       {{"mul_result", {"mul_tmp_0"}},
        {"add_result", {"add_tmp_0"}},
        {"Out", {"hidden0"}}},
-      {}));
+      f::AttributeMap{}));
   net.AppendOp(f::OpRegistry::CreateOp(
       "fc", {{"X", {"hidden0"}}, {"W", {"W2"}}, {"b", {"b2"}}},
       {{"mul_result", {"mul_tmp_1"}},
        {"add_result", {"add_tmp_1"}},
        {"Out", {"hidden1"}}},
-      {}));
+      f::AttributeMap{}));
   net.CompleteAddOp();
   auto bwd = Backward(net, {"x"});  // x@GRAD is not need.
   ASSERT_TRUE(bwd->IsNetOp());
@@ -381,12 +384,13 @@ TEST(Backward, net_input_of_network_not_need_grad) {
 TEST(Backward, net_shared_weight) {
   ops::NetOp net;
   net.AppendOp(f::OpRegistry::CreateOp("mul", {{"X", {"x"}}, {"Y", {"w"}}},
-                                       {{"Out", {"out"}}}, {}));
+                                       {{"Out", {"out"}}}, f::AttributeMap{}));
   net.AppendOp(f::OpRegistry::CreateOp("mul", {{"X", {"out"}}, {"Y", {"w"}}},
-                                       {{"Out", {"FinalOut"}}}, {}));
+                                       {{"Out", {"FinalOut"}}},
+                                       f::AttributeMap{}));
   net.CompleteAddOp();
-  auto bwd = f::Backward(net, {});
+  auto bwd = f::Backward(net, std::unordered_set<std::string>{});
   ASSERT_TRUE(bwd->IsNetOp());
   auto bwd_net = static_cast<ops::NetOp *>(bwd.get());
   ASSERT_EQ(3UL, bwd_net->ops_.size());
@@ -394,8 +398,9 @@ TEST(Backward, net_shared_weight) {
 }
 TEST(Backward, op_all_input_are_not_need) {
-  auto fwd = f::OpRegistry::CreateOp(
-      "rowwise_add", {{"X", {"x"}}, {"b", {"b"}}}, {{"Out", {"out"}}}, {});
+  auto fwd =
+      f::OpRegistry::CreateOp("rowwise_add", {{"X", {"x"}}, {"b", {"b"}}},
+                              {{"Out", {"out"}}}, f::AttributeMap{});
   auto backward = f::Backward(*fwd, {"x", "b"});
   ASSERT_TRUE(backward->IsNetOp());
   auto net = static_cast<ops::NetOp *>(backward.get());
@@ -403,8 +408,9 @@ TEST(Backward, op_all_input_are_not_need) {
 }
 TEST(Backward, op_all_output_are_not_need) {
-  auto fwd = f::OpRegistry::CreateOp(
-      "rowwise_add", {{"X", {"x"}}, {"b", {"b"}}}, {{"Out", {"out"}}}, {});
+  auto fwd =
+      f::OpRegistry::CreateOp("rowwise_add", {{"X", {"x"}}, {"b", {"b"}}},
+                              {{"Out", {"out"}}}, f::AttributeMap{});
   auto backward = f::Backward(*fwd, {"out"});
   ASSERT_TRUE(backward->IsNetOp());
   auto net = static_cast<ops::NetOp *>(backward.get());
@@ -412,8 +418,9 @@ TEST(Backward, op_all_output_are_not_need) {
 }
 TEST(Backward, op_part_of_output_are_not_need) {
-  auto fwd = f::OpRegistry::CreateOp("many_output_op", {{"x", {"X"}}},
-                                     {{"y", {"Y"}}, {"z", {"Z"}}}, {});
+  auto fwd =
+      f::OpRegistry::CreateOp("many_output_op", {{"x", {"X"}}},
+                              {{"y", {"Y"}}, {"z", {"Z"}}}, f::AttributeMap{});
   auto backward = f::Backward(*fwd, {"Z"});
   ASSERT_TRUE(backward->IsNetOp());
   auto net = static_cast<ops::NetOp *>(backward.get());
@@ -437,7 +444,7 @@ TEST(Backward, op_part_of_output_are_not_need) {
 TEST(Backward, op_part_of_input_are_not_need) {
   auto fwd = f::OpRegistry::CreateOp("mul", {{"X", {"a"}}, {"Y", {"b"}}},
-                                     {{"Out", {"out"}}}, {});
+                                     {{"Out", {"out"}}}, f::AttributeMap{});
   auto backward = f::Backward(*fwd, {"a"});
   auto &grad_mul = *backward;
   ASSERT_EQ(grad_mul.Type(), "mul_grad");
@@ -458,19 +465,19 @@ TEST(Backward, linear_net_intermediate_variable_has_no_grad) {
       {{"mul_result", {"mul_out1"}},
        {"add_result", {"add_out1"}},
        {"Out", {"out1"}}},
-      {}));
+      f::AttributeMap{}));
   net.AppendOp(f::OpRegistry::CreateOp(
       "fc", {{"X", {"out1"}}, {"W", {"w2"}}, {"b", {"b2"}}},
       {{"mul_result", {"mul_out2"}},
        {"add_result", {"tmp_out2"}},
        {"Out", {"out2"}}},
-      {}));
+      f::AttributeMap{}));
   net.AppendOp(f::OpRegistry::CreateOp(
       "fc", {{"X", {"out2"}}, {"W", {"w3"}}, {"b", {"b3"}}},
       {{"mul_result", {"mul_out3"}},
        {"add_result", {"tmp_out3"}},
        {"Out", {"out3"}}},
-      {}));
+      f::AttributeMap{}));
   net.CompleteAddOp();
   auto backward = f::Backward(net, {"mul_out2", "tmp_out2", "out2"});
@@ -509,7 +516,8 @@ TEST(Backward, simple_single_op) {
   auto target = f::VarDescBind("out");
   target.SetShape({1});
-  auto var_to_grad = AppendBackward(program, target, {});
+  auto var_to_grad =
+      AppendBackward(program, target, std::unordered_set<std::string>{});
   ASSERT_EQ(block->AllOps().size(), 3UL);
   f::OpDescBind *fill_op = block->AllOps()[1];
@@ -546,7 +554,7 @@ TEST(Backward, default_attribute) {
   auto target = f::VarDescBind("out");
   target.SetShape({1});
-  AppendBackward(program, target, {});
+  AppendBackward(program, target, std::unordered_set<std::string>{});
   ASSERT_EQ(block->AllOps().size(), 3UL);
   EXPECT_EQ(boost::get<int>(op->GetAttr("x_num_col_dims")), 1);
@@ -585,7 +593,8 @@ TEST(Backward, simple_mult_op) {
   auto target = f::VarDescBind("out3");
   target.SetShape({1});
   size_t forward_len = block->AllOps().size();
-  auto var_to_grad = AppendBackward(program, target, {});
+  auto var_to_grad =
+      AppendBackward(program, target, std::unordered_set<std::string>{});
   ASSERT_EQ(block->AllOps().size(), 6UL + 1);
   f::OpDescBind *fill_op = block->AllOps()[forward_len];
@@ -817,7 +826,8 @@ TEST(Backward, shared_var) {
   auto target = f::VarDescBind("out3");
   target.SetShape({1});
   size_t forward_len = block->AllOps().size();
-  auto var_to_grad = AppendBackward(program, target, {});
+  auto var_to_grad =
+      AppendBackward(program, target, std::unordered_set<std::string>{});
   ASSERT_EQ(block->AllOps().size(), 8UL);
   f::OpDescBind *fill_op = block->AllOps()[forward_len];
...
@@ -316,8 +316,8 @@ static void InitInferShapeFuncs() {
   for (auto &kern_pair : OperatorWithKernel::AllOpKernels()) {
     auto op_type = kern_pair.first;
     auto &op_info = info_map.at(op_type);
-    auto op =
-        static_cast<OperatorWithKernel *>(op_info.Creator()("", {}, {}, {}));
+    auto op = static_cast<OperatorWithKernel *>(op_info.Creator()(
+        "", VariableNameMap{}, VariableNameMap{}, AttributeMap{}));
     if (op_info.infer_shape_) {  // infer_shape has been registered.
       continue;
     }
...
@@ -261,7 +261,9 @@ class OperatorClone : public paddle::framework::OperatorBase {
 };
 TEST(Operator, Clone) {
-  OperatorClone a("ABC", {}, {}, {});
+  OperatorClone a("ABC", paddle::framework::VariableNameMap{},
+                  paddle::framework::VariableNameMap{},
+                  paddle::framework::AttributeMap{});
   auto b = a.Clone();
   ASSERT_EQ(a.Type(), b->Type());
 }
@@ -54,7 +54,8 @@ TEST(Prune, one_operator) {
   f::ProgramDescBind program;
   f::BlockDescBind *block = program.MutableBlock(0);
-  AddOp("one_one", {{"input", {"a"}}}, {{"output", {"b"}}}, {}, block);
+  AddOp("one_one", {{"input", {"a"}}}, {{"output", {"b"}}}, f::AttributeMap{},
+        block);
   f::ProgramDesc *pdesc = program.Proto();
   f::ProgramDesc pruned;
@@ -71,10 +72,14 @@ TEST(Prune, forward) {
   f::ProgramDescBind program;
   f::BlockDescBind *block = program.MutableBlock(0);
-  AddOp("one_one", {{"input", {"a"}}}, {{"output", {"b"}}}, {}, block);
-  AddOp("one_one", {{"input", {"b"}}}, {{"output", {"c"}}}, {}, block);
-  AddOp("one_one", {{"input", {"c"}}}, {{"output", {"d"}}}, {}, block);
-  AddOp("one_one", {{"input", {"d"}}}, {{"output", {"e"}}}, {}, block);
+  AddOp("one_one", {{"input", {"a"}}}, {{"output", {"b"}}}, f::AttributeMap{},
+        block);
+  AddOp("one_one", {{"input", {"b"}}}, {{"output", {"c"}}}, f::AttributeMap{},
+        block);
+  AddOp("one_one", {{"input", {"c"}}}, {{"output", {"d"}}}, f::AttributeMap{},
+        block);
+  AddOp("one_one", {{"input", {"d"}}}, {{"output", {"e"}}}, f::AttributeMap{},
+        block);
   f::ProgramDesc *pdesc = program.Proto();
@@ -90,11 +95,14 @@ TEST(Prune, multi_input_op) {
   f::ProgramDescBind program;
   f::BlockDescBind *block = program.MutableBlock(0);
-  AddOp("one_one", {{"input", {"a0"}}}, {{"output", {"b0"}}}, {}, block);
-  AddOp("one_one", {{"input", {"a1"}}}, {{"output", {"b1"}}}, {}, block);
-  AddOp("one_one", {{"input", {"a2"}}}, {{"output", {"b2"}}}, {}, block);
-  AddOp("three_one", {{"input", {"b0", "b1", "b2"}}}, {{"output", {"c"}}}, {},
-        block);
+  AddOp("one_one", {{"input", {"a0"}}}, {{"output", {"b0"}}}, f::AttributeMap{},
+        block);
+  AddOp("one_one", {{"input", {"a1"}}}, {{"output", {"b1"}}}, f::AttributeMap{},
+        block);
+  AddOp("one_one", {{"input", {"a2"}}}, {{"output", {"b2"}}}, f::AttributeMap{},
+        block);
+  AddOp("three_one", {{"input", {"b0", "b1", "b2"}}}, {{"output", {"c"}}},
+        f::AttributeMap{}, block);
   f::ProgramDesc *pdesc = program.Proto();
   pdesc->mutable_blocks(0)->mutable_ops(3)->set_is_target(true);
@@ -108,9 +116,12 @@ TEST(Prune, multi_output_op) {
   f::ProgramDescBind program;
   f::BlockDescBind *block = program.MutableBlock(0);
-  AddOp("one_two", {{"input", {"a"}}}, {{"output", {"b", "c"}}}, {}, block);
-  AddOp("one_one", {{"input", {"b"}}}, {{"output", {"b1"}}}, {}, block);
-  AddOp("one_one", {{"input", {"c"}}}, {{"output", {"c1"}}}, {}, block);
+  AddOp("one_two", {{"input", {"a"}}}, {{"output", {"b", "c"}}},
+        f::AttributeMap{}, block);
+  AddOp("one_one", {{"input", {"b"}}}, {{"output", {"b1"}}}, f::AttributeMap{},
+        block);
+  AddOp("one_one", {{"input", {"c"}}}, {{"output", {"c1"}}}, f::AttributeMap{},
+        block);
   f::ProgramDesc *pdesc = program.Proto();
   pdesc->mutable_blocks(0)->mutable_ops(2)->set_is_target(true);
@@ -124,9 +135,12 @@ TEST(Prune, multi_target) {
   f::ProgramDescBind program;
   f::BlockDescBind *block = program.MutableBlock(0);
-  AddOp("one_two", {{"input", {"a"}}}, {{"output", {"b", "c"}}}, {}, block);
-  AddOp("one_one", {{"input", {"b"}}}, {{"output", {"b1"}}}, {}, block);
-  AddOp("one_one", {{"input", {"c"}}}, {{"output", {"c1"}}}, {}, block);
+  AddOp("one_two", {{"input", {"a"}}}, {{"output", {"b", "c"}}},
+        f::AttributeMap{}, block);
+  AddOp("one_one", {{"input", {"b"}}}, {{"output", {"b1"}}}, f::AttributeMap{},
+        block);
+  AddOp("one_one", {{"input", {"c"}}}, {{"output", {"c1"}}}, f::AttributeMap{},
+        block);
   f::ProgramDesc *pdesc = program.Proto();
   pdesc->mutable_blocks(0)->mutable_ops(1)->set_is_target(true);
...
@@ -142,9 +142,9 @@ class ConditionalBlockGradOp : public ConditionalOp {
         continue;
       }
       auto new_in_grad_name = cur_scope.Rename(in_grad_name);
-      auto assign =
-          framework::OpRegistry::CreateOp("assign", {{"X", {new_in_grad_name}}},
-                                          {{"Out", {out_grad_name}}}, {});
+      auto assign = framework::OpRegistry::CreateOp(
+          "assign", {{"X", {new_in_grad_name}}}, {{"Out", {out_grad_name}}},
+          framework::AttributeMap{});
       assign->Run(cur_scope, dev_ctx);
       cur_scope.Rename(new_in_grad_name, in_grad_name);
     }
...
@@ -38,7 +38,10 @@ namespace operators {
 class NetOp : public framework::OperatorBase {
  public:
   static const char kAll[];
-  NetOp() : framework::OperatorBase("plain_net", {}, {}, {}) {}
+  NetOp()
+      : framework::OperatorBase("plain_net", framework::VariableNameMap{},
+                                framework::VariableNameMap{},
+                                framework::AttributeMap{}) {}
   NetOp(const std::string& type, const framework::VariableNameMap& inputs,
         const framework::VariableNameMap& outputs,
...
@@ -38,10 +38,10 @@ TEST(OpKernel, all) {
   net->AppendOp(std::unique_ptr<TestOp>(
       new TestOp("test", {{"X", {"x"}}, {"W", {"w1"}}, {"b", {"b1"}}},
-                 {{"Out", {"y"}}}, {})));
+                 {{"Out", {"y"}}}, framework::AttributeMap{})));
   net->AppendOp(std::unique_ptr<TestOp>(
       new TestOp("test", {{"X", {"y"}}, {"W", {"w2"}}, {"b", {"b2"}}},
-                 {{"Out", {"z"}}}, {})));
+                 {{"Out", {"z"}}}, framework::AttributeMap{})));
   net->CompleteAddOp();
   AssertSameVectorWithoutOrder({"x", "w1", "b1", "w2", "b2"},
@@ -58,7 +58,7 @@ TEST(NetOp, insert_op) {
   NetOp net;
   auto op1 = std::unique_ptr<framework::NOP>(
       new framework::NOP("empty", {{"X", {"x"}}, {"W", {"w1"}}, {"b", {"b1"}}},
-                         {{"Out", {"y"}}}, {}));
+                         {{"Out", {"y"}}}, framework::AttributeMap{}));
   net.AppendOp(*op1);
   net.InsertOp(0, *op1);
   ASSERT_EQ(2UL, net.ops_.size());
@@ -68,10 +68,12 @@ TEST(NetOp, insert_op) {
 TEST(NetOp, Clone) {
   NetOp net;
-  net.AppendOp(
-      std::unique_ptr<framework::NOP>(new framework::NOP{"empty", {}, {}, {}}));
-  net.AppendOp(std::unique_ptr<framework::NOP>(
-      new framework::NOP{"empty2", {}, {}, {}}));
+  net.AppendOp(std::unique_ptr<framework::NOP>(new framework::NOP{
+      "empty", framework::VariableNameMap{}, framework::VariableNameMap{},
+      framework::AttributeMap{}}));
+  net.AppendOp(std::unique_ptr<framework::NOP>(new framework::NOP{
+      "empty2", framework::VariableNameMap{}, framework::VariableNameMap{},
+      framework::AttributeMap{}}));
   net.CompleteAddOp(true);
   auto new_net_op = net.Clone();
   ASSERT_NE(new_net_op, nullptr);
...
@@ -408,7 +408,8 @@ class RecurrentGradOp : public RecurrentBase {
         attrs["value"] = 0.0f;
         auto zero_op = framework::OpRegistry::CreateOp(
-            "fill_constant", {}, {{"Out", {pg_names[param_id]}}}, attrs);
+            "fill_constant", framework::VariableNameMap{},
+            {{"Out", {pg_names[param_id]}}}, attrs);
         zero_op->Run(scope, dev_ctx);
       }
@@ -417,7 +418,7 @@ class RecurrentGradOp : public RecurrentBase {
       auto sum_op = framework::OpRegistry::CreateOp(
           "sum", {{"X", {pg_names[param_id], new_inside_name}}},
-          {{"Out", {pg_names[param_id]}}}, {});
+          {{"Out", {pg_names[param_id]}}}, framework::AttributeMap{});
       sum_op->Run(cur_scope, dev_ctx);
       cur_scope.Rename(new_inside_name, inside_grad_name);
...
@@ -187,7 +187,8 @@ class WhileGradOp : public framework::OperatorBase {
           attrs["value"] = 0.0f;
           auto zero_op = framework::OpRegistry::CreateOp(
-              "fill_constant", {}, {{"Out", {pg_names[param_id]}}}, attrs);
+              "fill_constant", framework::VariableNameMap{},
+              {{"Out", {pg_names[param_id]}}}, attrs);
           zero_op->Run(scope, dev_ctx);
         }
       }
@@ -195,7 +196,7 @@ class WhileGradOp : public framework::OperatorBase {
       auto new_inside_name = cur_scope.Rename(inside_grad_name);
       auto sum_op = framework::OpRegistry::CreateOp(
           "sum", {{"X", {pg_names[param_id], new_inside_name}}},
-          {{"Out", {pg_names[param_id]}}}, {});
+          {{"Out", {pg_names[param_id]}}}, framework::AttributeMap{});
       sum_op->Run(cur_scope, dev_ctx);
       cur_scope.Rename(new_inside_name, inside_grad_name);
     }
...