From 509d3209dbe407ebf8be798af4caee4850f5c417 Mon Sep 17 00:00:00 2001
From: Yu Yang
Date: Sat, 12 Aug 2017 14:42:58 +0800
Subject: [PATCH] Fix CI and style

---
 paddle/framework/backward.cc             |  2 +-
 paddle/framework/grad_op_builder_test.cc | 14 ++++++--------
 paddle/framework/op_registry.h           |  2 +-
 paddle/framework/operator.h              |  4 ++--
 paddle/framework/pybind.cc               | 10 ++++++----
 5 files changed, 16 insertions(+), 16 deletions(-)

diff --git a/paddle/framework/backward.cc b/paddle/framework/backward.cc
index 36cc616358..315bdde76d 100644
--- a/paddle/framework/backward.cc
+++ b/paddle/framework/backward.cc
@@ -31,7 +31,7 @@ static void ForEachVarName(Map& names, T callback) {
 }
 
 static bool AllInSet(
-    const std::unordered_map<std::string, std::vector<std::string>>& names,
+    const std::map<std::string, std::vector<std::string>>& names,
     const std::string& suffix, const std::unordered_set<std::string>& set) {
   bool all_in_set = true;
   ForEachVarName(names, [&all_in_set, &set, &suffix](const std::string& n) {
diff --git a/paddle/framework/grad_op_builder_test.cc b/paddle/framework/grad_op_builder_test.cc
index 85e745322b..f54a66110f 100644
--- a/paddle/framework/grad_op_builder_test.cc
+++ b/paddle/framework/grad_op_builder_test.cc
@@ -68,10 +68,9 @@ REGISTER_GRADIENT_OP(io_ignored, io_ignored_grad, f::NOP);
 
 TEST(GradOpBuilder, MutiInOut) {
   std::shared_ptr<f::OperatorBase> test_op(f::OpRegistry::CreateOp(
-      "mult_io",
-      {{"In1", {"in1"}},
-       {"In2_mult", {"in2_1", "in2_2", "in2_3"}},
-       {"In3", {"in3"}}},
+      "mult_io", {{"In1", {"in1"}},
+                  {"In2_mult", {"in2_1", "in2_2", "in2_3"}},
+                  {"In3", {"in3"}}},
       {{"Out1", {"out1"}}, {"Out2_mult", {"out2_1", "out2_2"}}}, {}));
   std::shared_ptr<f::OperatorBase> grad_test_op =
       f::OpRegistry::CreateGradOp(*test_op);
@@ -101,10 +100,9 @@ TEST(GradOpBuilder, MutiInOut) {
 
 TEST(GradOpBuilder, IOIgnoredInGradient) {
   std::shared_ptr<f::OperatorBase> test_op(f::OpRegistry::CreateOp(
-      "io_ignored",
-      {{"In1", {"in1"}},
-       {"In2_mult", {"in2_1", "in2_2"}},
-       {"In3_mult", {"in3_1", "in3_2"}}},
+      "io_ignored", {{"In1", {"in1"}},
+                     {"In2_mult", {"in2_1", "in2_2"}},
+                     {"In3_mult", {"in3_1", "in3_2"}}},
       {{"Out1_mult", {"out1_1", "out1_2"}}, {"Out2", {"out2"}}}, {}));
   std::shared_ptr<f::OperatorBase> grad_test_op =
       f::OpRegistry::CreateGradOp(*test_op);
diff --git a/paddle/framework/op_registry.h b/paddle/framework/op_registry.h
index f2236e60d8..f6b71a4efd 100644
--- a/paddle/framework/op_registry.h
+++ b/paddle/framework/op_registry.h
@@ -118,7 +118,7 @@ class OpProtoAndCheckerMaker {
 
 class OpRegistry {
   using OpCreator = std::function<OperatorBase*()>;
-  using VarNameMap = std::unordered_map<std::string, std::vector<std::string>>;
+  using VarNameMap = std::map<std::string, std::vector<std::string>>;
 
  public:
   template <typename OpType, typename ProtoMakerType>
diff --git a/paddle/framework/operator.h b/paddle/framework/operator.h
index 6dc331b2f0..5ed199adc6 100644
--- a/paddle/framework/operator.h
+++ b/paddle/framework/operator.h
@@ -143,11 +143,11 @@ class OperatorBase {
   // I (Inputs)
   // O (Outputs)
   // OG (Output Gradients)
-  std::unordered_map<std::string, std::vector<std::string>> inputs_;
+  std::map<std::string, std::vector<std::string>> inputs_;
 
   // NOTE: in case of OpGrad, outputs_ contains
   // IG (Inputs Gradients)
-  std::unordered_map<std::string, std::vector<std::string>> outputs_;
+  std::map<std::string, std::vector<std::string>> outputs_;
   AttributeMap attrs_;
 };
 
diff --git a/paddle/framework/pybind.cc b/paddle/framework/pybind.cc
index 05ed603e1a..07b42c8371 100644
--- a/paddle/framework/pybind.cc
+++ b/paddle/framework/pybind.cc
@@ -57,15 +57,17 @@ void ExposeOperator(ClassType &m) {
       })
       .def("outputs",
           [](const typename ClassType::type &op)
-               -> std::unordered_map<std::string, std::vector<std::string>> {
+               -> std::map<std::string, std::vector<std::string>> {
             return op.outputs_;
           })
       .def("inputs",
           [](const typename ClassType::type &op) { return op.inputs_; })
       .def("__str__", &ClassType::type::DebugString)
-      .def("no_intermediate_outputs", [](const typename ClassType::type &op) {
-        return op.OutputVars(false);
-      });
+      .def("no_intermediate_outputs",
+           [](const typename ClassType::type &op) {
+             return op.OutputVars(false);
+           })
+      .def("support_gpu", &ClassType::type::SupportGPU);
 }
 
 static size_t UniqueIntegerGenerator() {
-- 
GitLab