Commit f4858157, authored by Yi Wang, committed by GitHub

Merge pull request #10 from qingqing01/framework_proto

Follow yang's comments to fix grad_op_builder.
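Summary of the change: `TransOpArg` previously copied every proto argument into the grad op and substituted `kEmptyVarName` placeholders for variables belonging to `no_gradient` arguments. After this commit, such arguments are skipped outright when copying forward inputs/outputs (`is_grad == false`), so no placeholder slots are created at all. The commit also reserves capacity for each destination slot before filling it and deletes the now-unused `VarIndexMap` alias from both `grad_op_builder.cc` and `op_registry.h`.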
@@ -21,8 +21,6 @@ namespace framework {
 class OpRegistry;
 
-using VarIndexMap = std::unordered_map<std::string, int>;
-
 enum class OpArgType { IN, OUT };
 
 static void TransOpArg(const OperatorBase* src_op, OperatorBase* dst_op,
@@ -30,19 +28,19 @@ static void TransOpArg(const OperatorBase* src_op, OperatorBase* dst_op,
                        bool is_grad) {
   const auto& src_inout =
       src_type == OpArgType::IN ? src_op->inputs_ : src_op->outputs_;
   auto& dst_inout =
       dst_type == OpArgType::IN ? dst_op->inputs_ : dst_op->outputs_;
   const OpProto& proto = OpProtos().at(src_op->type_);
   const auto& src_arg_list =
       src_type == OpArgType::IN ? proto.inputs() : proto.outputs();
   for (const auto& arg : src_arg_list) {
-    const std::string& src_name = arg.name();
+    if (arg.no_gradient() && !is_grad) continue;
+    const std::string src_name = arg.name();
     std::string dst_name = is_grad ? GradVarName(src_name) : src_name;
+    dst_inout[dst_name].reserve(src_inout.at(src_name).size());
     for (auto& var_name : src_inout.at(src_name)) {
-      std::string s = is_grad ? GradVarName(var_name)
-                              : (arg.no_gradient() ? kEmptyVarName : var_name);
+      std::string s = is_grad ? GradVarName(var_name) : var_name;
       dst_inout[dst_name].emplace_back(s);
     }
   }
 }
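To make the behavioral difference concrete, here is a minimal, self-contained sketch, not the PaddlePaddle sources: `Arg`, `Translate`, and the `@GRAD` suffix are simplified stand-ins for the framework's proto argument type, `TransOpArg`, and its `GradVarName` convention.

```cpp
#include <iostream>
#include <string>
#include <unordered_map>
#include <vector>

// Simplified stand-in for an OpProto argument entry.
struct Arg {
  std::string name;
  bool no_gradient;
};

// Simplified stand-in for the framework's GradVarName helper.
std::string GradVarName(const std::string& name) { return name + "@GRAD"; }

// New behavior: an argument marked no_gradient is skipped entirely when
// copying forward variables into the grad op (is_grad == false), instead of
// being kept as a slot full of kEmptyVarName placeholders.
std::unordered_map<std::string, std::vector<std::string>> Translate(
    const std::vector<Arg>& args,
    const std::unordered_map<std::string, std::vector<std::string>>& src,
    bool is_grad) {
  std::unordered_map<std::string, std::vector<std::string>> dst;
  for (const auto& arg : args) {
    if (arg.no_gradient && !is_grad) continue;  // skip the whole argument
    const std::string dst_name = is_grad ? GradVarName(arg.name) : arg.name;
    dst[dst_name].reserve(src.at(arg.name).size());
    for (const auto& var_name : src.at(arg.name)) {
      dst[dst_name].emplace_back(is_grad ? GradVarName(var_name) : var_name);
    }
  }
  return dst;
}

int main() {
  std::vector<Arg> inputs = {{"In1", false}, {"In2_mult", true}};
  std::unordered_map<std::string, std::vector<std::string>> src = {
      {"In1", {"in1"}}, {"In2_mult", {"in2_1", "in2_2"}}};
  // Forward-input section of the grad op: "In2_mult" disappears entirely.
  for (const auto& kv : Translate(inputs, src, /*is_grad=*/false)) {
    std::cout << kv.first << " -> " << kv.second.size() << " vars\n";
  }
}
```

Running this prints only the `In1` slot; under the old logic, `In2_mult` would have survived as a slot holding two `kEmptyVarName` placeholders, which is exactly the expectation the test below deletes.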
@@ -110,15 +110,12 @@ TEST(GradOpBuilder, IOIgnoredInGradient) {
       f::OpRegistry::CreateGradOp(*test_op);
   // 'In2' and 'Out2' are ignored in gradient calculating
-  ASSERT_EQ(grad_test_op->inputs_.size(), 3UL + 2UL + 2UL);
+  ASSERT_EQ(grad_test_op->inputs_.size(), 2UL + 1UL + 2UL);
   EXPECT_EQ(grad_test_op->Input("In1"), "in1");
-  EXPECT_EQ(grad_test_op->Inputs("In2_mult"),
-            std::vector<std::string>({f::kEmptyVarName, f::kEmptyVarName}));
   EXPECT_EQ(grad_test_op->Inputs("In3_mult"),
             std::vector<std::string>({"in3_1", "in3_2"}));
   EXPECT_EQ(grad_test_op->Inputs("Out1_mult"),
             std::vector<std::string>({"out1_1", "out1_2"}));
-  EXPECT_EQ(grad_test_op->Input("Out2"), f::kEmptyVarName);
   EXPECT_EQ(grad_test_op->Inputs(f::GradVarName("Out1_mult")),
             std::vector<std::string>(
                 {f::GradVarName("out1_1"), f::GradVarName("out1_2")}));
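The revised size check follows from the skip logic: the grad op's inputs now consist of 2 forward-input slots (`In1` and `In3_mult`, with `In2_mult` dropped), 1 forward-output slot (`Out1_mult`, with `Out2` dropped), and 2 output-gradient slots (the `no_gradient` check only fires when `is_grad` is false, so both outputs still contribute gradient slots), hence `2UL + 1UL + 2UL`. Under the old placeholder scheme every slot survived, giving `3UL + 2UL + 2UL`.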
@@ -120,7 +120,6 @@ class OpProtoAndCheckerMaker {
 class OpRegistry {
   using OpCreator = std::function<OperatorBase*()>;
-  using VarIndexMap = std::unordered_map<std::string, int>;
   using VarNameMap = std::unordered_map<std::string, std::vector<std::string>>;
 
  public:
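This header hunk mirrors the first one: with `grad_op_builder.cc` no longer mapping argument names to indices, the `VarIndexMap` alias is deleted from `OpRegistry` as well, leaving `VarNameMap` as the only name-mapping type in play.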