Commit b228b463 authored by Yu Yang

Make const variables in operator.h fit google style

* Non-POD instances are forbidden at global scope.

See
https://google.github.io/styleguide/cppguide.html#Static_and_Global_Variables
Parent 0d0bc357
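In outline, the change replaces global std::string constants (which need dynamic initialization and destruction) with constexpr char arrays, which are PODs; the operator.h hunk below shows the real declarations, and this is the pattern in miniature:

    // Before: a global std::string has a non-trivial constructor and
    // destructor, which the style guide forbids for static storage duration.
    const std::string kGradVarSuffix = "@GRAD";

    // After: a constexpr char array is a POD; nothing runs at program
    // startup or exit on its behalf.
    constexpr char kGradVarSuffix[] = "@GRAD";

Call sites that relied on std::string member functions and operator+ are updated accordingly throughout the diff.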
@@ -133,8 +133,8 @@ std::shared_ptr<OperatorBase> BackwardRecursive(
   std::shared_ptr<OperatorBase> grad_op = OpRegistry::CreateGradOp(forwardOp);
   for (std::string& grad_input : grad_op->inputs_) {
     if (no_grad_names.count(grad_input)) {
-      std::string prefix =
-          grad_input.substr(0, grad_input.size() - kGradVarSuffix.size());
+      std::string prefix = grad_input.substr(
+          0, grad_input.size() - sizeof(kGradVarSuffix) / sizeof(char));
       grad_input = prefix + kZeroVarSuffix;
       // If part of input gradient of that operator is not calculated, fill
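For reference: for a constexpr char array, sizeof counts the trailing '\0', so sizeof(kGradVarSuffix) / sizeof(char) is the literal length plus one, unlike the old kGradVarSuffix.size(). A standalone sketch (not part of the commit) illustrating the difference:

    #include <cstdio>

    constexpr char kGradVarSuffix[] = "@GRAD";  // 5 visible characters

    int main() {
      // sizeof includes the implicit NUL terminator, so this prints 6.
      std::printf("%zu\n", sizeof(kGradVarSuffix) / sizeof(char));
      // Subtracting one gives the strlen-style length, 5.
      std::printf("%zu\n", sizeof(kGradVarSuffix) - 1);
      return 0;
    }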
@@ -167,7 +167,7 @@ std::shared_ptr<OperatorBase> Backward(
   std::unordered_set<std::string> no_grad_names;
   no_grad_names.reserve(no_grad_vars.size());
-  no_grad_names.insert(kEmptyVarName + kGradVarSuffix);
+  no_grad_names.insert(std::string(kEmptyVarName) + kGradVarSuffix);
   for (auto& name : no_grad_vars) {
     no_grad_names.insert(name + kGradVarSuffix);
@@ -171,10 +171,10 @@ TEST(Backward, simple_op_grad) {
   ASSERT_EQ(4UL, gop->inputs_.size());
   ASSERT_EQ(f::kEmptyVarName, gop->inputs_[0]);
   ASSERT_EQ("rowwise_add_grad", gop->type_);
-  ASSERT_EQ("X" + f::kGradVarSuffix, gop->outputs_[0]);
-  ASSERT_EQ("b" + f::kGradVarSuffix, gop->outputs_[1]);
-  ASSERT_EQ("X" + f::kGradVarSuffix, gop->Output("X" + f::kGradVarSuffix));
+  ASSERT_EQ(f::GradVarName("X"), gop->outputs_[0]);
+  ASSERT_EQ(f::GradVarName("b"), gop->outputs_[1]);
+  ASSERT_EQ(f::GradVarName("X"), gop->Output(f::GradVarName("X")));
 }
 
 TEST(Backward, simple_op_not_need_grad) {
@@ -182,7 +182,7 @@ TEST(Backward, simple_op_not_need_grad) {
   ASSERT_NE(fwd, nullptr);
   auto gop = f::Backward(*fwd, {"X"});
   ASSERT_EQ(std::find(gop->outputs_.begin(), gop->outputs_.end(),
-                      "X" + f::kGradVarSuffix),
+                      f::GradVarName("X")),
             gop->outputs_.end());
 
   auto no_input_gop = f::Backward(*fwd, {"X", "b"});
@@ -250,18 +250,18 @@ TEST(Backward, net_input_of_network_not_need_grad) {
   all_output.erase(f::kEmptyVarName);
 
   for (auto &out : {"W1", "b1", "hidden0", "W2", "b2"}) {
-    ASSERT_NE(all_output.find(out + f::kGradVarSuffix), all_output.end());
+    ASSERT_NE(all_output.find(f::GradVarName(out)), all_output.end());
   }
 
   // Not Generated X
-  ASSERT_EQ(all_output.find("X" + f::kGradVarSuffix), all_output.end());
+  ASSERT_EQ(all_output.find(f::GradVarName("X")), all_output.end());
 
   ASSERT_EQ(2UL, bwd_net->ops_.size());
   ASSERT_TRUE(bwd_net->ops_[1]->IsNetOp());
   auto first_fc_grad = static_cast<ops::NetOp *>(bwd_net->ops_[1].get());
   ASSERT_EQ(3UL, first_fc_grad->ops_.size());
   ASSERT_EQ(f::kEmptyVarName,
-            first_fc_grad->ops_[2]->Output("A" + f::kGradVarSuffix));
+            first_fc_grad->ops_[2]->Output(f::GradVarName("A")));
 }
 
 TEST(Backward, net_shared_weight) {
@@ -313,15 +313,15 @@ TEST(Backward, op_part_of_output_are_not_need) {
   ASSERT_EQ(1UL, fill_zero.inputs_.size());
   ASSERT_EQ("Z", fill_zero.inputs_[0]);
   ASSERT_EQ(1UL, fill_zero.outputs_.size());
-  ASSERT_EQ("Z" + f::kZeroVarSuffix, fill_zero.outputs_[0]);
+  ASSERT_EQ(std::string("Z") + f::kZeroVarSuffix, fill_zero.outputs_[0]);
 
   auto &d_many_out = *net->ops_[1];
   ASSERT_EQ("many_output_op_grad", d_many_out.type_);
   ASSERT_EQ(1UL + 2UL + 2UL, d_many_out.inputs_.size());  // I/O/OG
-  ASSERT_EQ("Z" + f::kZeroVarSuffix, d_many_out.Input("z" + f::kGradVarSuffix));
-  ASSERT_EQ("Y" + f::kGradVarSuffix, d_many_out.Input("y" + f::kGradVarSuffix));
-  ASSERT_EQ("X" + f::kGradVarSuffix,
-            d_many_out.Output("x" + f::kGradVarSuffix));
+  ASSERT_EQ(std::string("Z") + f::kZeroVarSuffix,
+            d_many_out.Input(f::GradVarName("z")));
+  ASSERT_EQ(f::GradVarName("Y"), d_many_out.Input(f::GradVarName("y")));
+  ASSERT_EQ(f::GradVarName("X"), d_many_out.Output(f::GradVarName("x")));
 }
 
 TEST(Backward, op_part_of_input_are_not_need) {
@@ -331,10 +331,9 @@ TEST(Backward, op_part_of_input_are_not_need) {
   ASSERT_EQ(grad_mul.type_, "mul_grad");
   ASSERT_EQ(grad_mul.inputs_.size(), 2UL + 1UL + 1UL);
   ASSERT_EQ(grad_mul.outputs_.size(), 2UL);
-  ASSERT_EQ(grad_mul.Output("A" + f::kGradVarSuffix), f::kEmptyVarName);
-  ASSERT_EQ(grad_mul.Output("B" + f::kGradVarSuffix), "b" + f::kGradVarSuffix);
-  ASSERT_EQ(grad_mul.Input("Out" + f::kGradVarSuffix),
-            "out" + f::kGradVarSuffix);
+  ASSERT_EQ(grad_mul.Output(f::GradVarName("A")), f::kEmptyVarName);
+  ASSERT_EQ(grad_mul.Output(f::GradVarName("B")), f::GradVarName("b"));
+  ASSERT_EQ(grad_mul.Input(f::GradVarName("Out")), f::GradVarName("out"));
   ASSERT_EQ(grad_mul.Input("A"), "a");
   ASSERT_EQ(grad_mul.Input("B"), "b");
   ASSERT_EQ(grad_mul.Input("Out"), "out");
@@ -83,21 +83,19 @@ TEST(GradOpBuilder, MutiInOut) {
   EXPECT_EQ(grad_test_op->Input("Out1"), "out1");
   EXPECT_EQ(grad_test_op->Inputs("Out2_mult"),
             std::vector<std::string>({"out2_1", "out2_2"}));
-  EXPECT_EQ(grad_test_op->Input("Out1" + f::kGradVarSuffix),
-            "out1" + f::kGradVarSuffix);
-  EXPECT_EQ(grad_test_op->Inputs("Out2_mult" + f::kGradVarSuffix),
-            std::vector<std::string>(
-                {"out2_1" + f::kGradVarSuffix, "out2_2" + f::kGradVarSuffix}));
+  EXPECT_EQ(grad_test_op->Input(f::GradVarName("Out1")),
+            f::GradVarName("out1"));
+  EXPECT_EQ(grad_test_op->Inputs(f::GradVarName("Out2_mult")),
+            std::vector<std::string>(
+                {f::GradVarName("out2_1"), f::GradVarName("out2_2")}));
   ASSERT_EQ(grad_test_op->outputs_.size(), 5UL);
-  EXPECT_EQ(grad_test_op->Output("In1" + f::kGradVarSuffix),
-            "in1" + f::kGradVarSuffix);
-  EXPECT_EQ(grad_test_op->Outputs("In2_mult" + f::kGradVarSuffix),
-            std::vector<std::string>({"in2_1" + f::kGradVarSuffix,
-                                      "in2_2" + f::kGradVarSuffix,
-                                      "in2_3" + f::kGradVarSuffix}));
-  EXPECT_EQ(grad_test_op->Output("In3" + f::kGradVarSuffix),
-            "in3" + f::kGradVarSuffix);
+  EXPECT_EQ(grad_test_op->Output(f::GradVarName("In1")), f::GradVarName("in1"));
+  EXPECT_EQ(grad_test_op->Outputs(f::GradVarName("In2_mult")),
+            std::vector<std::string>({f::GradVarName("in2_1"),
+                                      f::GradVarName("in2_2"),
+                                      f::GradVarName("in2_3")}));
+  EXPECT_EQ(grad_test_op->Output(f::GradVarName("In3")), f::GradVarName("in3"));
 }
 
 TEST(GradOpBuilder, IOIgnoredInGradient) {
@@ -119,19 +117,18 @@ TEST(GradOpBuilder, IOIgnoredInGradient) {
   EXPECT_EQ(grad_test_op->Inputs("Out1_mult"),
             std::vector<std::string>({"out1_1", "out1_2"}));
   EXPECT_EQ(grad_test_op->Input("Out2"), f::kEmptyVarName);
-  EXPECT_EQ(grad_test_op->Inputs("Out1_mult" + f::kGradVarSuffix),
-            std::vector<std::string>(
-                {"out1_1" + f::kGradVarSuffix, "out1_2" + f::kGradVarSuffix}));
-  EXPECT_EQ(grad_test_op->Input("Out2" + f::kGradVarSuffix),
-            "out2" + f::kGradVarSuffix);
+  EXPECT_EQ(grad_test_op->Inputs(f::GradVarName("Out1_mult")),
+            std::vector<std::string>(
+                {f::GradVarName("out1_1"), f::GradVarName("out1_2")}));
+  EXPECT_EQ(grad_test_op->Input(f::GradVarName("Out2")),
+            f::GradVarName("out2"));
   ASSERT_EQ(grad_test_op->outputs_.size(), 5UL);
-  EXPECT_EQ(grad_test_op->Output("In1" + f::kGradVarSuffix),
-            "in1" + f::kGradVarSuffix);
-  EXPECT_EQ(grad_test_op->Outputs("In2_mult" + f::kGradVarSuffix),
-            std::vector<std::string>(
-                {"in2_1" + f::kGradVarSuffix, "in2_2" + f::kGradVarSuffix}));
-  EXPECT_EQ(grad_test_op->Outputs("In3_mult" + f::kGradVarSuffix),
-            std::vector<std::string>(
-                {"in3_1" + f::kGradVarSuffix, "in3_2" + f::kGradVarSuffix}));
+  EXPECT_EQ(grad_test_op->Output(f::GradVarName("In1")), f::GradVarName("in1"));
+  EXPECT_EQ(grad_test_op->Outputs(f::GradVarName("In2_mult")),
+            std::vector<std::string>(
+                {f::GradVarName("in2_1"), f::GradVarName("in2_2")}));
+  EXPECT_EQ(grad_test_op->Outputs(f::GradVarName("In3_mult")),
+            std::vector<std::string>(
+                {f::GradVarName("in3_1"), f::GradVarName("in3_2")}));
 }
@@ -33,19 +33,19 @@ namespace paddle {
 namespace framework {
 
 /// If a variable is a empty variable, that name will be used.
-const std::string kEmptyVarName = "@EMPTY@";
+constexpr char kEmptyVarName[] = "@EMPTY@";
 
 /// If a variable is a temporary variable, that name will be set in Python,
 /// but it will be convert to a unique name in scope after OpCreator.
-const std::string kTempVarName = "@TEMP@";
+constexpr char kTempVarName[] = "@TEMP@";
 
 /// If a variable's name has a certain suffix, it means that the
 /// variable is the gradient of another varibale.
 /// e.g. Variable "x@GRAD" is the gradient of varibale "x".
-const std::string kGradVarSuffix = "@GRAD";
+constexpr char kGradVarSuffix[] = "@GRAD";
 
 /// Variables with this suffix are supposed to be filled up with zeros.
-const std::string kZeroVarSuffix = "@ZERO";
+constexpr char kZeroVarSuffix[] = "@ZERO";
 
 inline std::string GradVarName(const std::string& var_name) {
   return var_name + kGradVarSuffix;
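Since plain char arrays do not define operator+ with each other, expressions like kEmptyVarName + kGradVarSuffix no longer compile after this change; one operand must be converted to std::string first, or the GradVarName helper used. A minimal standalone sketch mirroring the declarations above:

    #include <cassert>
    #include <string>

    constexpr char kGradVarSuffix[] = "@GRAD";
    constexpr char kZeroVarSuffix[] = "@ZERO";

    inline std::string GradVarName(const std::string& var_name) {
      return var_name + kGradVarSuffix;  // std::string + const char* is fine
    }

    int main() {
      assert(GradVarName("X") == "X@GRAD");
      // "Z" + kZeroVarSuffix would be ill-formed (adding two pointers);
      // wrap one side in std::string, as the updated call sites do:
      assert(std::string("Z") + kZeroVarSuffix == "Z@ZERO");
      return 0;
    }

This is why the test hunks above wrap literals in std::string(...) or switch to f::GradVarName(...).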
@@ -41,7 +41,7 @@ class MeanOpMaker : public framework::OpProtoAndCheckerMaker {
 class MeanGradOp : public framework::OperatorWithKernel {
  protected:
   void InferShape(const framework::InferShapeContext &ctx) const override {
-    ctx.Output<Tensor>("X" + framework::kGradVarSuffix)
+    ctx.Output<Tensor>(framework::GradVarName("X"))
         ->Resize(ctx.Input<Tensor>("X")->dims());
   }
 };
@@ -48,10 +48,10 @@ template <typename Place, typename T>
 class MeanGradKernel : public framework::OpKernel {
  public:
   void Compute(const framework::ExecutionContext& context) const override {
-    auto OG = context.Input<Tensor>("Out" + framework::kGradVarSuffix);
+    auto OG = context.Input<Tensor>(framework::GradVarName("Out"));
     PADDLE_ENFORCE(framework::product(OG->dims()) == 1,
                    "Mean Gradient should be scalar");
-    auto IG = context.Output<Tensor>("X" + framework::kGradVarSuffix);
+    auto IG = context.Output<Tensor>(framework::GradVarName("X"));
     IG->mutable_data<T>(context.GetPlace());
     T ig_size = (T)framework::product(IG->dims());