/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License. */

#include "paddle/framework/backward.h"

#include <gtest/gtest.h>
#include "paddle/framework/op_registry.h"
#include "paddle/operators/net_op.h"

namespace paddle {
namespace framework {

using DeviceContext = platform::DeviceContext;

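// The op makers below declare only the input/output interface of each test
// operator; all computation is stubbed out with NOP kernels at registration,
// since these tests exercise backward-graph construction, not execution.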
class RowWiseAddOpMaker : public OpProtoAndCheckerMaker {
 public:
  RowWiseAddOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input X of Add");
    AddInput("b", "Bias of Add");
    AddOutput("Out", "Out of Add");
    AddComment("Add Op");
  }
};

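// Describes how to build rowwise_add_grad from the forward op: Out@GRAD is
// consumed, and X@GRAD and b@GRAD are produced.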
class RowWiseAddGradMaker : public SingleGradOpDescMaker {
 public:
  using SingleGradOpDescMaker::SingleGradOpDescMaker;

 protected:
  std::unique_ptr<OpDescBind> Apply() const override {
    auto grad_op = new OpDescBind();
    grad_op->SetInput(GradVarName("Out"), OutputGrad("Out"));
    grad_op->SetOutput(GradVarName("X"), InputGrad("X"));
    grad_op->SetOutput(GradVarName("b"), InputGrad("b"));
    grad_op->SetType("rowwise_add_grad");
    return std::unique_ptr<OpDescBind>(grad_op);
  }
};

class MulOpMaker : public OpProtoAndCheckerMaker {
 public:
  MulOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "A");
    AddInput("Y", "B");
    AddOutput("Out", "Out");
    AddComment("Mul");
  }
};

class SigmoidOpMaker : public OpProtoAndCheckerMaker {
 public:
  SigmoidOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "X");
    AddOutput("Out", "Y");
    AddComment("Sigmoid");
  }
};

class NoGradOpMaker : public OpProtoAndCheckerMaker {
 public:
  NoGradOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "X input");
    AddOutput("Out", "Y output");
    AddComment("NoGradOp, same input output. no Grad");
  }
};

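// A composite "fc" network op: mul(X, W), then rowwise_add with bias b if one
// is given, then sigmoid. When b is absent, the add step is skipped and the
// unused add_result output is renamed away.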
class FcOp : public operators::NetOp {
 public:
  FcOp(const std::string &type, const VariableNameMap &inputs,
       const VariableNameMap &outputs, const AttributeMap &attrs)
      : NetOp(type, inputs, outputs, attrs) {
    AppendOp(OpRegistry::CreateOp("mul",
                                  {{"X", {Input("X")}}, {"Y", {Input("W")}}},
                                  {{"Out", {Output("mul_result")}}}, {}));
    auto input_b = Inputs("b");
    std::string before_act = "mul_result";
    if (input_b.size() != 0) {
      AppendOp(OpRegistry::CreateOp(
          "rowwise_add", {{"X", {Output("mul_result")}}, {"b", {input_b[0]}}},
          {{"Out", {Output("add_result")}}}, {}));
      before_act = "add_result";
    } else {
      auto out_varname = Output("add_result");
      if (out_varname != kEmptyVarName) {
        this->Rename(out_varname, kEmptyVarName);
      }
    }

    AppendOp(OpRegistry::CreateOp("sigmoid", {{"X", {Output(before_act)}}},
                                  {{"Out", {Output("Out")}}}, {}));
    CompleteAddOp(false);
  }
};

class FcOpMaker : public OpProtoAndCheckerMaker {
 public:
  FcOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "x");
    AddInput("W", "w");
    AddInput("b", "b");
    AddOutput("mul_result", "").AsIntermediate();
    AddOutput("add_result", "").AsIntermediate();
    AddOutput("Out", "");
    AddComment("");
  }
};

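// One input, two outputs; used to test backward when only part of the
// outputs need gradients.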
class ManyOutputOpMaker : public OpProtoAndCheckerMaker {
 public:
  ManyOutputOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("x", "x");
    AddOutput("y", "y");
    AddOutput("z", "z");
    AddComment("");
  }
};

class FillZeroOpMaker : public OpProtoAndCheckerMaker {
 public:
  FillZeroOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "x");
    AddOutput("Y", "out");
    AddComment("");
  }
};

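// A sum op with a duplicable input; backward inserts it to accumulate the
// gradient of a variable shared by several ops.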
class SumOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  SumOpMaker(framework::OpProto *proto, framework::OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "the input tensors of sum operator.").AsDuplicable();
    AddOutput("Out", "the output tensor of sum operator.");
    AddComment("");
  }
};

}  // namespace framework
}  // namespace paddle

namespace f = paddle::framework;
namespace ops = paddle::operators;
using EnforceNotMet = paddle::platform::EnforceNotMet;
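// Register the test operators. Forward and gradient kernels are all NOP;
// only the structure of the generated backward graph is checked.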
REGISTER_OPERATOR(rowwise_add, f::NOP, f::RowWiseAddOpMaker,
                  f::RowWiseAddGradMaker);
REGISTER_OPERATOR(rowwise_add_grad, f::NOP);
REGISTER_OP(mul, f::NOP, f::MulOpMaker, mul_grad, f::NOP);
REGISTER_OP(sigmoid, f::NOP, f::SigmoidOpMaker, sigmoid_grad, f::NOP);
REGISTER_OP_WITHOUT_GRADIENT(nograd, f::NOP, f::NoGradOpMaker);
REGISTER_OP_WITHOUT_GRADIENT(fill_zeros_like, f::NOP, f::FillZeroOpMaker);
REGISTER_OP(sum, f::NOP, f::SumOpMaker, sum_grad, f::NOP);
REGISTER_OP_WITHOUT_GRADIENT(fc, f::FcOp, f::FcOpMaker);
REGISTER_OP(many_output_op, f::NOP, f::ManyOutputOpMaker, many_output_op_grad,
            f::NOP);

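// A single op whose inputs may not need gradients: the corresponding grad
// outputs degenerate to kEmptyVarName, and if no input needs a gradient the
// backward pass yields an empty net.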
TEST(Backward, simple_op_not_need_grad) {
  auto fwd = f::OpRegistry::CreateOp(
      "rowwise_add", {{"X", {"x"}}, {"b", {"b"}}}, {{"Out", {"out"}}}, {});
  ASSERT_NE(fwd, nullptr);
  auto gop = f::Backward(*fwd, {"x"});
  ASSERT_EQ(gop->Output(f::GradVarName("X")), f::kEmptyVarName);

  auto no_input_gop = f::Backward(*fwd, {"x", "b"});
  ASSERT_NE(no_input_gop, nullptr);
  ASSERT_TRUE(no_input_gop->IsNetOp());
  ASSERT_EQ(0UL, static_cast<ops::NetOp *>(no_input_gop.get())->ops_.size());
}

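// Backward of the composite fc op with a bias: expect three grad ops in
// reverse topological order.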
TEST(Backward, net_fc_backward_normal) {
  std::shared_ptr<f::OperatorBase> fwd =
      f::OpRegistry::CreateOp("fc", {{"X", {"x"}}, {"W", {"w"}}, {"b", {"b"}}},
                              {{"mul_result", {"mul_res"}},
                               {"add_result", {"add_re"}},
                               {"Out", {"out"}}},
                              {});
  ASSERT_NE(fwd, nullptr);
  std::shared_ptr<f::OperatorBase> gop = f::Backward(*fwd, {});
  ASSERT_TRUE(gop->IsNetOp());
  auto net = static_cast<ops::NetOp *>(gop.get());

  ASSERT_NO_THROW(net->DebugString());

  ASSERT_EQ(3UL, net->ops_.size());

  f::OperatorBase &d_sigmoid = *net->ops_[0];
  ASSERT_EQ("sigmoid_grad", d_sigmoid.Type());

  f::OperatorBase &d_add = *net->ops_[1];
  ASSERT_EQ("rowwise_add_grad", d_add.Type());

  f::OperatorBase &d_mul = *net->ops_[2];
  ASSERT_EQ("mul_grad", d_mul.Type());
}

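// Without a bias input the rowwise_add step never runs, so its grad op is
// absent from the backward net.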
TEST(Backward, net_fc_backward_not_have_b) {
  std::shared_ptr<f::OperatorBase> fwd =
      f::OpRegistry::CreateOp("fc", {{"X", {"x"}}, {"W", {"w"}}, {"b", {}}},
                              {{"mul_result", {"mul_res"}},
                               {"add_result", {"add_res"}},
                               {"Out", {"tmp"}}},
                              {});
  ASSERT_NE(fwd, nullptr);
  std::shared_ptr<f::OperatorBase> gop = f::Backward(*fwd, {});
  ASSERT_TRUE(gop->IsNetOp());
  auto net = static_cast<ops::NetOp *>(gop.get());

  ASSERT_NO_THROW(net->DebugString());

  ASSERT_EQ(2UL, net->ops_.size());

  f::OperatorBase &d_sigmoid = *net->ops_[0];
  ASSERT_EQ("sigmoid_grad", d_sigmoid.Type());

  f::OperatorBase &d_mul = *net->ops_[1];
  ASSERT_EQ("mul_grad", d_mul.Type());
}

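// Two stacked fc ops where the network input x needs no gradient: x@GRAD
// must not appear among the backward net's outputs.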
TEST(Backward, net_input_of_network_not_need_grad) {
  ops::NetOp net;
  net.AppendOp(f::OpRegistry::CreateOp(
      "fc", {{"X", {"x"}}, {"W", {"W1"}}, {"b", {"b1"}}},
      {{"mul_result", {"mul_tmp_0"}},
       {"add_result", {"add_tmp_0"}},
       {"Out", {"hidden0"}}},
      {}));
  net.AppendOp(f::OpRegistry::CreateOp(
      "fc", {{"X", {"hidden0"}}, {"W", {"W2"}}, {"b", {"b2"}}},
      {{"mul_result", {"mul_tmp_1"}},
       {"add_result", {"add_tmp_1"}},
       {"Out", {"hidden1"}}},
      {}));
  net.CompleteAddOp();
  auto bwd = Backward(net, {"x"});  // x@GRAD is not needed.
  ASSERT_TRUE(bwd->IsNetOp());
  auto bwd_net = static_cast<ops::NetOp *>(bwd.get());

  auto output_vars = bwd_net->OutputVars(true);
  std::unordered_set<std::string> all_outputs =
      std::unordered_set<std::string>(output_vars.begin(), output_vars.end());
  all_outputs.erase(f::kEmptyVarName);

  for (auto &out : {"W1", "b1", "hidden0", "W2", "b2"}) {
    ASSERT_NE(all_outputs.find(f::GradVarName(out)), all_outputs.end());
  }

  // x@GRAD is not generated.
  ASSERT_EQ(all_outputs.find(f::GradVarName("x")), all_outputs.end());

  ASSERT_EQ(2UL, bwd_net->ops_.size());
  ASSERT_TRUE(bwd_net->ops_[1]->IsNetOp());
  auto first_fc_grad = static_cast<ops::NetOp *>(bwd_net->ops_[1].get());
  ASSERT_EQ(3UL, first_fc_grad->ops_.size());
  ASSERT_EQ(f::kEmptyVarName,
            first_fc_grad->ops_[2]->Output(f::GradVarName("X")));
}

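// The weight w feeds two mul ops, so backward must append a sum op that
// accumulates w@GRAD from both branches.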
TEST(Backward, net_shared_weight) {
  ops::NetOp net;
  net.AppendOp(f::OpRegistry::CreateOp("mul", {{"X", {"x"}}, {"Y", {"w"}}},
                                       {{"Out", {"out"}}}, {}));
  net.AppendOp(f::OpRegistry::CreateOp("mul", {{"X", {"out"}}, {"Y", {"w"}}},
                                       {{"Out", {"FinalOut"}}}, {}));
  net.CompleteAddOp();

  auto bwd = f::Backward(net, {});
  ASSERT_TRUE(bwd->IsNetOp());
  auto bwd_net = static_cast<ops::NetOp *>(bwd.get());
  ASSERT_EQ(3UL, bwd_net->ops_.size());
  ASSERT_EQ("sum", bwd_net->ops_[2]->Type());
}

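// No input needs a gradient, so the backward net contains no ops at all.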
TEST(Backward, op_all_input_are_not_need) {
  auto fwd = f::OpRegistry::CreateOp(
      "rowwise_add", {{"X", {"x"}}, {"b", {"b"}}}, {{"Out", {"out"}}}, {});
  auto backward = f::Backward(*fwd, {"x", "b"});
  ASSERT_TRUE(backward->IsNetOp());
  auto net = static_cast<ops::NetOp *>(backward.get());
  ASSERT_TRUE(net->ops_.empty());
}

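// No output gradient is available, so there is nothing to compute backward.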
TEST(Backward, op_all_output_are_not_need) {
  auto fwd = f::OpRegistry::CreateOp(
      "rowwise_add", {{"X", {"x"}}, {"b", {"b"}}}, {{"Out", {"out"}}}, {});
  auto backward = f::Backward(*fwd, {"out"});
  ASSERT_TRUE(backward->IsNetOp());
  auto net = static_cast<ops::NetOp *>(backward.get());
  ASSERT_TRUE(net->ops_.empty());
}

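// Only some outputs need gradients: fill_zeros_like supplies a zero gradient
// for the unneeded output Z before the grad op runs.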
TEST(Backward, op_part_of_output_are_not_need) {
  auto fwd = f::OpRegistry::CreateOp("many_output_op", {{"x", {"X"}}},
                                     {{"y", {"Y"}}, {"z", {"Z"}}}, {});
  auto backward = f::Backward(*fwd, {"Z"});
  ASSERT_TRUE(backward->IsNetOp());
  auto net = static_cast<ops::NetOp *>(backward.get());
  ASSERT_EQ(net->ops_.size(), 2UL);

  auto &fill_zero = *net->ops_[0];
  ASSERT_EQ("fill_zeros_like", fill_zero.Type());
  ASSERT_EQ(1UL, fill_zero.Inputs("X").size());
  ASSERT_EQ("Z", fill_zero.Input("X"));
  ASSERT_EQ(1UL, fill_zero.Outputs("Y").size());
  ASSERT_EQ(std::string("Z") + f::kZeroVarSuffix, fill_zero.Output("Y"));

  auto &d_many_out = *net->ops_[1];
  ASSERT_EQ("many_output_op_grad", d_many_out.Type());
  ASSERT_EQ(1UL + 2UL + 2UL, d_many_out.Inputs().size());  // I(1) + O(2) + OG(2)
  ASSERT_EQ(std::string("Z") + f::kZeroVarSuffix,
            d_many_out.Input(f::GradVarName("z")));
  ASSERT_EQ(f::GradVarName("Y"), d_many_out.Input(f::GradVarName("y")));
  ASSERT_EQ(f::GradVarName("X"), d_many_out.Output(f::GradVarName("x")));
}

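// Only some inputs need gradients: the single grad op is still created, but
// the unneeded gradient output is kEmptyVarName.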
TEST(Backward, op_part_of_input_are_not_need) {
  auto fwd = f::OpRegistry::CreateOp("mul", {{"X", {"a"}}, {"Y", {"b"}}},
                                     {{"Out", {"out"}}}, {});
  auto backward = f::Backward(*fwd, {"a"});
  auto &grad_mul = *backward;
  ASSERT_EQ(grad_mul.Type(), "mul_grad");
  ASSERT_EQ(grad_mul.Inputs().size(), 2UL + 1UL + 1UL);
  ASSERT_EQ(grad_mul.Outputs().size(), 2UL);
  ASSERT_EQ(grad_mul.Output(f::GradVarName("X")), f::kEmptyVarName);
  ASSERT_EQ(grad_mul.Output(f::GradVarName("Y")), f::GradVarName("b"));
  ASSERT_EQ(grad_mul.Input(f::GradVarName("Out")), f::GradVarName("out"));
  ASSERT_EQ(grad_mul.Input("X"), "a");
  ASSERT_EQ(grad_mul.Input("Y"), "b");
  ASSERT_EQ(grad_mul.Input("Out"), "out");
}

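// Three stacked fc ops where out2 and its intermediates need no gradient:
// only the last fc keeps a real grad sub-net, while the earlier grad ops
// degenerate to empty ones.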
TEST(Backward, linear_net_intermediate_variable_has_no_grad) {
  ops::NetOp net;
  net.AppendOp(f::OpRegistry::CreateOp(
      "fc", {{"X", {"x1"}}, {"W", {"w1"}}, {"b", {"b1"}}},
      {{"mul_result", {"mul_out1"}},
       {"add_result", {"add_out1"}},
       {"Out", {"out1"}}},
      {}));
  net.AppendOp(f::OpRegistry::CreateOp(
      "fc", {{"X", {"out1"}}, {"W", {"w2"}}, {"b", {"b2"}}},
      {{"mul_result", {"mul_out2"}},
       {"add_result", {"tmp_out2"}},
       {"Out", {"out2"}}},
      {}));
  net.AppendOp(f::OpRegistry::CreateOp(
      "fc", {{"X", {"out2"}}, {"W", {"w3"}}, {"b", {"b3"}}},
      {{"mul_result", {"mul_out3"}},
       {"add_result", {"tmp_out3"}},
       {"Out", {"out3"}}},
      {}));
  net.CompleteAddOp();

  auto backward = f::Backward(net, {"mul_out2", "tmp_out2", "out2"});
  ASSERT_TRUE(backward->IsNetOp());
  auto bwd_net = static_cast<ops::NetOp *>(backward.get());
  ASSERT_EQ(bwd_net->ops_.size(), 3UL);
  auto &grad_fc = *bwd_net->ops_[0];

  const char *all = paddle::operators::NetOp::kAll;
  EXPECT_EQ(grad_fc.Inputs(all).size(),
            2UL       /* external input number */
                + 1UL /* external output number */
                + 1UL /* number of gradients of external outputs */
                + 2UL /* internal variable number */);
  EXPECT_EQ(grad_fc.Outputs(all).size(),
            2UL       /* input number of mul */
                + 2UL /* input number of rowwise_add */
                + 1UL /* input number of sigmoid */);
  EXPECT_EQ(bwd_net->ops_[1]->Inputs(all).size(), 0UL);
  EXPECT_EQ(bwd_net->ops_[1]->Outputs(all).size(), 0UL);
  EXPECT_EQ(bwd_net->ops_[2]->Inputs(all).size(), 0UL);
  EXPECT_EQ(bwd_net->ops_[2]->Outputs(all).size(), 0UL);
}