/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License. */

#include "paddle/framework/backward.h"

#include <gtest/gtest.h>
#include "paddle/framework/op_registry.h"
#include "paddle/operators/net_op.h"

namespace paddle {
namespace framework {

using OperatorBase = framework::OperatorBase;
using OpProtoAndCheckerMaker = framework::OpProtoAndCheckerMaker;
using OpProto = framework::OpProto;
using OpAttrChecker = framework::OpAttrChecker;
using Scope = framework::Scope;
using DeviceContext = platform::DeviceContext;

class EmptyOp : public OperatorBase {
 public:
  using OperatorBase::OperatorBase;
  void InferShape(const Scope &scope) const override {}
  void Run(const Scope &scope, const DeviceContext &dev_ctx) const override {}
};

class RowWiseAddOpMaker : public OpProtoAndCheckerMaker {
 public:
  RowWiseAddOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input X of Add").AsNoGradient();
    AddInput("b", "Bias of Add").AsNoGradient();
    AddOutput("Out", "Out of Add").AsNoGradient();
    AddComment("Add Op");
  }
};

class MulOpMaker : public OpProtoAndCheckerMaker {
 public:
  MulOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "A");
    AddInput("Y", "B");
    AddOutput("Out", "Out");
    AddComment("Mul");
  }
};

class SigmoidOpMaker : public OpProtoAndCheckerMaker {
 public:
  SigmoidOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "X");
    AddOutput("Out", "Y");
    AddComment("Sigmoid");
  }
};

class NoGradOpMaker : public OpProtoAndCheckerMaker {
 public:
  NoGradOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "X input");
    AddOutput("Out", "Y output");
    AddComment("NoGradOp, same input output. no Grad");
  }
};

class FcOp : public operators::NetOp {
 public:
  FcOp(const std::string &type, const VarNameMap &inputs,
       const VarNameMap &outputs, const AttributeMap &attrs)
      : NetOp(type, inputs, outputs, attrs) {
    AddOp(OpRegistry::CreateOp("mul",
                               {{"X", {Input("X")}}, {"Y", {Input("W")}}},
                               {{"Out", {Output("mul_result")}}}, {}));
    auto input_b = Inputs("b");
    std::string before_act = "mul_result";
    if (input_b.size() != 0) {
      AddOp(OpRegistry::CreateOp(
          "rowwise_add", {{"X", {Output("mul_result")}}, {"b", {input_b[0]}}},
          {{"Out", {Output("add_result")}}}, {}));
      before_act = "add_result";
    } else {
      auto out_varname = Output("add_result");
      if (out_varname != kEmptyVarName) {
        this->Rename(out_varname, kEmptyVarName);
      }
    }

    AddOp(OpRegistry::CreateOp("sigmoid", {{"X", {Output(before_act)}}},
                               {{"Out", {Output("Out")}}}, {}));
    CompleteAddOp(false);
  }
};

class FcOpMaker : public OpProtoAndCheckerMaker {
 public:
  FcOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "x");
    AddInput("W", "w");
    AddInput("b", "b");
    AddOutput("mul_result", "").AsIntermediate();
    AddOutput("add_result", "").AsIntermediate();
    AddOutput("Out", "");
    AddComment("");
  }
};

class ManyOutputOpMaker : public OpProtoAndCheckerMaker {
 public:
  ManyOutputOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("x", "x");
    AddOutput("y", "y");
    AddOutput("z", "z");
    AddComment("");
  }
};

class FillZeroOpMaker : public OpProtoAndCheckerMaker {
 public:
  FillZeroOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("x", "x");
    AddOutput("out", "out");
    AddComment("");
  }
};

class AddOpMaker : public OpProtoAndCheckerMaker {
 public:
  AddOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "x").AsDuplicable();
    AddOutput("Y", "y");
    AddComment("");
  }
};
}  // namespace framework
}  // namespace paddle

namespace f = paddle::framework;
namespace ops = paddle::operators;
using EnforceNotMet = paddle::platform::EnforceNotMet;
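// Register the stub forward operators and, where tests need them, their
// gradient counterparts. "nograd", "fill_zeros_like" and the composite "fc"
// deliberately have no registered gradient op.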
REGISTER_OP(rowwise_add, f::EmptyOp, f::RowWiseAddOpMaker);
REGISTER_GRADIENT_OP(rowwise_add, rowwise_add_grad, f::EmptyOp);
REGISTER_OP(mul, f::EmptyOp, f::MulOpMaker);
REGISTER_GRADIENT_OP(mul, mul_grad, f::EmptyOp);
REGISTER_OP(sigmoid, f::EmptyOp, f::SigmoidOpMaker);
REGISTER_GRADIENT_OP(sigmoid, sigmoid_grad, f::EmptyOp);
REGISTER_OP(nograd, f::EmptyOp, f::NoGradOpMaker);
REGISTER_OP(fill_zeros_like, f::EmptyOp, f::FillZeroOpMaker);
REGISTER_OP(add, f::EmptyOp, f::AddOpMaker);
REGISTER_GRADIENT_OP(add, add_grad, f::EmptyOp);
REGISTER_OP(fc, f::FcOp, f::FcOpMaker);
REGISTER_OP(many_output_op, f::EmptyOp, f::ManyOutputOpMaker);
REGISTER_GRADIENT_OP(many_output_op, many_output_op_grad, f::EmptyOp);

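// The gradient op of a single operator maps each forward input to its
// gradient variable (GradVarName appends the "@GRAD" suffix, e.g. "x" ->
// "x@GRAD").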
TEST(Backward, simple_op_grad) {
  auto fwd = f::OpRegistry::CreateOp(
      "rowwise_add", {{"X", {"x"}}, {"b", {"b"}}}, {{"Out", {"out"}}}, {});
  ASSERT_NE(fwd, nullptr);
  auto gop = f::OpRegistry::CreateGradOp(*fwd);
  ASSERT_EQ(1UL, gop->Inputs().size());
  ASSERT_EQ("rowwise_add_grad", gop->Type());
  ASSERT_EQ(f::GradVarName("x"), gop->Output(f::GradVarName("X")));
  ASSERT_EQ(f::GradVarName("b"), gop->Output(f::GradVarName("b")));
}

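// Variables in the no-grad set receive kEmptyVarName as their gradient
// output; when every input is excluded, Backward() returns an empty net.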
TEST(Backward, simple_op_not_need_grad) {
  auto fwd = f::OpRegistry::CreateOp(
      "rowwise_add", {{"X", {"x"}}, {"b", {"b"}}}, {{"Out", {"out"}}}, {});
  ASSERT_NE(fwd, nullptr);
  auto gop = f::Backward(*fwd, {"x"});
  ASSERT_EQ(gop->Output(f::GradVarName("X")), f::kEmptyVarName);

  auto no_input_gop = f::Backward(*fwd, {"x", "b"});
  ASSERT_NE(no_input_gop, nullptr);
  ASSERT_TRUE(no_input_gop->IsNetOp());
  ASSERT_EQ(0UL,
            std::static_pointer_cast<ops::NetOp>(no_input_gop)->ops_.size());
}

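// The backward net of fc runs the gradient ops in reverse order of the
// forward ops: sigmoid_grad, rowwise_add_grad, mul_grad.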
TEST(Backward, net_fc_backward_normal) {
  std::shared_ptr<f::OperatorBase> fwd =
      f::OpRegistry::CreateOp("fc", {{"X", {"x"}}, {"W", {"w"}}, {"b", {"b"}}},
                              {{"mul_result", {"mul_res"}},
                               {"add_result", {"add_re"}},
                               {"Out", {"out"}}},
                              {});
  ASSERT_NE(fwd, nullptr);
  std::shared_ptr<f::OperatorBase> gop = f::Backward(*fwd, {});
  ASSERT_TRUE(gop->IsNetOp());
  auto net = static_cast<ops::NetOp *>(gop.get());

  ASSERT_NO_THROW(net->DebugString());

  ASSERT_EQ(3UL, net->ops_.size());

  f::OperatorBase &d_sigmoid = *net->ops_[0];
  ASSERT_EQ("sigmoid_grad", d_sigmoid.Type());

  f::OperatorBase &d_add = *net->ops_[1];
  ASSERT_EQ("rowwise_add_grad", d_add.Type());

  f::OperatorBase &d_mul = *net->ops_[2];
  ASSERT_EQ("mul_grad", d_mul.Type());
}

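// Without a bias input the forward fc skips rowwise_add, so its backward
// net contains only sigmoid_grad and mul_grad.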
TEST(Backward, net_fc_backward_not_have_b) {
  std::shared_ptr<f::OperatorBase> fwd =
      f::OpRegistry::CreateOp("fc", {{"X", {"x"}}, {"W", {"w"}}, {"b", {}}},
                              {{"mul_result", {"mul_res"}},
                               {"add_result", {"add_res"}},
                               {"Out", {"tmp"}}},
                              {});
  ASSERT_NE(fwd, nullptr);
  std::shared_ptr<f::OperatorBase> gop = f::Backward(*fwd, {});
  ASSERT_TRUE(gop->IsNetOp());
  auto net = static_cast<ops::NetOp *>(gop.get());

  ASSERT_NO_THROW(net->DebugString());

  ASSERT_EQ(2UL, net->ops_.size());

  f::OperatorBase &d_sigmoid = *net->ops_[0];
  ASSERT_EQ("sigmoid_grad", d_sigmoid.Type());

  f::OperatorBase &d_mul = *net->ops_[1];
  ASSERT_EQ("mul_grad", d_mul.Type());
}

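// In a two-layer fc network, every parameter and hidden output gets a
// gradient except the data input "x", which is in the no-grad set.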
TEST(Backward, net_input_of_network_not_need_grad) {
  ops::NetOp net;
  net.AddOp(f::OpRegistry::CreateOp(
      "fc", {{"X", {"x"}}, {"W", {"W1"}}, {"b", {"b1"}}},
      {{"mul_result", {"mul_tmp_0"}},
       {"add_result", {"add_tmp_0"}},
       {"Out", {"hidden0"}}},
      {}));
  net.AddOp(f::OpRegistry::CreateOp(
      "fc", {{"X", {"hidden0"}}, {"W", {"W2"}}, {"b", {"b2"}}},
      {{"mul_result", {"mul_tmp_1"}},
       {"add_result", {"add_tmp_1"}},
       {"Out", {"hidden1"}}},
      {}));
  net.CompleteAddOp();
  auto bwd = Backward(net, {"x"});  // x@GRAD is not needed.
  ASSERT_TRUE(bwd->IsNetOp());
  auto bwd_net = static_cast<ops::NetOp *>(bwd.get());

  auto output_vars = bwd_net->OutputVars(true);
  std::unordered_set<std::string> all_outputs =
      std::unordered_set<std::string>(output_vars.begin(), output_vars.end());
  all_outputs.erase(f::kEmptyVarName);

  for (auto &out : {"W1", "b1", "hidden0", "W2", "b2"}) {
    ASSERT_NE(all_outputs.find(f::GradVarName(out)), all_outputs.end());
  }

  // x@GRAD is not generated, since "x" is in the no-grad set.
  ASSERT_EQ(all_outputs.find(f::GradVarName("x")), all_outputs.end());

  ASSERT_EQ(2UL, bwd_net->ops_.size());
  ASSERT_TRUE(bwd_net->ops_[1]->IsNetOp());
  auto first_fc_grad = static_cast<ops::NetOp *>(bwd_net->ops_[1].get());
  ASSERT_EQ(3UL, first_fc_grad->ops_.size());
  ASSERT_EQ(f::kEmptyVarName,
            first_fc_grad->ops_[2]->Output(f::GradVarName("X")));
}

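// When two ops share the weight "w", its two partial gradients must be
// summed: Backward() appends an extra "add" op after the two mul_grad ops.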
TEST(Backward, net_shared_weight) {
  ops::NetOp net;
  net.AddOp(f::OpRegistry::CreateOp("mul", {{"X", {"x"}}, {"Y", {"w"}}},
                                    {{"Out", {"out"}}}, {}));
  net.AddOp(f::OpRegistry::CreateOp("mul", {{"X", {"out"}}, {"Y", {"w"}}},
                                    {{"Out", {"FinalOut"}}}, {}));
  net.CompleteAddOp();

  auto bwd = f::Backward(net, {});
  ASSERT_TRUE(bwd->IsNetOp());
  auto bwd_net = static_cast<ops::NetOp *>(bwd.get());
  ASSERT_EQ(3UL, bwd_net->ops_.size());
  ASSERT_EQ("add", bwd_net->ops_[2]->Type());
}

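// "fc" is a NetOp with no registered gradient op, so asking the registry
// for one directly must throw.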
TEST(Backward, op_register_grad_not_for_network) {
  auto fwd =
      f::OpRegistry::CreateOp("fc", {{"X", {"x"}}, {"W", {"w"}}, {"b", {"b"}}},
                              {{"mul_result", {"mul_out"}},
                               {"add_result", {"add_out"}},
                               {"Out", {"out1"}}},
                              {{"temporary_index", std::vector<int>{0, 1}}});

  ASSERT_THROW(f::OpRegistry::CreateGradOp(*fwd), EnforceNotMet);
}

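// If none of the forward inputs needs a gradient, the backward pass
// degenerates to an empty net.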
TEST(Backward, op_all_input_are_not_need) {
  auto fwd = f::OpRegistry::CreateOp(
      "rowwise_add", {{"X", {"x"}}, {"b", {"b"}}}, {{"Out", {"out"}}}, {});
  auto backward = f::Backward(*fwd, {"x", "b"});
  ASSERT_TRUE(backward->IsNetOp());
  auto net = static_cast<ops::NetOp *>(backward.get());
  ASSERT_TRUE(net->ops_.empty());
}

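// Likewise, if none of the forward outputs propagates a gradient, nothing
// is generated.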
TEST(Backward, op_all_output_are_not_need) {
  auto fwd = f::OpRegistry::CreateOp(
      "rowwise_add", {{"X", {"x"}}, {"b", {"b"}}}, {{"Out", {"out"}}}, {});
  auto backward = f::Backward(*fwd, {"out"});
  ASSERT_TRUE(backward->IsNetOp());
  auto net = static_cast<ops::NetOp *>(backward.get());
  ASSERT_TRUE(net->ops_.empty());
}

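// When only part of the outputs is stopped, the missing gradient of "Z" is
// substituted with a zero tensor produced by a fill_zeros_like op.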
TEST(Backward, op_part_of_output_are_not_need) {
  auto fwd = f::OpRegistry::CreateOp("many_output_op", {{"x", {"X"}}},
                                     {{"y", {"Y"}}, {"z", {"Z"}}}, {});
  auto backward = f::Backward(*fwd, {"Z"});
  ASSERT_TRUE(backward->IsNetOp());
  auto net = static_cast<ops::NetOp *>(backward.get());
  ASSERT_EQ(net->ops_.size(), 2UL);

  auto &fill_zero = *net->ops_[0];
  ASSERT_EQ("fill_zeros_like", fill_zero.Type());
  ASSERT_EQ(1UL, fill_zero.Inputs("Src").size());
  ASSERT_EQ("Z", fill_zero.Input("Src"));
  ASSERT_EQ(1UL, fill_zero.Outputs("Dst").size());
  ASSERT_EQ(std::string("Z") + f::kZeroVarSuffix, fill_zero.Output("Dst"));

  auto &d_many_out = *net->ops_[1];
  ASSERT_EQ("many_output_op_grad", d_many_out.Type());
  ASSERT_EQ(1UL + 2UL + 2UL,
            d_many_out.Inputs().size());  // inputs + outputs + output grads
  ASSERT_EQ(std::string("Z") + f::kZeroVarSuffix,
            d_many_out.Input(f::GradVarName("z")));
  ASSERT_EQ(f::GradVarName("Y"), d_many_out.Input(f::GradVarName("y")));
  ASSERT_EQ(f::GradVarName("X"), d_many_out.Output(f::GradVarName("x")));
}

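// With input "a" in the no-grad set, mul_grad still consumes all forward
// inputs and outputs but writes kEmptyVarName for the gradient of "a".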
TEST(Backward, op_part_of_input_are_not_need) {
  auto fwd = f::OpRegistry::CreateOp("mul", {{"X", {"a"}}, {"Y", {"b"}}},
                                     {{"Out", {"out"}}}, {});
  auto backward = f::Backward(*fwd, {"a"});
  auto &grad_mul = *backward;
  ASSERT_EQ(grad_mul.Type(), "mul_grad");
  ASSERT_EQ(grad_mul.Inputs().size(), 2UL + 1UL + 1UL);
  ASSERT_EQ(grad_mul.Outputs().size(), 2UL);
  ASSERT_EQ(grad_mul.Output(f::GradVarName("X")), f::kEmptyVarName);
  ASSERT_EQ(grad_mul.Output(f::GradVarName("Y")), f::GradVarName("b"));
  ASSERT_EQ(grad_mul.Input(f::GradVarName("Out")), f::GradVarName("out"));
  ASSERT_EQ(grad_mul.Input("X"), "a");
  ASSERT_EQ(grad_mul.Input("Y"), "b");
  ASSERT_EQ(grad_mul.Input("Out"), "out");
}

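// In a three-layer fc net whose middle layer's outputs are all in the
// no-grad set, only the last layer's fc_grad keeps real variable names; the
// remaining two grad ops end up with empty input and output lists.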
TEST(Backward, linear_net_intermediate_variable_has_no_grad) {
  ops::NetOp net;
  net.AddOp(f::OpRegistry::CreateOp(
      "fc", {{"X", {"x1"}}, {"W", {"w1"}}, {"b", {"b1"}}},
      {{"mul_result", {"mul_out1"}},
       {"add_result", {"add_out1"}},
       {"Out", {"out1"}}},
      {}));
  net.AddOp(f::OpRegistry::CreateOp(
      "fc", {{"X", {"out1"}}, {"W", {"w2"}}, {"b", {"b2"}}},
      {{"mul_result", {"mul_out2"}},
       {"add_result", {"tmp_out2"}},
       {"Out", {"out2"}}},
      {}));
  net.AddOp(f::OpRegistry::CreateOp(
      "fc", {{"X", {"out2"}}, {"W", {"w3"}}, {"b", {"b3"}}},
      {{"mul_result", {"mul_out3"}},
       {"add_result", {"tmp_out3"}},
       {"Out", {"out3"}}},
      {}));
  net.CompleteAddOp();

  auto backward = f::Backward(net, {"mul_out2", "tmp_out2", "out2"});
  ASSERT_TRUE(backward->IsNetOp());
  auto bwd_net = static_cast<ops::NetOp *>(backward.get());
  ASSERT_EQ(bwd_net->ops_.size(), 3UL);
  auto &grad_fc = *bwd_net->ops_[0];

  const char *all = paddle::operators::NetOp::kAll;
  EXPECT_EQ(grad_fc.Inputs(all).size(),
            2UL       /* external input number */
                + 1UL /* external output number */
                + 1UL /* gradient of external output */
                + 2UL /* internal variable number */);
  EXPECT_EQ(grad_fc.Outputs(all).size(),
404 405 406 407
            2UL       /* input number of mul */
                + 2UL /* input number of rowwise_add */
                + 1UL /* input number of sigmoid */);
  EXPECT_EQ(bwd_net->ops_[1]->Inputs(all).size(), 0UL);
  EXPECT_EQ(bwd_net->ops_[1]->Outputs(all).size(), 0UL);
  EXPECT_EQ(bwd_net->ops_[2]->Inputs(all).size(), 0UL);
  EXPECT_EQ(bwd_net->ops_[2]->Outputs(all).size(), 0UL);
}