/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License. */

#include "paddle/framework/backward.h"

#include <gtest/gtest.h>
#include "paddle/framework/op_registry.h"
#include "paddle/operators/net_op.h"

namespace paddle {
namespace framework {

using OperatorBase = framework::OperatorBase;
using OpProtoAndCheckerMaker = framework::OpProtoAndCheckerMaker;
using OpProto = framework::OpProto;
using OpAttrChecker = framework::OpAttrChecker;
using Scope = framework::Scope;
using DeviceContext = platform::DeviceContext;

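// A stub operator: InferShape and Run do nothing. Every operator in this test
// uses it as its kernel, so only Backward()'s graph transformation is tested.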
class EmptyOp : public OperatorBase {
 public:
  using OperatorBase::OperatorBase;
  void InferShape(const Scope &scope) const override {}
  void Run(const Scope &scope, const DeviceContext &dev_ctx) const override {}
};

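// Proto maker for rowwise_add (Out = X + b); note that all three slots are
// marked AsNoGradient here.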
class RowWiseAddOpMaker : public OpProtoAndCheckerMaker {
 public:
  RowWiseAddOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input X of Add").AsNoGradient();
    AddInput("b", "Bias of Add").AsNoGradient();
    AddOutput("Out", "Out of Add").AsNoGradient();
    AddComment("Add Op");
  }
};

class MulOpMaker : public OpProtoAndCheckerMaker {
 public:
  MulOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "A");
    AddInput("Y", "B");
    AddOutput("Out", "Out");
    AddComment("Mul");
  }
};

class SigmoidOpMaker : public OpProtoAndCheckerMaker {
 public:
  SigmoidOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "X");
    AddOutput("Out", "Y");
    AddComment("Sigmoid");
  }
};

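// Proto maker for an operator that is registered without a gradient op.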
class NoGradOpMaker : public OpProtoAndCheckerMaker {
 public:
  NoGradOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "X input");
    AddOutput("Out", "Y output");
    AddComment("NoGradOp, same input output. no Grad");
  }
};

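// A composite NetOp implementing a fully connected layer:
// Out = sigmoid(X * W + b). When no bias "b" is given, the rowwise_add step
// is skipped and the unused "add_result" output is renamed to kEmptyVarName.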
class FcOp : public operators::NetOp {
 public:
  FcOp(const std::string &type, const VarNameMap &inputs,
       const VarNameMap &outputs, const AttributeMap &attrs)
      : NetOp(type, inputs, outputs, attrs) {
    AddOp(OpRegistry::CreateOp("mul",
                               {{"X", {Input("X")}}, {"Y", {Input("W")}}},
                               {{"Out", {Output("mul_result")}}}, {}));
    auto input_b = Inputs("b");
    std::string before_act = "mul_result";
    if (input_b.size() != 0) {
      AddOp(OpRegistry::CreateOp(
          "rowwise_add", {{"X", {Output("mul_result")}}, {"b", {input_b[0]}}},
          {{"Out", {Output("add_result")}}}, {}));
      before_act = "add_result";
    } else {
      auto out_varname = Output("add_result");
      if (out_varname != kEmptyVarName) {
        this->Rename(out_varname, kEmptyVarName);
      }
    }

    AddOp(OpRegistry::CreateOp("sigmoid", {{"X", {Output(before_act)}}},
                               {{"Out", {Output("Out")}}}, {}));
    CompleteAddOp(false);
  }
};

class FcOpMaker : public OpProtoAndCheckerMaker {
 public:
  FcOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "x");
    AddInput("W", "w");
    AddInput("b", "b");
    AddOutput("mul_result", "").AsIntermediate();
    AddOutput("add_result", "").AsIntermediate();
    AddOutput("Out", "");
    AddComment("");
  }
};

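// One input, two outputs; used to test backward when only part of the
// outputs need gradients.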
class ManyOutputOpMaker : public OpProtoAndCheckerMaker {
 public:
  ManyOutputOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("x", "x");
    AddOutput("y", "y");
    AddOutput("z", "z");
    AddComment("");
  }
};

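// fill_zeros_like provides an all-zero gradient for outputs whose gradient
// is not supplied.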
class FillZeroOpMaker : public OpProtoAndCheckerMaker {
 public:
  FillZeroOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("x", "x");
    AddOutput("out", "out");
    AddComment("");
  }
};

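// "add" declares a duplicable input; Backward inserts it to accumulate the
// gradients of a variable shared by several operators.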
class AddOpMaker : public OpProtoAndCheckerMaker {
 public:
  AddOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "x").AsDuplicable();
    AddOutput("Y", "y");
    AddComment("");
  }
};
}  // namespace framework
}  // namespace paddle

namespace f = paddle::framework;
namespace ops = paddle::operators;
using EnforceNotMet = paddle::platform::EnforceNotMet;
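// Register the test operators. EmptyOp serves as the kernel of both the
// forward and the gradient ops, so only operator metadata is exercised.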
REGISTER_OP(rowwise_add, f::EmptyOp, f::RowWiseAddOpMaker, rowwise_add_grad,
            f::EmptyOp);
REGISTER_OP(mul, f::EmptyOp, f::MulOpMaker, mul_grad, f::EmptyOp);
REGISTER_OP(sigmoid, f::EmptyOp, f::SigmoidOpMaker, sigmoid_grad, f::EmptyOp);
REGISTER_OP_WITHOUT_GRADIENT(nograd, f::EmptyOp, f::NoGradOpMaker);
REGISTER_OP_WITHOUT_GRADIENT(fill_zeros_like, f::EmptyOp, f::FillZeroOpMaker);
REGISTER_OP(add, f::EmptyOp, f::AddOpMaker, add_grad, f::EmptyOp);
REGISTER_OP_WITHOUT_GRADIENT(fc, f::FcOp, f::FcOpMaker);
REGISTER_OP(many_output_op, f::EmptyOp, f::ManyOutputOpMaker,
            many_output_op_grad, f::EmptyOp);

TEST(Backward, simple_op_grad) {
  auto fwd = f::OpRegistry::CreateOp(
      "rowwise_add", {{"X", {"x"}}, {"b", {"b"}}}, {{"Out", {"out"}}}, {});
  ASSERT_NE(fwd, nullptr);
  auto gop = f::OpRegistry::CreateGradOp(*fwd);
  ASSERT_EQ(1UL, gop->inputs_.size());
  ASSERT_EQ("rowwise_add_grad", gop->type_);
  ASSERT_EQ(f::GradVarName("x"), gop->Output(f::GradVarName("X")));
  ASSERT_EQ(f::GradVarName("b"), gop->Output(f::GradVarName("b")));
}

TEST(Backward, simple_op_not_need_grad) {
  auto fwd = f::OpRegistry::CreateOp(
      "rowwise_add", {{"X", {"x"}}, {"b", {"b"}}}, {{"Out", {"out"}}}, {});
  ASSERT_NE(fwd, nullptr);
  auto gop = f::Backward(*fwd, {"x"});
  ASSERT_EQ(gop->Output(f::GradVarName("X")), f::kEmptyVarName);

  auto no_input_gop = f::Backward(*fwd, {"x", "b"});
  ASSERT_NE(no_input_gop, nullptr);
  ASSERT_TRUE(no_input_gop->IsNetOp());
  ASSERT_EQ(0UL,
            std::static_pointer_cast<ops::NetOp>(no_input_gop)->ops_.size());
}

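// Backward of a full fc op yields its three gradient ops in reverse order:
// sigmoid_grad, rowwise_add_grad, mul_grad.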
TEST(Backward, net_fc_backward_normal) {
  std::shared_ptr<f::OperatorBase> fwd =
      f::OpRegistry::CreateOp("fc", {{"X", {"x"}}, {"W", {"w"}}, {"b", {"b"}}},
                              {{"mul_result", {"mul_res"}},
                               {"add_result", {"add_re"}},
                               {"Out", {"out"}}},
                              {});
  ASSERT_NE(fwd, nullptr);
  std::shared_ptr<f::OperatorBase> gop = f::Backward(*fwd, {});
  ASSERT_TRUE(gop->IsNetOp());
  auto net = static_cast<ops::NetOp *>(gop.get());

  ASSERT_NO_THROW(net->DebugString());

  ASSERT_EQ(3UL, net->ops_.size());

  f::OperatorBase &d_sigmoid = *net->ops_[0];
  ASSERT_EQ("sigmoid_grad", d_sigmoid.type_);

  f::OperatorBase &d_add = *net->ops_[1];
  ASSERT_EQ("rowwise_add_grad", d_add.type_);

  f::OperatorBase &d_mul = *net->ops_[2];
  ASSERT_EQ("mul_grad", d_mul.type_);
}

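// Without the bias input, fc contains no rowwise_add, so its backward net
// holds only sigmoid_grad and mul_grad.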
TEST(Backward, net_fc_backward_not_have_b) {
  std::shared_ptr<f::OperatorBase> fwd =
      f::OpRegistry::CreateOp("fc", {{"X", {"x"}}, {"W", {"w"}}, {"b", {}}},
                              {{"mul_result", {"mul_res"}},
                               {"add_result", {"add_res"}},
                               {"Out", {"tmp"}}},
                              {});
  ASSERT_NE(fwd, nullptr);
  std::shared_ptr<f::OperatorBase> gop = f::Backward(*fwd, {});
  ASSERT_TRUE(gop->IsNetOp());
  auto net = static_cast<ops::NetOp *>(gop.get());

  ASSERT_NO_THROW(net->DebugString());

  ASSERT_EQ(2UL, net->ops_.size());

  f::OperatorBase &d_sigmoid = *net->ops_[0];
  ASSERT_EQ("sigmoid_grad", d_sigmoid.type_);

  f::OperatorBase &d_mul = *net->ops_[1];
  ASSERT_EQ("mul_grad", d_mul.type_);
}

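// In a two-layer fc network whose input x needs no gradient, every parameter
// still gets a gradient while x@GRAD is suppressed.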
TEST(Backward, net_input_of_network_not_need_grad) {
  ops::NetOp net;
  net.AddOp(f::OpRegistry::CreateOp(
      "fc", {{"X", {"x"}}, {"W", {"W1"}}, {"b", {"b1"}}},
      {{"mul_result", {"mul_tmp_0"}},
       {"add_result", {"add_tmp_0"}},
       {"Out", {"hidden0"}}},
      {}));
  net.AddOp(f::OpRegistry::CreateOp(
      "fc", {{"X", {"hidden0"}}, {"W", {"W2"}}, {"b", {"b2"}}},
      {{"mul_result", {"mul_tmp_1"}},
       {"add_result", {"add_tmp_1"}},
       {"Out", {"hidden1"}}},
      {}));
  net.CompleteAddOp();
  auto bwd = Backward(net, {"x"});  // x@GRAD is not needed.
  ASSERT_TRUE(bwd->IsNetOp());
  auto bwd_net = static_cast<ops::NetOp *>(bwd.get());

  auto output_vars = bwd_net->OutputVars(true);
  std::unordered_set<std::string> all_outputs =
      std::unordered_set<std::string>(output_vars.begin(), output_vars.end());
  all_outputs.erase(f::kEmptyVarName);

  for (auto &out : {"W1", "b1", "hidden0", "W2", "b2"}) {
    ASSERT_NE(all_outputs.find(f::GradVarName(out)), all_outputs.end());
  }

  // The gradient of X is not generated.
  ASSERT_EQ(all_outputs.find(f::GradVarName("X")), all_outputs.end());

  ASSERT_EQ(2UL, bwd_net->ops_.size());
  ASSERT_TRUE(bwd_net->ops_[1]->IsNetOp());
  auto first_fc_grad = static_cast<ops::NetOp *>(bwd_net->ops_[1].get());
  ASSERT_EQ(3UL, first_fc_grad->ops_.size());
  ASSERT_EQ(f::kEmptyVarName,
            first_fc_grad->ops_[2]->Output(f::GradVarName("X")));
}

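// When a weight is shared by two operators, Backward must append an "add" op
// that accumulates the two partial gradients of w.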
TEST(Backward, net_shared_weight) {
  ops::NetOp net;
  net.AddOp(f::OpRegistry::CreateOp("mul", {{"X", {"x"}}, {"Y", {"w"}}},
                                    {{"Out", {"out"}}}, {}));
  net.AddOp(f::OpRegistry::CreateOp("mul", {{"X", {"out"}}, {"Y", {"w"}}},
                                    {{"Out", {"FinalOut"}}}, {}));
  net.CompleteAddOp();

  auto bwd = f::Backward(net, {});
  ASSERT_TRUE(bwd->IsNetOp());
  auto bwd_net = static_cast<ops::NetOp *>(bwd.get());
  ASSERT_EQ(3UL, bwd_net->ops_.size());
  ASSERT_EQ("add", bwd_net->ops_[2]->type_);
}

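// CreateGradOp is defined only for plain operators; calling it on a
// composite fc NetOp must throw.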
TEST(Backward, op_register_grad_not_for_network) {
  auto fwd =
      f::OpRegistry::CreateOp("fc", {{"X", {"x"}}, {"W", {"w"}}, {"b", {"b"}}},
                              {{"mul_result", {"mul_out"}},
                               {"add_result", {"add_out"}},
                               {"Out", {"out1"}}},
                              {{"temporary_index", std::vector<int>{0, 1}}});

  ASSERT_THROW(f::OpRegistry::CreateGradOp(*fwd), EnforceNotMet);
}

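// If none of the inputs needs a gradient, the backward net contains no ops.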
TEST(Backward, op_all_input_are_not_need) {
  auto fwd = f::OpRegistry::CreateOp(
      "rowwise_add", {{"X", {"x"}}, {"b", {"b"}}}, {{"Out", {"out"}}}, {});
  auto backward = f::Backward(*fwd, {"x", "b"});
  ASSERT_TRUE(backward->IsNetOp());
  auto net = static_cast<ops::NetOp *>(backward.get());
  ASSERT_TRUE(net->ops_.empty());
}

TEST(Backward, op_all_output_are_not_need) {
  auto fwd = f::OpRegistry::CreateOp(
      "rowwise_add", {{"X", {"x"}}, {"b", {"b"}}}, {{"Out", {"out"}}}, {});
  auto backward = f::Backward(*fwd, {"out"});
  ASSERT_TRUE(backward->IsNetOp());
  auto net = static_cast<ops::NetOp *>(backward.get());
  ASSERT_TRUE(net->ops_.empty());
}

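// When only one of two outputs has a gradient, fill_zeros_like feeds an
// all-zero gradient for the missing one before the grad op runs.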
TEST(Backward, op_part_of_output_are_not_need) {
  auto fwd = f::OpRegistry::CreateOp("many_output_op", {{"x", {"X"}}},
                                     {{"y", {"Y"}}, {"z", {"Z"}}}, {});
  auto backward = f::Backward(*fwd, {"Z"});
  ASSERT_TRUE(backward->IsNetOp());
  auto net = static_cast<ops::NetOp *>(backward.get());
  ASSERT_EQ(net->ops_.size(), 2UL);

  auto &fill_zero = *net->ops_[0];
  ASSERT_EQ("fill_zeros_like", fill_zero.type_);
  ASSERT_EQ(1UL, fill_zero.Inputs("Src").size());
  ASSERT_EQ("Z", fill_zero.Input("Src"));
  ASSERT_EQ(1UL, fill_zero.Outputs("Dst").size());
  ASSERT_EQ(std::string("Z") + f::kZeroVarSuffix, fill_zero.Output("Dst"));

  auto &d_many_out = *net->ops_[1];
  ASSERT_EQ("many_output_op_grad", d_many_out.type_);
  ASSERT_EQ(1UL + 2UL + 2UL,
            d_many_out.inputs_.size());  // #inputs + #outputs + #output grads
  ASSERT_EQ(std::string("Z") + f::kZeroVarSuffix,
            d_many_out.Input(f::GradVarName("z")));
  ASSERT_EQ(f::GradVarName("Y"), d_many_out.Input(f::GradVarName("y")));
  ASSERT_EQ(f::GradVarName("X"), d_many_out.Output(f::GradVarName("x")));
}

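// When input "a" needs no gradient, the grad op keeps all its inputs but
// emits kEmptyVarName for a@GRAD.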
TEST(Backward, op_part_of_input_are_not_need) {
  auto fwd = f::OpRegistry::CreateOp("mul", {{"X", {"a"}}, {"Y", {"b"}}},
                                     {{"Out", {"out"}}}, {});
  auto backward = f::Backward(*fwd, {"a"});
  auto &grad_mul = *backward;
  ASSERT_EQ(grad_mul.type_, "mul_grad");
  ASSERT_EQ(grad_mul.inputs_.size(), 2UL + 1UL + 1UL);
  ASSERT_EQ(grad_mul.outputs_.size(), 2UL);
  ASSERT_EQ(grad_mul.Output(f::GradVarName("X")), f::kEmptyVarName);
  ASSERT_EQ(grad_mul.Output(f::GradVarName("Y")), f::GradVarName("b"));
  ASSERT_EQ(grad_mul.Input(f::GradVarName("Out")), f::GradVarName("out"));
  ASSERT_EQ(grad_mul.Input("X"), "a");
  ASSERT_EQ(grad_mul.Input("Y"), "b");
  ASSERT_EQ(grad_mul.Input("Out"), "out");
}

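// With every output of the middle fc layer in the no-grad set, only the last
// layer's backward stays non-trivial; the other two sub-nets become empty.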
TEST(Backward, linear_net_intermediate_variable_has_no_grad) {
  ops::NetOp net;
  net.AddOp(f::OpRegistry::CreateOp(
      "fc", {{"X", {"x1"}}, {"W", {"w1"}}, {"b", {"b1"}}},
      {{"mul_result", {"mul_out1"}},
       {"add_result", {"add_out1"}},
       {"Out", {"out1"}}},
      {}));
  net.AddOp(f::OpRegistry::CreateOp(
      "fc", {{"X", {"out1"}}, {"W", {"w2"}}, {"b", {"b2"}}},
      {{"mul_result", {"mul_out2"}},
       {"add_result", {"tmp_out2"}},
       {"Out", {"out2"}}},
      {}));
  net.AddOp(f::OpRegistry::CreateOp(
      "fc", {{"X", {"out2"}}, {"W", {"w3"}}, {"b", {"b3"}}},
      {{"mul_result", {"mul_out3"}},
       {"add_result", {"tmp_out3"}},
       {"Out", {"out3"}}},
      {}));
  net.CompleteAddOp();

  auto backward = f::Backward(net, {"mul_out2", "tmp_out2", "out2"});
  ASSERT_TRUE(backward->IsNetOp());
  auto bwd_net = static_cast<ops::NetOp *>(backward.get());
  ASSERT_EQ(bwd_net->ops_.size(), 3UL);
  auto &grad_fc = *bwd_net->ops_[0];

  const char *all = paddle::operators::NetOp::kAll;
  EXPECT_EQ(grad_fc.inputs_[all].size(),
            2UL       /* external input number */
                + 1UL /* external output number */
                + 1UL /* number of gradients of external outputs */
                + 2UL /* internal variable number */);
  EXPECT_EQ(grad_fc.outputs_[all].size(),
            2UL       /* input number of mul */
                + 2UL /* input number of rowwise_add */
                + 1UL /* input number of sigmoid */);
  EXPECT_EQ(bwd_net->ops_[1]->inputs_[all].size(), 0UL);
  EXPECT_EQ(bwd_net->ops_[1]->outputs_[all].size(), 0UL);
  EXPECT_EQ(bwd_net->ops_[2]->inputs_[all].size(), 0UL);
  EXPECT_EQ(bwd_net->ops_[2]->outputs_[all].size(), 0UL);
}