/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License. */
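
// Unit tests for the Backward() transformation declared in
// paddle/framework/backward.h. Most operators are registered with the no-op
// f::NOP implementation, so only the structure of the generated backward
// networks is checked: grad op types, input/output mapping via GradVarName,
// kEmptyVarName for gradients that are not needed, and fill_zeros_like for
// gradients of outputs that are not provided. No numeric computation runs.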

#include "paddle/framework/backward.h"

#include <gtest/gtest.h>
#include "paddle/framework/op_registry.h"
#include "paddle/operators/net_op.h"

namespace paddle {
namespace framework {

using OperatorBase = framework::OperatorBase;
using OpProtoAndCheckerMaker = framework::OpProtoAndCheckerMaker;
using OpProto = framework::OpProto;
using OpAttrChecker = framework::OpAttrChecker;
using Scope = framework::Scope;
using DeviceContext = platform::DeviceContext;

class RowWiseAddOpMaker : public OpProtoAndCheckerMaker {
 public:
  RowWiseAddOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "Input X of Add").AsNoGradient();
    AddInput("b", "Bias of Add").AsNoGradient();
    AddOutput("Out", "Out of Add").AsNoGradient();
    AddComment("Add Op");
  }
};

class MulOpMaker : public OpProtoAndCheckerMaker {
 public:
  MulOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "A");
    AddInput("Y", "B");
    AddOutput("Out", "Out");
    AddComment("Mul");
  }
};

class SigmoidOpMaker : public OpProtoAndCheckerMaker {
 public:
  SigmoidOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "X");
    AddOutput("Out", "Y");
    AddComment("Sigmoid");
  }
};

class NoGradOpMaker : public OpProtoAndCheckerMaker {
 public:
  NoGradOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "X input");
    AddOutput("Out", "Y output");
    AddComment("NoGradOp: same input and output, no gradient");
  }
};

// FcOp builds a small NetOp: mul(X, W), optionally rowwise_add with bias "b",
// then sigmoid. When no bias is given, the unused "add_result" output is
// renamed to kEmptyVarName so it disappears from the op's interface.
class FcOp : public operators::NetOp {
 public:
  FcOp(const std::string &type, const VarNameMap &inputs,
       const VarNameMap &outputs, const AttributeMap &attrs)
      : NetOp(type, inputs, outputs, attrs) {
    AddOp(OpRegistry::CreateOp("mul",
                               {{"X", {Input("X")}}, {"Y", {Input("W")}}},
                               {{"Out", {Output("mul_result")}}}, {}));
    auto input_b = Inputs("b");
    std::string before_act = "mul_result";
    if (input_b.size() != 0) {
      AddOp(OpRegistry::CreateOp(
          "rowwise_add", {{"X", {Output("mul_result")}}, {"b", {input_b[0]}}},
          {{"Out", {Output("add_result")}}}, {}));
      before_act = "add_result";
    } else {
      auto out_varname = Output("add_result");
      if (out_varname != kEmptyVarName) {
        this->Rename(out_varname, kEmptyVarName);
      }
    }

    AddOp(OpRegistry::CreateOp("sigmoid", {{"X", {Output(before_act)}}},
                               {{"Out", {Output("Out")}}}, {}));
    CompleteAddOp(false);
  }
};

class FcOpMaker : public OpProtoAndCheckerMaker {
 public:
  FcOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("X", "x");
    AddInput("W", "w");
    AddInput("b", "b");
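    // mul_result and add_result are marked intermediate: they are internal
    // buffers of the fc network, not user-facing outputs.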
    AddOutput("mul_result", "").AsIntermediate();
    AddOutput("add_result", "").AsIntermediate();
    AddOutput("Out", "");
    AddComment("");
  }
};

class ManyOutputOpMaker : public OpProtoAndCheckerMaker {
 public:
  ManyOutputOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("x", "x");
    AddOutput("y", "y");
    AddOutput("z", "z");
    AddComment("");
  }
};

class FillZeroOpMaker : public OpProtoAndCheckerMaker {
 public:
  FillZeroOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
    AddInput("x", "x");
    AddOutput("out", "out");
    AddComment("");
  }
};
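
// FillZeroOpMaker above defines fill_zeros_like, which Backward() inserts to
// supply a zero gradient (named with kZeroVarSuffix) when the gradient of a
// forward output is not provided; see op_part_of_output_are_not_need below.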

class AddOpMaker : public OpProtoAndCheckerMaker {
 public:
  AddOpMaker(OpProto *proto, OpAttrChecker *op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {
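    // "X" is duplicable (one slot may bind several variables); the "add" op
    // registered with this maker is what the net_shared_weight test expects
    // Backward() to insert to merge gradients reaching the same variable.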
    AddInput("X", "x").AsDuplicable();
    AddOutput("Y", "y");
    AddComment("");
  }
};
}  // namespace framework
}  // namespace paddle

namespace f = paddle::framework;
namespace ops = paddle::operators;
using EnforceNotMet = paddle::platform::EnforceNotMet;
REGISTER_OP(rowwise_add, f::NOP, f::RowWiseAddOpMaker, rowwise_add_grad,
            f::NOP);
REGISTER_OP(mul, f::NOP, f::MulOpMaker, mul_grad, f::NOP);
REGISTER_OP(sigmoid, f::NOP, f::SigmoidOpMaker, sigmoid_grad, f::NOP);
REGISTER_OP_WITHOUT_GRADIENT(nograd, f::NOP, f::NoGradOpMaker);
REGISTER_OP_WITHOUT_GRADIENT(fill_zeros_like, f::NOP, f::FillZeroOpMaker);
REGISTER_OP(add, f::NOP, f::AddOpMaker, add_grad, f::NOP);
REGISTER_OP_WITHOUT_GRADIENT(fc, f::FcOp, f::FcOpMaker);
REGISTER_OP(many_output_op, f::NOP, f::ManyOutputOpMaker, many_output_op_grad,
            f::NOP);

TEST(Backward, simple_op_grad) {
  auto fwd = f::OpRegistry::CreateOp(
      "rowwise_add", {{"X", {"x"}}, {"b", {"b"}}}, {{"Out", {"out"}}}, {});
  ASSERT_NE(fwd, nullptr);
  auto gop = f::OpRegistry::CreateGradOp(*fwd);
  ASSERT_EQ(1UL, gop->inputs_.size());
  ASSERT_EQ("rowwise_add_grad", gop->type_);
  ASSERT_EQ(f::GradVarName("x"), gop->Output(f::GradVarName("X")));
  ASSERT_EQ(f::GradVarName("b"), gop->Output(f::GradVarName("b")));
}

TEST(Backward, simple_op_not_need_grad) {
  auto fwd = f::OpRegistry::CreateOp(
      "rowwise_add", {{"X", {"x"}}, {"b", {"b"}}}, {{"Out", {"out"}}}, {});
  ASSERT_NE(fwd, nullptr);
  auto gop = f::Backward(*fwd, {"x"});
  ASSERT_EQ(gop->Output(f::GradVarName("X")), f::kEmptyVarName);

  auto no_input_gop = f::Backward(*fwd, {"x", "b"});
  ASSERT_NE(no_input_gop, nullptr);
  ASSERT_TRUE(no_input_gop->IsNetOp());
  ASSERT_EQ(0UL,
            std::static_pointer_cast<ops::NetOp>(no_input_gop)->ops_.size());
}

TEST(Backward, net_fc_backward_normal) {
  std::shared_ptr<f::OperatorBase> fwd =
      f::OpRegistry::CreateOp("fc", {{"X", {"x"}}, {"W", {"w"}}, {"b", {"b"}}},
                              {{"mul_result", {"mul_res"}},
                               {"add_result", {"add_re"}},
                               {"Out", {"out"}}},
                              {});
  ASSERT_NE(fwd, nullptr);
  std::shared_ptr<f::OperatorBase> gop = f::Backward(*fwd, {});
  ASSERT_TRUE(gop->IsNetOp());
  auto net = static_cast<ops::NetOp *>(gop.get());

  ASSERT_NO_THROW(net->DebugString());

  ASSERT_EQ(3UL, net->ops_.size());

  f::OperatorBase &d_sigmoid = *net->ops_[0];
  ASSERT_EQ("sigmoid_grad", d_sigmoid.type_);

  f::OperatorBase &d_add = *net->ops_[1];
  ASSERT_EQ("rowwise_add_grad", d_add.type_);

  f::OperatorBase &d_mul = *net->ops_[2];
  ASSERT_EQ("mul_grad", d_mul.type_);
}

TEST(Backward, net_fc_backward_not_have_b) {
  std::shared_ptr<f::OperatorBase> fwd =
      f::OpRegistry::CreateOp("fc", {{"X", {"x"}}, {"W", {"w"}}, {"b", {}}},
                              {{"mul_result", {"mul_res"}},
                               {"add_result", {"add_res"}},
                               {"Out", {"tmp"}}},
                              {});
  ASSERT_NE(fwd, nullptr);
  std::shared_ptr<f::OperatorBase> gop = f::Backward(*fwd, {});
  ASSERT_TRUE(gop->IsNetOp());
  auto net = static_cast<ops::NetOp *>(gop.get());

  ASSERT_NO_THROW(net->DebugString());

  ASSERT_EQ(2UL, net->ops_.size());

  f::OperatorBase &d_sigmoid = *net->ops_[0];
  ASSERT_EQ("sigmoid_grad", d_sigmoid.type_);

  f::OperatorBase &d_mul = *net->ops_[1];
  ASSERT_EQ("mul_grad", d_mul.type_);
}

// Backward over a two-layer fc network where x@GRAD is not needed: gradients
// must still be produced for all weights, biases and the hidden variable.
TEST(Backward, net_input_of_network_not_need_grad) {
  ops::NetOp net;
  net.AddOp(f::OpRegistry::CreateOp(
      "fc", {{"X", {"x"}}, {"W", {"W1"}}, {"b", {"b1"}}},
      {{"mul_result", {"mul_tmp_0"}},
       {"add_result", {"add_tmp_0"}},
       {"Out", {"hidden0"}}},
      {}));
  net.AddOp(f::OpRegistry::CreateOp(
      "fc", {{"X", {"hidden0"}}, {"W", {"W2"}}, {"b", {"b2"}}},
      {{"mul_result", {"mul_tmp_1"}},
       {"add_result", {"add_tmp_1"}},
       {"Out", {"hidden1"}}},
      {}));
  net.CompleteAddOp();
  auto bwd = Backward(net, {"x"});  // x@GRAD is not needed.
  ASSERT_TRUE(bwd->IsNetOp());
  auto bwd_net = static_cast<ops::NetOp *>(bwd.get());

  auto output_vars = bwd_net->OutputVars(true);
  std::unordered_set<std::string> all_outputs =
      std::unordered_set<std::string>(output_vars.begin(), output_vars.end());
  all_outputs.erase(f::kEmptyVarName);

  for (auto &out : {"W1", "b1", "hidden0", "W2", "b2"}) {
    ASSERT_NE(all_outputs.find(f::GradVarName(out)), all_outputs.end());
  }

  // X@GRAD is not generated.
  ASSERT_EQ(all_outputs.find(f::GradVarName("X")), all_outputs.end());

  ASSERT_EQ(2UL, bwd_net->ops_.size());
  ASSERT_TRUE(bwd_net->ops_[1]->IsNetOp());
  auto first_fc_grad = static_cast<ops::NetOp *>(bwd_net->ops_[1].get());
  ASSERT_EQ(3UL, first_fc_grad->ops_.size());
  ASSERT_EQ(f::kEmptyVarName,
            first_fc_grad->ops_[2]->Output(f::GradVarName("X")));
}

// "w" is shared by both mul ops, so the backward net must end with an "add"
// op that accumulates the two gradients of "w".
TEST(Backward, net_shared_weight) {
  ops::NetOp net;
  net.AddOp(f::OpRegistry::CreateOp("mul", {{"X", {"x"}}, {"Y", {"w"}}},
                                    {{"Out", {"out"}}}, {}));
  net.AddOp(f::OpRegistry::CreateOp("mul", {{"X", {"out"}}, {"Y", {"w"}}},
                                    {{"Out", {"FinalOut"}}}, {}));
  net.CompleteAddOp();

  auto bwd = f::Backward(net, {});
  ASSERT_TRUE(bwd->IsNetOp());
  auto bwd_net = static_cast<ops::NetOp *>(bwd.get());
  ASSERT_EQ(3UL, bwd_net->ops_.size());
  ASSERT_EQ("add", bwd_net->ops_[2]->type_);
}

TEST(Backward, op_register_grad_not_for_network) {
  auto fwd =
      f::OpRegistry::CreateOp("fc", {{"X", {"x"}}, {"W", {"w"}}, {"b", {"b"}}},
                              {{"mul_result", {"mul_out"}},
                               {"add_result", {"add_out"}},
                               {"Out", {"out1"}}},
                              {{"temporary_index", std::vector<int>{0, 1}}});

  ASSERT_THROW(f::OpRegistry::CreateGradOp(*fwd), EnforceNotMet);
}

TEST(Backward, op_all_input_are_not_need) {
  auto fwd = f::OpRegistry::CreateOp(
      "rowwise_add", {{"X", {"x"}}, {"b", {"b"}}}, {{"Out", {"out"}}}, {});
  auto backward = f::Backward(*fwd, {"x", "b"});
  ASSERT_TRUE(backward->IsNetOp());
  auto net = static_cast<ops::NetOp *>(backward.get());
  ASSERT_TRUE(net->ops_.empty());
}

TEST(Backward, op_all_output_are_not_need) {
  auto fwd = f::OpRegistry::CreateOp(
      "rowwise_add", {{"X", {"x"}}, {"b", {"b"}}}, {{"Out", {"out"}}}, {});
  auto backward = f::Backward(*fwd, {"out"});
  ASSERT_TRUE(backward->IsNetOp());
  auto net = static_cast<ops::NetOp *>(backward.get());
  ASSERT_TRUE(net->ops_.empty());
}

// Output "Z" is excluded from the backward pass, so a fill_zeros_like op must
// supply a zero gradient for it before many_output_op_grad runs.
TEST(Backward, op_part_of_output_are_not_need) {
  auto fwd = f::OpRegistry::CreateOp("many_output_op", {{"x", {"X"}}},
                                     {{"y", {"Y"}}, {"z", {"Z"}}}, {});
  auto backward = f::Backward(*fwd, {"Z"});
  ASSERT_TRUE(backward->IsNetOp());
  auto net = static_cast<ops::NetOp *>(backward.get());
  ASSERT_EQ(net->ops_.size(), 2UL);

  auto &fill_zero = *net->ops_[0];
  ASSERT_EQ("fill_zeros_like", fill_zero.type_);
  ASSERT_EQ(1UL, fill_zero.Inputs("Src").size());
  ASSERT_EQ("Z", fill_zero.Input("Src"));
  ASSERT_EQ(1UL, fill_zero.Outputs("Dst").size());
  ASSERT_EQ(std::string("Z") + f::kZeroVarSuffix, fill_zero.Output("Dst"));

  auto &d_many_out = *net->ops_[1];
  ASSERT_EQ("many_output_op_grad", d_many_out.type_);
  ASSERT_EQ(1UL + 2UL + 2UL, d_many_out.inputs_.size());  // I/O/OG
  ASSERT_EQ(std::string("Z") + f::kZeroVarSuffix,
            d_many_out.Input(f::GradVarName("z")));
  ASSERT_EQ(f::GradVarName("Y"), d_many_out.Input(f::GradVarName("y")));
  ASSERT_EQ(f::GradVarName("X"), d_many_out.Output(f::GradVarName("x")));
}

TEST(Backward, op_part_of_input_are_not_need) {
  auto fwd = f::OpRegistry::CreateOp("mul", {{"X", {"a"}}, {"Y", {"b"}}},
                                     {{"Out", {"out"}}}, {});
  auto backward = f::Backward(*fwd, {"a"});
  auto &grad_mul = *backward;
  ASSERT_EQ(grad_mul.type_, "mul_grad");
  ASSERT_EQ(grad_mul.inputs_.size(), 2UL + 1UL + 1UL);
  ASSERT_EQ(grad_mul.outputs_.size(), 2UL);
  ASSERT_EQ(grad_mul.Output(f::GradVarName("X")), f::kEmptyVarName);
  ASSERT_EQ(grad_mul.Output(f::GradVarName("Y")), f::GradVarName("b"));
  ASSERT_EQ(grad_mul.Input(f::GradVarName("Out")), f::GradVarName("out"));
  ASSERT_EQ(grad_mul.Input("X"), "a");
  ASSERT_EQ(grad_mul.Input("Y"), "b");
  ASSERT_EQ(grad_mul.Input("Out"), "out");
}

// Every variable produced by the middle fc layer is excluded, so no gradient
// can flow past it: only the last fc layer gets a real grad sub-net, and the
// remaining grad ops are left empty.
TEST(Backward, linear_net_intermediate_variable_has_no_grad) {
  ops::NetOp net;
  net.AddOp(f::OpRegistry::CreateOp(
      "fc", {{"X", {"x1"}}, {"W", {"w1"}}, {"b", {"b1"}}},
      {{"mul_result", {"mul_out1"}},
       {"add_result", {"add_out1"}},
       {"Out", {"out1"}}},
      {}));
  net.AddOp(f::OpRegistry::CreateOp(
      "fc", {{"X", {"out1"}}, {"W", {"w2"}}, {"b", {"b2"}}},
      {{"mul_result", {"mul_out2"}},
       {"add_result", {"tmp_out2"}},
       {"Out", {"out2"}}},
      {}));
  net.AddOp(f::OpRegistry::CreateOp(
      "fc", {{"X", {"out2"}}, {"W", {"w3"}}, {"b", {"b3"}}},
      {{"mul_result", {"mul_out3"}},
       {"add_result", {"tmp_out3"}},
       {"Out", {"out3"}}},
      {}));
  net.CompleteAddOp();

  auto backward = f::Backward(net, {"mul_out2", "tmp_out2", "out2"});
  ASSERT_TRUE(backward->IsNetOp());
  auto bwd_net = static_cast<ops::NetOp *>(backward.get());
  ASSERT_EQ(bwd_net->ops_.size(), 3UL);
  auto &grad_fc = *bwd_net->ops_[0];

  const char *all = paddle::operators::NetOp::kAll;
  EXPECT_EQ(grad_fc.inputs_[all].size(),
            2UL       /* external input number */
                + 1UL /* external output number */
                + 1UL /* number of gradients of external outputs */
                + 2UL /* internal variable number */);
  EXPECT_EQ(grad_fc.outputs_[all].size(),
            2UL       /* input number of mul */
                + 2UL /* input number of rowwise_add */
                + 1UL /* input number of sigmoid */);
  EXPECT_EQ(bwd_net->ops_[1]->inputs_[all].size(), 0UL);
  EXPECT_EQ(bwd_net->ops_[1]->outputs_[all].size(), 0UL);
  EXPECT_EQ(bwd_net->ops_[2]->inputs_[all].size(), 0UL);
  EXPECT_EQ(bwd_net->ops_[2]->outputs_[all].size(), 0UL);
}