/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License. */

#include "paddle/framework/backward.h"

#include <deque>
#include <list>
#include <memory>

#include "paddle/framework/block_desc.h"
#include "paddle/framework/op_registry.h"
#include "paddle/operators/net_op.h"
#include "paddle/operators/recurrent_op.h"

namespace paddle {
namespace framework {

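// Builds the gradient operator for `op`: the forward op's inputs, outputs,
// type and attributes are copied into an OpDescBind, the registered
// GradOpMaker produces the gradient op descriptions, and each description is
// instantiated. If more than one gradient op is produced, they are wrapped
// into a single NetOp.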
static inline std::unique_ptr<OperatorBase> CreateGradOp(
    const OperatorBase& op) {
  OpDescBind op_desc;
  op_desc.SetInputMap(op.Inputs());
  op_desc.SetOutputMap(op.Outputs());
  op_desc.SetType(op.Type());
  op_desc.SetAttrMap(op.Attrs());
  auto& info = OpInfoMap::Instance().Get(op.Type());
  auto grad_descs = info.GradOpMaker()(op_desc);
  std::vector<std::unique_ptr<OperatorBase>> grad_ops;
  grad_ops.reserve(grad_descs.size());
  std::transform(grad_descs.begin(), grad_descs.end(),
                 std::back_inserter(grad_ops),
                 [](const std::unique_ptr<OpDescBind>& grad_desc) {
                   return OpRegistry::CreateOp(*grad_desc);
                 });
  PADDLE_ENFORCE(!grad_ops.empty());
  if (grad_ops.size() == 1) {
    return std::move(grad_ops[0]);
  } else {
    auto net_op = new operators::NetOp();
    for (auto& grad_op : grad_ops) {
      net_op->AppendOp(std::move(grad_op));
    }
    net_op->CompleteAddOp();
    return std::unique_ptr<OperatorBase>(net_op);
  }
}

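// Applies `callback` to every variable name in `names` (a map from parameter
// name to a list of variable names). Stops as soon as the callback returns
// true.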
template <typename Map, typename T>
static void ForEachVarName(const Map& names, T callback) {
  for (auto& name : names) {
    for (auto& n : name.second) {
      if (callback(n)) return;
    }
  }
}

// Returns whether every name in `names`, with `suffix` appended, is in `set`.
static bool AllInSet(
    const std::map<std::string, std::vector<std::string>>& names,
    const std::string& suffix, const std::unordered_set<std::string>& set) {
  bool all_in_set = true;
  ForEachVarName(names, [&all_in_set, &set, &suffix](const std::string& n) {
    all_in_set = set.find(n + suffix) != set.end();
    return !all_in_set;
  });
  return all_in_set;
}

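// Returns an empty NetOp that acts as a placeholder when no gradient
// computation is needed.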
static std::unique_ptr<OperatorBase> NOP() {
  auto net_op = new operators::NetOp();
  net_op->SetType("@NOP@");
  net_op->CompleteAddOp();
  return std::unique_ptr<OperatorBase>(net_op);
}

//  Get the backward operator from a forward operator; this is a recursive
//  implementation.
//
//  no_grad_names contains the names of variables for which no gradient needs
//  to be calculated.
//
//  uniq_id is a unique index used inside the recursive calls to
//  BackwardRecursive. Use `uid = uniq_id++;` to get the unique index, and
//  pass `uniq_id` through the recursive calls.
//
//  Returns the backward operator. In a simple situation it may be a single
//  operator; in a complex situation it may be a NetOp.
//
//  See Backward.h for details
static std::unique_ptr<OperatorBase> BackwardRecursive(
    const OperatorBase& forwardOp,
    std::unordered_set<std::string>& no_grad_names, size_t& uniq_id) {
  //  If none of the input gradients of the forward operator need to be
  //  calculated, just return a NOP. We do not return a null pointer because a
  //  NOP takes little time to compute, and it is useful for simplifying logic.
  if (AllInSet(forwardOp.Inputs() /*names*/, kGradVarSuffix /*suffix*/,
               no_grad_names /*set*/)) {
    return NOP();
  }

  //  If none of the output gradients of the forward operator need to be
  //  calculated, then none of its input gradients can be computed at all, so
  //  we put them into the `no_grad_names` set and return a NOP.
  if (AllInSet(forwardOp.Outputs() /*names*/, kGradVarSuffix /*suffix*/,
               no_grad_names /*set*/)) {
    ForEachVarName(forwardOp.Inputs(),
                   [&no_grad_names](const std::string& name) -> bool {
                     no_grad_names.insert(GradVarName(name));
                     return false;
                   });
    return NOP();
  }

  // Returned gradient network
  auto net = std::unique_ptr<operators::NetOp>(new operators::NetOp());

  if (forwardOp.IsNetOp()) {
    // Because forwardOp is a net op, the static_cast below is safe.
    auto& forwardNet = static_cast<const operators::NetOp&>(forwardOp);

    // Map from an output gradient variable name to the indices (in the
    // backward net's ops_) of the operators that generate that variable.
    std::unordered_map<std::string, std::vector<size_t>> dup_output_ops;

    size_t local_op_id = 0;
    // Travel forwardNet in reverse order and collect all duplicate outputs.
    for (auto it = forwardNet.ops_.rbegin(); it != forwardNet.ops_.rend();
         ++it, ++local_op_id) {
      auto& fwd = *it;
      auto bwd = BackwardRecursive(*fwd, no_grad_names, uniq_id);
      ForEachVarName(bwd->Outputs(),
                     [&dup_output_ops, local_op_id](const std::string& out) {
                       dup_output_ops[out].emplace_back(local_op_id);
                       return false;
                     });
      net->AppendOp(std::move(bwd));
    }
    // Get a unique ID for this call of BackwardRecursive.
    auto uid = uniq_id++;
    // TODO(dzh): more comment
    // Multiple operators may share the same output (y, for example); during the
    // backward pass they would all overwrite that same y variable, so special
    // handling is needed. For each duplicate output, rename it to an alias
    // (the original name plus an offset), append an `add` op for its operator,
    // and finally sum all the alias variables into the final output variable y.
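    // For example, if the backward ops at offsets 2 and 5 both write y, their
    // outputs are renamed below to y@RENAME@<uid>@0 and y@RENAME@<uid>@1, and
    // a summation that writes back into y is inserted right after the last of
    // them.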
    using Pos = std::pair<size_t, std::unique_ptr<OperatorBase>>;
    std::list<Pos> insert_position;
    for (auto& dup_output_op : dup_output_ops) {
      const std::string& name = dup_output_op.first;
      // duplicate @Empty@ outputs don't need to be added
      if (name == kEmptyVarName) continue;

      auto& dup_op = dup_output_op.second;
      // no duplicate output
      if (dup_op.size() == 1) continue;

      // process the duplicate outputs
      std::vector<std::string> dup_outputs;
      for (size_t i = 0; i < dup_op.size(); ++i) {
        // rename each duplicate output to an alias
        auto op_offset = dup_op[i];
        dup_outputs.push_back(name + "@RENAME@" + std::to_string(uid) + "@" +
                              std::to_string(i));
        net->ops_[op_offset]->Rename(name, dup_outputs.back());
      }
      // collect all the offsets at which to append an `add` op for each alias
      //
      // one variable is shared between multiple operators.
      // insert the add operators one by one, then add the result to the output
      for (size_t output_idx = 0; output_idx < dup_outputs.size() - 1;
           ++output_idx) {
        auto insert_add_x = dup_outputs[output_idx];
        auto insert_add_y = dup_outputs[output_idx + 1];
        auto insert_add_out = name + "@SHARED@" + std::to_string(output_idx);
        // first add op inserted
        if (output_idx == dup_outputs.size() - 2) {
          insert_add_out = name;
        }
        if (output_idx != 0) {
          insert_add_y = name + "@SHARED@" + std::to_string(output_idx - 1);
        }
        insert_position.push_back(
            {dup_op.back(),
             OpRegistry::CreateOp("sum", {{"X", {insert_add_x, insert_add_y}}},
                                  {{"Out", {insert_add_out}}}, {})});
      }
    }

    // make sure the inserted `add` ops follow the BFS order.
    insert_position.sort(
        [](const Pos& l, const Pos& r) { return l.first > r.first; });

    for (auto& pos : insert_position) {
      net->InsertOp(pos.first + 1, std::move(pos.second));
    }
  } else {
    std::unique_ptr<OperatorBase> grad_op(CreateGradOp(forwardOp));

    ForEachVarName(grad_op->Inputs(), [&no_grad_names, &net, &grad_op](
                                          const std::string& grad_input) {
      if (no_grad_names.count(grad_input)) {
        // +1 for \0
        std::string prefix = grad_input.substr(
            0, grad_input.size() - sizeof(kGradVarSuffix) / sizeof(char) + 1);
        grad_op->Rename(grad_input, prefix + kZeroVarSuffix);

        // If part of this operator's input gradient is not calculated, fill
        // that input gradient with zero-valued variables.
        net->AppendOp(OpRegistry::CreateOp("fill_zeros_like", {{"X", {prefix}}},
                                           {{"Y", {grad_input}}}, {}));
      }
      return false;
    });

    ForEachVarName(grad_op->Outputs(),
                   [&no_grad_names, &grad_op](const std::string& grad_output) {
                     if (no_grad_names.count(grad_output)) {
                       grad_op->Rename(grad_output, kEmptyVarName);
                     }
                     return false;
                   });

    // process recurrent gradient op as a special operator.
    if (forwardOp.Type() == "recurrent") {
      // NOTE: clean up the cyclic call somewhere (RNN's stepnet contains
      // itself), or this will result in an infinite loop.
      const auto& rnnop =
          *static_cast<const operators::RecurrentOp*>(&forwardOp);
      auto rnn_grad_op =
          static_cast<operators::RecurrentGradientOp*>(grad_op.get());
      const auto& stepnet_op =
          *static_cast<const OperatorBase*>(&rnnop.stepnet());
      // create stepnet's gradient op
      rnn_grad_op->set_stepnet(
          BackwardRecursive(stepnet_op, no_grad_names, uniq_id));
    }

    if (net->ops_.empty()) {  // Currently no aux op is added to the network
      return grad_op;
    }
    net->AppendOp(std::move(grad_op));
  }
  net->SetType("@GENERATED_BACKWARD@");
  net->CompleteAddOp();
  return std::unique_ptr<OperatorBase>(
      static_cast<OperatorBase*>(net.release()));
}

// See header for comments
std::unique_ptr<OperatorBase> Backward(
    const OperatorBase& forwardOp,
    const std::unordered_set<std::string>& no_grad_vars) {
  std::unordered_set<std::string> no_grad_names;
  no_grad_names.reserve(no_grad_vars.size() + 1);

  no_grad_names.insert(std::string(kEmptyVarName) + kGradVarSuffix);

  for (auto& name : no_grad_vars) {
    no_grad_names.insert(name + kGradVarSuffix);
  }
  size_t uid = 0;
  return BackwardRecursive(forwardOp, no_grad_names, uid);
}

// ====================================  //

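// Returns whether the gradient variable of every name in `names` is in `set`.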
static bool AllGradInSet(const std::vector<std::string>& names,
                         const std::unordered_set<std::string>& set) {
  for (const std::string& name : names) {
    if (!set.count(GradVarName(name))) {
      return false;
    }
  }
  return true;
}

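// Creates the gradient op descriptions for a single forward op description.
// Returns an empty vector when no gradient needs to be calculated; otherwise
// returns the gradient ops, preceded by the fill_zeros_like ops needed for
// gradient inputs listed in `no_grad_vars`.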
std::vector<std::unique_ptr<OpDescBind>> MakeOpGrad(
    const std::unique_ptr<OpDescBind>& op_desc,
    std::unordered_set<std::string>& no_grad_vars) {
  std::vector<std::unique_ptr<OpDescBind>> grad_op_descs;
  // None of the input gradients of the forward operator need to be calculated.
  const std::vector<std::string>& inputs = op_desc->InputArgumentNames();
  if (AllGradInSet(inputs, no_grad_vars)) {
    return grad_op_descs;  // empty vector
  }
  // None of the output gradients of the forward operator need to be calculated.
  const std::vector<std::string>& outputs = op_desc->OutputArgumentNames();
  if (AllGradInSet(outputs, no_grad_vars)) {
    for (const std::string& name : inputs) {
      no_grad_vars.insert(GradVarName(name));
    }
    return grad_op_descs;  // empty vector
  }

  grad_op_descs = OpRegistry::CreateGradOpDescs(op_desc.get());

  std::list<std::unique_ptr<OpDescBind>> pending_fill_zeros_ops;
  for (auto& desc : grad_op_descs) {
    for (const std::string& in_name : desc->InputArgumentNames()) {
      if (no_grad_vars.count(in_name)) {
        std::string prefix = in_name.substr(
            0, in_name.size() - sizeof(kGradVarSuffix) / sizeof(char) + 1);
        std::string new_name = prefix + kZeroVarSuffix;
        desc->Rename(in_name, new_name);
        std::unique_ptr<OpDescBind> fill_zeros_op(new OpDescBind(
            "fill_zeros_like", {{"X", {prefix}}}, {{"Y", {new_name}}}, {}));
        pending_fill_zeros_ops.push_back(std::move(fill_zeros_op));
      }
    }
    for (const std::string& out_name : desc->OutputArgumentNames()) {
      if (no_grad_vars.count(out_name)) {
        desc->Rename(out_name, kEmptyVarName);
      }
    }
  }

  for (auto& p : pending_fill_zeros_ops) {
    grad_op_descs.insert(grad_op_descs.begin(), std::move(p));
  }
  return grad_op_descs;
}

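// Builds the backward op descriptions for block `block_idx`, traversing the
// forward ops in reverse order. A recurrent op gets its own backward block,
// and outputs written by more than one gradient op are renamed and merged
// back with an inserted `sum` op.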
std::vector<std::unique_ptr<OpDescBind>> MakeBlockBackward(
    ProgramDescBind& program_desc, int block_idx,
    std::unordered_set<std::string>& no_grad_vars) {
  BlockDescBind* cur_block = program_desc.Block(block_idx);
  std::deque<std::unique_ptr<OpDescBind>>& op_descs = cur_block->ops_;
  std::unordered_map<std::string, std::vector<size_t>> dup_out_ops;
  size_t grad_desc_idx = 0;
  std::vector<std::unique_ptr<OpDescBind>> backward_descs;
  for (auto it = op_descs.rbegin(); it != op_descs.rend(); ++it) {
    std::vector<std::unique_ptr<OpDescBind>> op_grads =
        MakeOpGrad(*it, no_grad_vars);

    if ((*it)->Type() == "recurrent") {
      PADDLE_ENFORCE_EQ(
          op_grads.size(), size_t(1),
          "rnn_op's gradient process should contain only one op.");
      int step_block_idx = (*it)->GetBlockAttr("step_block");
      auto backward_block_op_descs =
          MakeBlockBackward(program_desc, step_block_idx, no_grad_vars);
      BlockDescBind* backward_block = program_desc.AppendBlock(*cur_block);
      for (auto& ptr : backward_block_op_descs) {
        backward_block->ops_.push_back(std::move(ptr));
      }
      op_grads[0]->SetBlockAttr("step_block", *backward_block);
    }

    for (const auto& desc : op_grads) {
      for (const std::string& out_name : desc->OutputArgumentNames()) {
        dup_out_ops[out_name].emplace_back(grad_desc_idx);
      }
      ++grad_desc_idx;
    }
    std::transform(
        op_grads.begin(), op_grads.end(), std::back_inserter(backward_descs),
        [](std::unique_ptr<OpDescBind>& ptr) { return std::move(ptr); });
  }
  // Check whether some variables are written more than once
  std::list<std::pair<size_t, std::unique_ptr<OpDescBind>>> pending_sum_ops;
  for (const auto& dup : dup_out_ops) {
    const std::string& out_name = dup.first;
    const std::vector<size_t> dup_op = dup.second;
    if (out_name != kEmptyVarName && dup_op.size() > 1) {
      std::vector<std::string> sum_op_inputs;
      for (size_t i = 0; i < dup_op.size(); ++i) {
        std::string new_name = out_name + "@RENAME@" + std::to_string(i);
        backward_descs[dup_op[i]]->Rename(out_name, new_name);
        sum_op_inputs.emplace_back(new_name);
      }
      std::unique_ptr<OpDescBind> sum_op(new OpDescBind(
          "sum", {{"X", sum_op_inputs}}, {{"Out", {out_name}}}, {}));
      pending_sum_ops.push_back({dup_op.back(), std::move(sum_op)});
    }
  }
  pending_sum_ops.sort(
      [](const std::pair<size_t, std::unique_ptr<OpDescBind>>& a,
         const std::pair<size_t, std::unique_ptr<OpDescBind>>& b) {
        return a.first > b.first;
      });
  for (auto& p : pending_sum_ops) {
    backward_descs.insert(backward_descs.begin() + p.first + 1,
                          std::move(p.second));
  }
  return backward_descs;
}

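// Appends the generated backward op descriptions for the whole program to the
// end of its root block.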
void AppendBackward(ProgramDescBind& program_desc,
                    const std::unordered_set<std::string>& no_grad_vars) {
  std::unordered_set<std::string> no_grad_var_names;
  no_grad_var_names.reserve(no_grad_vars.size() + 1);
  no_grad_var_names.insert(std::string(kEmptyVarName) + kGradVarSuffix);
  for (auto& name : no_grad_vars) {
    no_grad_var_names.insert(GradVarName(name));
  }
  const int root_block_idx = 0;
  auto backward_op_descs =
      MakeBlockBackward(program_desc, root_block_idx, no_grad_var_names);
  auto& forw_op_descs = program_desc.Block(root_block_idx)->ops_;
  for (auto& ptr : backward_op_descs) {
    forw_op_descs.push_back(std::move(ptr));
  }
}

}  // namespace framework
}  // namespace paddle