/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License. */

#include "paddle/framework/backward.h"
#include "paddle/operators/net_op.h"

#include <list>
#include <memory>

#include "paddle/framework/op_registry.h"
#include "paddle/operators/recurrent_op.h"

namespace paddle {
namespace framework {

static inline std::unique_ptr<OperatorBase> CreateGradOp(
    const OperatorBase& op) {
  OpDescBind op_desc;
  op_desc.SetInputMap(op.Inputs());
  op_desc.SetOutputMap(op.Outputs());
  op_desc.SetType(op.Type());
  op_desc.SetAttrMap(op.Attrs());
  auto& info = OpInfoMap::Instance().Get(op.Type());
  auto grad_descs = info.GradOpMaker()(op_desc);
  std::vector<std::unique_ptr<OperatorBase>> grad_ops;
  grad_ops.reserve(grad_descs.size());
  std::transform(grad_descs.begin(), grad_descs.end(),
                 std::back_inserter(grad_ops),
                 [](const std::unique_ptr<OpDescBind>& grad_desc) {
                   return OpRegistry::CreateOp(grad_desc.get());
                 });
  PADDLE_ENFORCE_GT(grad_ops.size(), 0);
  if (grad_ops.size() == 1) {
    return std::move(grad_ops[0]);
  } else {
    auto net_op = new operators::NetOp();
    for (auto& grad_op : grad_ops) {
      net_op->AppendOp(std::move(grad_op));
    }
    net_op->CompleteAddOp();
    return std::unique_ptr<OperatorBase>(net_op);
  }
}
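// Illustrative note: for a forward "mul" op with inputs {X, W} and output
// {Out}, the registered GradOpMaker would typically emit a single "mul_grad"
// desc, and CreateGradOp above returns that operator directly; if several
// grad descs were emitted, they would be wrapped in one NetOp. The op and
// variable names here are hypothetical.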

template <typename Map, typename T>
static void ForEachVarName(const Map& names, T callback) {
  for (auto& name : names) {
    for (auto& n : name.second) {
      if (callback(n)) return;
    }
  }
}
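// Example (illustrative): ForEachVarName(op.Outputs(), cb) invokes cb once
// per output variable name; if cb returns true, iteration stops early.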

// Returns whether every name in `names`, with `suffix` appended, is in `set`.
static bool AllInSet(
    const std::map<std::string, std::vector<std::string>>& names,
    const std::string& suffix, const std::unordered_set<std::string>& set) {
  bool all_in_set = true;
  ForEachVarName(names, [&all_in_set, &set, &suffix](const std::string& n) {
    all_in_set = set.find(n + suffix) != set.end();
    return !all_in_set;
  });
  return all_in_set;
}
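// For instance, AllInSet(op.Inputs(), kGradVarSuffix, no_grad_names) is true
// iff every input name with "@GRAD" appended (assuming kGradVarSuffix ==
// "@GRAD") appears in no_grad_names.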

static std::unique_ptr<OperatorBase> NOP() {
  auto net_op = new operators::NetOp();
  net_op->SetType("@NOP@");
  net_op->CompleteAddOp();
  return std::unique_ptr<OperatorBase>(net_op);
}

//  Get the backward operator for a forward operator; a recursive
//  implementation.
//
//  no_grad_names is the set of gradient variable names whose gradients do not
//  need to be calculated.
//
//  uniq_id is a unique index used inside the recursive calls of
//  BackwardRecursive. Use `uid = uniq_id++;` to get a unique index, and pass
//  `uniq_id` through the recursive calls.
//
//  Returns the backward operator. In a simple situation it may be a plain
//  operator; in a complex situation it may be a NetOp.
//
//  See Backward.h for details
static std::unique_ptr<OperatorBase> BackwardRecursive(
    const OperatorBase& forwardOp,
    std::unordered_set<std::string>& no_grad_names, size_t& uniq_id) {
  //  If none of the input gradients of the forward operator needs to be
  //  calculated, just return a NOP. We do not return a null pointer because a
  //  NOP costs almost nothing to run, and it keeps the logic simple.
  if (AllInSet(forwardOp.Inputs() /*names*/, kGradVarSuffix /*suffix*/,
               no_grad_names /*set*/)) {
    return NOP();
  }

  //  If none of the output gradients of the forward operator needs to be
  //  calculated, then none of its input gradients can be computed at all; put
  //  them into the `no_grad_names` set and return a NOP.
  if (AllInSet(forwardOp.Outputs() /*names*/, kGradVarSuffix /*suffix*/,
               no_grad_names /*set*/)) {
    ForEachVarName(forwardOp.Inputs(),
                   [&no_grad_names](const std::string& name) -> bool {
                     no_grad_names.insert(GradVarName(name));
                     return false;
                   });
    return NOP();
  }

  // Returned gradient network
  auto net = std::unique_ptr<operators::NetOp>(new operators::NetOp());

  if (forwardOp.IsNetOp()) {
    // Because forwardOp is a net op, it can safely be static_cast to NetOp.
    auto& forwardNet = static_cast<const operators::NetOp&>(forwardOp);

    // Map from an output gradient variable name to the indices (in the
    // backward net's ops_) of the operators that generate that variable.
    std::unordered_map<std::string, std::vector<size_t>> dup_output_ops;

    size_t local_op_id = 0;
    // Traverse forwardNet in reverse order and collect all duplicate outputs.
    for (auto it = forwardNet.ops_.rbegin(); it != forwardNet.ops_.rend();
         ++it, ++local_op_id) {
      auto& fwd = *it;
      auto bwd = BackwardRecursive(*fwd, no_grad_names, uniq_id);
      ForEachVarName(bwd->Outputs(),
                     [&dup_output_ops, local_op_id](const std::string& out) {
                       dup_output_ops[out].emplace_back(local_op_id);
                       return false;
                     });
      net->AppendOp(std::move(bwd));
    }
    // Get a unique ID for this recursive call; it is embedded in the @RENAME@
    // aliases created below.
    auto uid = uniq_id++;
    // TODO(dzh): more comment
    // Multiple operators may write the same output (y, for example); in the
    // backward pass their gradient ops would all overwrite the same y
    // gradient variable, so special handling is needed: each duplicate output
    // is renamed to an alias (the original name plus an offset), and `sum`
    // ops are inserted to accumulate all the aliases back into the original
    // output variable y.
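    //
    // Illustrative example: if two ops in the backward net both write x@GRAD,
    // they are renamed to write x@GRAD@RENAME@<uid>@0 and x@GRAD@RENAME@<uid>@1,
    // and a `sum` op producing x@GRAD from those aliases is inserted right
    // after the later of the two. (Names assume kGradVarSuffix == "@GRAD" and
    // are for illustration only.)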
    using Pos = std::pair<size_t, std::unique_ptr<OperatorBase>>;
    std::list<Pos> insert_position;
    for (auto& dup_output_op : dup_output_ops) {
      const std::string& name = dup_output_op.first;
      // duplicate kEmptyVarName outputs do not need to be handled
      if (name == kEmptyVarName) continue;

      auto& dup_op = dup_output_op.second;
      // no duplicate output
      if (dup_op.size() == 1) continue;

      // process the duplicate outputs
      std::vector<std::string> dup_outputs;
      for (size_t i = 0; i < dup_op.size(); ++i) {
        // rename each duplicate output to an alias
        auto op_offset = dup_op[i];
        dup_outputs.push_back(name + "@RENAME@" + std::to_string(uid) + "@" +
                              std::to_string(i));
        net->ops_[op_offset]->Rename(name, dup_outputs.back());
      }
      // Collect the offsets at which to insert the `sum` ops that merge the
      // aliases back into the original output.
      //
      // One variable is shared by multiple operators; insert the sum
      // operators one by one, chaining each partial result into the next
      // until the last sum writes to the original output name.
      for (size_t output_idx = 0; output_idx < dup_outputs.size() - 1;
           ++output_idx) {
        auto insert_add_x = dup_outputs[output_idx + 1];
        auto insert_add_y = dup_outputs[output_idx];
        auto insert_add_out = name + "@SHARED@" + std::to_string(output_idx);
        // the last sum of the chain writes directly to the original name
        if (output_idx == dup_outputs.size() - 2) {
          insert_add_out = name;
        }
        // later sums accumulate onto the previous partial result
        if (output_idx != 0) {
          insert_add_y = name + "@SHARED@" + std::to_string(output_idx - 1);
        }
        insert_position.push_back(
            {dup_op.back(),
             OpRegistry::CreateOp("sum",
                                  {{"X", {insert_add_x, insert_add_y}}},
                                  {{"Out", {insert_add_out}}}, {})});
      }
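      // Sketch of the chain built above for three aliases d0, d1, d2 of y
      // (names are illustrative only):
      //   sum(d1, d0)          -> y@SHARED@0
      //   sum(d2, y@SHARED@0)  -> y   (the last link writes the real name)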
    }

    // Insert at the largest offsets first so that earlier insertions do not
    // invalidate the offsets of the remaining ones.
    insert_position.sort(
        [](const Pos& l, const Pos& r) { return l.first > r.first; });

    for (auto& pos : insert_position) {
      net->InsertOp(pos.first + 1, std::move(pos.second));
    }
  } else {
    std::unique_ptr<OperatorBase> grad_op(CreateGradOp(forwardOp));
    PADDLE_ENFORCE(grad_op != nullptr);

    ForEachVarName(grad_op->Inputs(), [&no_grad_names, &net, &grad_op](
                                          const std::string& grad_input) {
      if (no_grad_names.count(grad_input)) {
        // sizeof(kGradVarSuffix) counts the trailing '\0', hence the +1.
        std::string prefix = grad_input.substr(
            0, grad_input.size() - sizeof(kGradVarSuffix) / sizeof(char) + 1);
        grad_op->Rename(grad_input, prefix + kZeroVarSuffix);

        // If part of an operator's input gradients is not calculated, feed
        // that input with a zero-filled variable instead. The output name
        // matches the renamed input above (prefix + kZeroVarSuffix).
        net->AppendOp(OpRegistry::CreateOp(
            "fill_zeros_like", {{"X", {prefix}}},
            {{"Y", {prefix + kZeroVarSuffix}}}, {}));
      }
      return false;
    });
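    // For example (assuming kGradVarSuffix == "@GRAD" and kZeroVarSuffix ==
    // "@ZERO"): if "x@GRAD" is in no_grad_names, grad_op's input "x@GRAD" is
    // renamed to "x@ZERO" and a fill_zeros_like op producing "x@ZERO" from
    // "x" is appended, so grad_op reads zeros of the right shape.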

    ForEachVarName(grad_op->Outputs(),
                   [&no_grad_names, &grad_op](const std::string& grad_output) {
                     if (no_grad_names.count(grad_output)) {
                       grad_op->Rename(grad_output, kEmptyVarName);
                     }
                     return false;
                   });
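    // Gradient outputs listed in no_grad_names are renamed to kEmptyVarName,
    // so the operator simply skips writing them.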

    // process recurrent gradient op as a special operator.
    if (forwardOp.Type() == "recurrent") {
      // NOTE: clean up the cyclic call somewhere (RNN's stepnet contains
      // itself), or this will result in an infinite loop.
      const auto& rnnop =
          *static_cast<const operators::RecurrentOp*>(&forwardOp);
      auto rnn_grad_op =
          static_cast<operators::RecurrentGradientOp*>(grad_op.get());
      const auto& stepnet_op =
          *static_cast<const OperatorBase*>(&rnnop.stepnet());
      // create stepnet's gradient op
      rnn_grad_op->set_stepnet(
          BackwardRecursive(stepnet_op, no_grad_names, uniq_id));
    }

    if (net->ops_.empty()) {  // Currently no auxiliary op has been added
      return grad_op;
    }
    net->AppendOp(std::move(grad_op));
  }
  net->SetType("@GENERATED_BACKWARD@");
  net->CompleteAddOp();
  return std::unique_ptr<OperatorBase>(
      static_cast<OperatorBase*>(net.release()));
}

// See header for comments
std::unique_ptr<OperatorBase> Backward(
    const OperatorBase& forwardOp,
    const std::unordered_set<std::string>& no_grad_vars) {
  std::unordered_set<std::string> no_grad_names;
  no_grad_names.reserve(no_grad_vars.size() + 1);

  no_grad_names.insert(std::string(kEmptyVarName) + kGradVarSuffix);

  for (auto& name : no_grad_vars) {
    no_grad_names.insert(name + kGradVarSuffix);
  }
  size_t uid = 0;
  return BackwardRecursive(forwardOp, no_grad_names, uid);
}
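// Illustrative usage (hypothetical names, not part of this file's tests):
//   std::unordered_set<std::string> no_grad = {"bias"};
//   auto backward_op = Backward(*forward_net, no_grad);
//   // backward_op is typically a NetOp of type "@GENERATED_BACKWARD@" whose
//   // sub-ops produce the @GRAD variables of the forward net's inputs.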

}  // namespace framework
}  // namespace paddle