/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License. */

#include "paddle/framework/backward.h"
#include <list>
#include "paddle/framework/net.h"
#include "paddle/framework/op_registry.h"

namespace paddle {
namespace framework {

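// Returns true iff every name in `names`, with `suffix` appended, is present
// in `set`.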
static bool AllInSet(const std::vector<std::string>& names,
                     const std::string& suffix,
                     const std::unordered_set<std::string>& set) {
  for (auto& name : names) {
    if (set.find(name + suffix) == set.end()) {
      return false;
    }
  }
  return true;
}

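// Build an empty NetOp that performs no computation. It is used as a cheap
// placeholder when no gradient needs to be calculated.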
static std::shared_ptr<OperatorBase> NOP() {
  auto net_op = std::make_shared<NetOp>();
  net_op->type_ = "@NOP@";
  net_op->CompleteAddOp();
  return net_op;
}

//  Get the backward operator from a forward operator; implemented recursively.
//
//  no_grad_names holds the names of gradient variables that do not need to be
//  computed.
//
//  uniq_id is a unique index used inside the recursive calls of
//  BackwardRecursive. Use `uid = uniq_id++;` to obtain a unique index, and
//  pass `uniq_id` through the recursive calls.
//
//  Returns the backward operator. In the simple case it is a single operator;
//  in the complex case it is a NetOp.
//
//  See Backward.h for details
static std::shared_ptr<OperatorBase> BackwardRecursive(
    const OperatorBase& forwardOp,
    std::unordered_set<std::string>& no_grad_names, size_t& uniq_id);
std::shared_ptr<OperatorBase> BackwardRecursive(
    const OperatorBase& forwardOp,
    std::unordered_set<std::string>& no_grad_names, size_t& uniq_id) {
  //  If none of the forward operator's input gradients needs to be computed,
  //  just return a NOP. We do not return a null pointer because a NOP costs
  //  almost nothing to run and keeps the calling logic simple.
  if (AllInSet(forwardOp.inputs_, OperatorBase::GRAD_VAR_SUFFIX(),
               no_grad_names)) {
    return NOP();
  }

  //  If none of the forward operator's output gradients needs to be computed,
  //  then none of its input gradients can be computed either. Put them all
  //  into the `no_grad_names` set and return a NOP.
  if (AllInSet(forwardOp.outputs_, OperatorBase::GRAD_VAR_SUFFIX(),
               no_grad_names)) {
    for (auto& name : forwardOp.inputs_) {
      // Mark every input gradient as not needed.
      no_grad_names.insert(name + OperatorBase::GRAD_VAR_SUFFIX());
    }
    return NOP();
  }

  // Returned gradient network
  auto net = std::make_shared<NetOp>();

  if (forwardOp.IsNetOp()) {
    // Since forwardOp is a NetOp, the static_cast below is safe.
    auto& forwardNet = static_cast<const NetOp&>(forwardOp);

    // Map from an output gradient variable name to the indices, in the
    // backward net, of the operators that generate that variable.
    std::unordered_map<std::string, std::vector<size_t>> dup_output_ops;

    size_t local_op_id = 0;
    // Traverse forwardNet in reverse order.
    for (auto it = forwardNet.ops_.rbegin(); it != forwardNet.ops_.rend();
         ++it, ++local_op_id) {
      auto fwd = *it;
      auto bwd = BackwardRecursive(*fwd, no_grad_names, uniq_id);
      net->AddOp(bwd);
      for (auto& out : bwd->outputs_) {
        dup_output_ops[out].emplace_back(local_op_id);
      }
    }
    // Take a unique id for this recursion level; it makes the renamed
    // duplicate outputs below globally unique.
    auto uid = uniq_id++;
    // If an output gradient variable is produced by more than one backward
    // op, rename each duplicated output, then insert an `add` op after the
    // last producer to sum the renamed variables back into the original name.
    using Pos = std::pair<size_t, std::shared_ptr<OperatorBase>>;
    std::list<Pos> insert_position;
    for (auto& dup_output_op : dup_output_ops) {
      const std::string& name = dup_output_op.first;
      auto& dup_op = dup_output_op.second;
      if (dup_op.size() == 1) continue;
      std::vector<std::string> dup_outputs;

      for (size_t i = 0; i < dup_op.size(); ++i) {
        auto op_offset = dup_op[i];
        dup_outputs.push_back(name + "@RENAME@" + std::to_string(uid) + "@" +
                              std::to_string(i));
        net->ops_[op_offset]->Rename(name, dup_outputs.back());
      }
      insert_position.push_back(
          {dup_op.back(),
           OpRegistry::CreateOp(
               "add", {dup_outputs}, {name},
               {{"input_format",
                 std::vector<int>{0, static_cast<int>(dup_outputs.size())}}})});
    }

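    // Sort the insertion points in descending order and insert from back to
    // front, so that inserting one `add` op does not invalidate the positions
    // of the ops that still need one.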
    insert_position.sort(
        [](const Pos& l, const Pos& r) { return l.first > r.first; });

    for (auto& pos : insert_position) {
      net->InsertOp(pos.first + 1, pos.second);
    }

  } else {
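    // forwardOp is a plain (non-net) operator: create its gradient operator
    // directly from the registry.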
    std::shared_ptr<OperatorBase> grad_op = OpRegistry::CreateGradOp(forwardOp);
    for (std::string& grad_input : grad_op->inputs_) {
      if (no_grad_names.count(grad_input)) {
        std::string prefix = grad_input.substr(
            0, grad_input.size() - OperatorBase::GRAD_VAR_SUFFIX().size());
        grad_input = prefix + OperatorBase::ZERO_VAR_SUFFIX();

        // If an input gradient of this operator is not computed, feed a
        // zero-filled variable to that input instead.
        net->AddOp(OpRegistry::CreateOp("fill_zeros_like", {prefix},
                                        {grad_input}, {}));
      }
    }

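    // Outputs whose gradients are in no_grad_names are renamed to the empty
    // variable name so that the gradient op will not compute them.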
    for (std::string& grad_output : grad_op->outputs_) {
      if (no_grad_names.count(grad_output)) {
        grad_output = OperatorBase::EMPTY_VAR_NAME();
      }
    }

    if (net->ops_.empty()) {  // No auxiliary op was added; return grad_op directly.
      return grad_op;
    }
    net->AddOp(grad_op);
  }
  net->type_ = "@GENERATED_BACKWARD@";
  net->CompleteAddOp();
  return net;
}

// See header for comments
std::shared_ptr<OperatorBase> Backward(
    const OperatorBase& forwardOp,
    const std::unordered_set<std::string>& no_grad_vars) {
  std::unordered_set<std::string> no_grad_names;
  no_grad_names.reserve(no_grad_vars.size());

  for (auto& name : no_grad_vars) {
    no_grad_names.insert(name + OperatorBase::GRAD_VAR_SUFFIX());
  }
  size_t uid = 0;
  return BackwardRecursive(forwardOp, no_grad_names, uid);
}
}  // namespace framework
}  // namespace paddle