/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/fluid/framework/prune.h"

#include <glog/logging.h>

#include <algorithm>
#include <memory>
#include <set>
#include <string>
#include <unordered_map>
#include <unordered_set>
#include <vector>

#include "paddle/fluid/framework/block_desc.h"
#include "paddle/fluid/framework/op_desc.h"
#include "paddle/fluid/framework/op_proto_maker.h"
#include "paddle/fluid/framework/program_desc.h"

namespace paddle {
namespace framework {

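// Feed and fetch ops mark the input and output boundaries of a ProgramDesc.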
const char kFeedOpType[] = "feed";
const char kFetchOpType[] = "fetch";

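// Returns true if any input argument of op_desc appears in dependent_vars.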
bool HasDependentInputVar(
    const proto::OpDesc& op_desc,
    const std::unordered_set<std::string>& dependent_vars) {
  for (auto& var : op_desc.inputs()) {
    for (auto& argu : var.arguments()) {
      if (dependent_vars.count(argu) != 0) {
        return true;
      }
    }
  }
  return false;
}

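// Returns true if any output argument of op_desc appears in dependent_vars.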
bool HasDependentOutputVar(
    const proto::OpDesc& op_desc,
    const std::unordered_set<std::string>& dependent_vars) {
  for (auto& var : op_desc.outputs()) {
    for (auto& argu : var.arguments()) {
      if (dependent_vars.count(argu) != 0) {
        return true;
      }
    }
  }
  return false;
}

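// An op is a target only if its is_target flag is present and set to true.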
bool IsTarget(const proto::OpDesc& op_desc) {
  if (op_desc.has_is_target()) {
    return op_desc.is_target();
  }
  return false;
}

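// Unlike IsTarget, these helpers require the is_target flag to be explicitly
// present, so an op with an unset flag matches neither of them.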
bool HasTrueTarget(const proto::OpDesc& op_desc) {
  return op_desc.has_is_target() && op_desc.is_target();
}

bool HasFalseTarget(const proto::OpDesc& op_desc) {
  return op_desc.has_is_target() && !op_desc.is_target();
}

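// Returns the index of the sub-block attached to the op via a BLOCK
// attribute (e.g. for while/conditional ops), or -1 if there is none.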
int GetSubBlockIndex(const proto::OpDesc& op_desc) {
  for (auto& attr : op_desc.attrs()) {
    if (attr.type() == proto::AttrType::BLOCK) {
      PADDLE_ENFORCE(attr.has_block_idx());
      return attr.block_idx();
    }
  }
  return -1;
}

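// Block 0 is always the global block, so any valid sub-block index is > 0.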
bool HasSubBlock(const proto::OpDesc& op_desc) {
  return GetSubBlockIndex(op_desc) > 0;
}

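// Collect all input (resp. output) argument names of op_desc into *vars_set.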
void AppendOpInputVarNames(const proto::OpDesc& op_desc,
                           std::unordered_set<std::string>* vars_set) {
  for (auto& var : op_desc.inputs()) {
    for (auto& arg : var.arguments()) {
      vars_set->emplace(arg);
    }
  }
}

void AppendOpOutputVarNames(const proto::OpDesc& op_desc,
                            std::unordered_set<std::string>* vars_set) {
  for (auto& var : op_desc.outputs()) {
    for (auto& arg : var.arguments()) {
      vars_set->emplace(arg);
    }
  }
}

// block_id is the idx of the current block in the input desc
// parent_block_id is the idx of the parent of the current block
// in the output desc; -1 means the current block is the global block
// dependent_vars is passed recursively from the parent block to
// the child block to help pruning
void prune_impl(const proto::ProgramDesc& input, proto::ProgramDesc* output,
                int block_id, int parent_block_id,
                std::unordered_set<std::string>* dependent_vars,
                const std::set<std::string>& feed_var_names) {
  auto& block = input.blocks(block_id);
  auto& ops = block.ops();

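  // Sanity checks: feed ops, if any, must form a contiguous prefix of the
  // block's op list, and fetch ops a contiguous suffix.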
  bool expect_feed = true;
  for (auto& op_desc : ops) {
    PADDLE_ENFORCE(op_desc.type() != kFeedOpType || expect_feed,
                   "All FeedOps are at the beginning of the ProgramDesc");
    expect_feed = (op_desc.type() == kFeedOpType);
  }

  bool expect_fetch = true;
  for (auto op_iter = ops.rbegin(); op_iter != ops.rend(); ++op_iter) {
    auto& op_desc = *op_iter;
    PADDLE_ENFORCE(op_desc.type() != kFetchOpType || expect_fetch,
                   "All FetchOps must at the end of the ProgramDesc");
    expect_fetch = (op_desc.type() == kFetchOpType);
  }

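  // Walk the ops in reverse: an op should run if it is a target, or if any
  // of its outputs is consumed by an op that already should run.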
  std::vector<bool> should_run;
  for (auto op_iter = ops.rbegin(); op_iter != ops.rend(); ++op_iter) {
    auto& op_desc = *op_iter;
    if (IsTarget(op_desc) || HasDependentOutputVar(op_desc, *dependent_vars)) {
      // insert its inputs into the dependency set
      for (auto& var : op_desc.inputs()) {
        for (auto& argu : var.arguments()) {
          if (feed_var_names.count(argu) == 0) {
            dependent_vars->insert(argu);
          }
        }
      }
      should_run.push_back(true);
    } else {
      should_run.push_back(false);
    }
  }

  // since we are traversing the ProgramDesc in reverse order
  // we reverse the should_run vector
  std::reverse(should_run.begin(), should_run.end());

  // copy the current block from input to output
  auto* block_field = output->mutable_blocks();
  *block_field->Add() = input.blocks(block_id);

  int output_block_id = output->blocks_size() - 1;
  auto* output_block = output->mutable_blocks(output_block_id);
  output_block->set_idx(output_block_id);
  output_block->set_parent_idx(parent_block_id);

  auto* op_field = output_block->mutable_ops();
  op_field->Clear();
  for (size_t i = 0; i < should_run.size(); ++i) {
    if (should_run[i]) {
      auto* op = op_field->Add();
      *op = input.blocks(block_id).ops(i);
      if (HasSubBlock(*op)) {
        // create sub_block_dependent_vars here to help prune the sub block
        std::unordered_set<std::string> sub_block_dependent_vars;
        for (auto& var : op->inputs()) {
          for (auto& argu : var.arguments()) {
            if (feed_var_names.count(argu) == 0) {
              sub_block_dependent_vars.insert(argu);
            }
          }
        }
        for (auto& var : op->outputs()) {
          for (auto& argu : var.arguments()) {
            if (feed_var_names.count(argu) == 0) {
              sub_block_dependent_vars.insert(argu);
            }
          }
        }
        // GetSubBlockIndex(*op) is the idx of the sub_block in the input desc
        // output_block_id is the idx of the current block in the output desc
        prune_impl(input, output, GetSubBlockIndex(*op), output_block_id,
                   &sub_block_dependent_vars, feed_var_names);
      }
    }
  }

  // remove the VarDescs in BlockDesc that are not referenced in
  // the pruned OpDescs
  std::unordered_map<std::string, proto::VarDesc> var_map;
  auto* var_field = output->mutable_blocks(output_block_id)->mutable_vars();
  for (const auto& var : *var_field) {
    var_map[var.name()] = var;
  }

  std::set<std::string> var_names;
  for (const auto& op : *op_field) {
    auto& input_field = op.inputs();
    for (auto& input_var : input_field) {
      for (auto& arg : input_var.arguments()) {
        if (var_map.count(arg) != 0) {
          var_names.insert(arg);
        }
      }
    }
    auto& output_field = op.outputs();
    for (auto& output_var : output_field) {
      for (auto& arg : output_var.arguments()) {
        if (var_map.count(arg) != 0) {
          var_names.insert(arg);
        }
      }
    }
  }

  var_field->Clear();
  for (const auto& name : var_names) {
    *var_field->Add() = var_map[name];
  }
}

// TODO(fengjiayi): Prune() could be done in place to avoid unnecessary copies
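// A minimal usage sketch (BuildProgram and the "image" feed var are
// illustrative only; the caller is expected to have marked the ops to keep,
// e.g. the fetch ops, as targets beforehand):
//
//   framework::ProgramDesc program = BuildProgram();  // hypothetical helper
//   program.MutableBlock(0)->Op(0)->SetIsTarget(true);
//   proto::ProgramDesc pruned;
//   Prune(*program.Proto(), /*feed_var_names=*/{"image"}, &pruned);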
void Prune(const proto::ProgramDesc& input,
           const std::set<std::string>& feed_var_names,
           proto::ProgramDesc* output) {
  std::unordered_set<std::string> dependent_vars;
  output->clear_blocks();
  prune_impl(input, output, 0, -1, &dependent_vars, feed_var_names);
}

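// Appends a verbatim copy of block block_id from input to output and fixes
// up the copy's idx and parent_idx fields.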
void CloneWholeBlock(proto::ProgramDesc* input, proto::ProgramDesc* output,
                     int block_id, int parent_block_id) {
  auto* block_field = output->mutable_blocks();
  *block_field->Add() = input->blocks(block_id);
  int output_block_id = output->blocks_size() - 1;
  auto* output_block = output->mutable_blocks(output_block_id);
  output_block->set_idx(output_block_id);
  output_block->set_parent_idx(parent_block_id);
}

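// Prunes the backward part of a block: ops are first marked via is_target
// (forward ops true, backward/optimizer ops false), then only the marked
// forward ops and the vars they reference are copied to output.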
void PruneBackwardImpl(proto::ProgramDesc* input, proto::ProgramDesc* output,
                       int block_id, int parent_block_id) {
  // Step 1. Copy the current input block to output
  CloneWholeBlock(input, output, block_id, parent_block_id);
  int output_block_id = output->blocks_size() - 1;
  auto* output_block = output->mutable_blocks(output_block_id);

  // Step 2. Mark forward ops on main branch
  auto* ops = input->mutable_blocks(block_id)->mutable_ops();
  std::unordered_set<std::string> op_input_vars;
  std::unordered_set<std::string> op_output_vars;
  for (auto op_iter = ops->rbegin(); op_iter != ops->rend(); ++op_iter) {
    auto& op_desc = *op_iter;
    if (HasTrueTarget(op_desc) ||
        HasDependentOutputVar(op_desc, op_input_vars)) {
      op_desc.set_is_target(true);
      AppendOpInputVarNames(op_desc, &op_input_vars);
      AppendOpOutputVarNames(op_desc, &op_output_vars);
    }
  }

  // Step 3. Mark backward & optimize ops on main branch
  std::unordered_set<std::string> gradop_input_vars;
  std::unordered_set<std::string> gradop_output_vars;
  for (auto op_iter = ops->begin(); op_iter != ops->end(); ++op_iter) {
    auto& op_desc = *op_iter;
    if (HasFalseTarget(op_desc) ||
        HasDependentInputVar(op_desc, gradop_output_vars)) {
      op_desc.set_is_target(false);
      AppendOpInputVarNames(op_desc, &gradop_input_vars);
      AppendOpOutputVarNames(op_desc, &gradop_output_vars);
    }
  }

  // Step 4. Mark ops that need to be preserved on sub-branches
  for (auto op_iter = ops->rbegin(); op_iter != ops->rend(); ++op_iter) {
    auto& op_desc = *op_iter;
    if (!op_desc.has_is_target()) {
      if (HasDependentOutputVar(op_desc, gradop_input_vars)) {
        op_desc.set_is_target(false);
        AppendOpInputVarNames(op_desc, &gradop_input_vars);
      } else {
        op_desc.set_is_target(true);
        AppendOpInputVarNames(op_desc, &op_input_vars);
        AppendOpOutputVarNames(op_desc, &op_output_vars);
      }
    }
  }

  // Step 5. Copy the forward ops to the new ProgramDesc
  //   Note: proto::ProgramDesc has no interface for removing
  //         ops and vars, so we rebuild the op list instead
  auto* op_field = output_block->mutable_ops();
  op_field->Clear();
  for (auto op_iter = ops->begin(); op_iter != ops->end(); ++op_iter) {
    if (IsTarget(*op_iter)) {
      auto* op = op_field->Add();
      *op = *op_iter;
      if (HasSubBlock(*op)) {
        CloneWholeBlock(input, output, GetSubBlockIndex(*op), output_block_id);
      }
    }
  }

  // Step 6. Copy the forward vars to the new ProgramDesc,
  //   building a map of all vars before clearing the field
  auto* var_field = output_block->mutable_vars();
  std::unordered_map<std::string, proto::VarDesc> var_map;
  for (const auto& var : *var_field) {
    var_map[var.name()] = var;
  }
  std::unordered_set<std::string> var_names;
  var_names.insert(op_input_vars.begin(), op_input_vars.end());
  var_names.insert(op_output_vars.begin(), op_output_vars.end());
  var_field->Clear();
  for (const auto& name : var_names) {
    *var_field->Add() = var_map[name];
  }
}

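// A minimal usage sketch (BuildTrainProgram is illustrative; the input is
// assumed to be a program with an appended backward pass, i.e. it contains
// a loss op whose role is kForward | kLoss):
//
//   framework::ProgramDesc train_program = BuildTrainProgram();
//   std::unique_ptr<framework::ProgramDesc> forward_only =
//       PruneBackward(train_program);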
std::unique_ptr<framework::ProgramDesc> PruneBackward(
    const framework::ProgramDesc& origin) {
  // Copy the original ProgramDesc; origin must not be changed
  framework::ProgramDesc origin_clone(origin);

  // Step 1. Update the loss op's role & mark the loss op as target
  //   The loss op's op_role is (kForward | kLoss)
  //   The input ProgramDesc must contain a loss operator.
  auto ops = origin_clone.Block(0).AllOps();
  bool has_loss_op = false;
  for (auto op : ops) {
    int op_role =
        boost::get<int>(op->GetAttr(OpProtoAndCheckerMaker::OpRoleAttrName()));
    if (op_role == (static_cast<int>(OpRole::kForward) |
                    static_cast<int>(OpRole::kLoss))) {
      op->SetAttr(OpProtoAndCheckerMaker::OpRoleAttrName(),
                  static_cast<int>(OpRole::kForward));
      op->SetIsTarget(true);
      has_loss_op = true;
    } else if (op_role == (static_cast<int>(OpRole::kBackward) |
                           static_cast<int>(OpRole::kLoss))) {
      op->SetIsTarget(false);
      break;
    }
  }
  PADDLE_ENFORCE_EQ(has_loss_op, true,
                    "The program whose backward part is to be pruned "
                    "should have a loss operator.");

  // Step 2. Prune backward
  proto::ProgramDesc pruned_desc;
  pruned_desc.clear_blocks();
  PruneBackwardImpl(origin_clone.Proto(), &pruned_desc, 0, -1);

  // Step 3. Construct new framework::ProgramDesc
  return std::unique_ptr<framework::ProgramDesc>(
      new framework::ProgramDesc(pruned_desc));
}

}  // namespace framework
}  // namespace paddle