/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "paddle/framework/prune.h"

#include <algorithm>
#include <set>
#include <string>
#include <unordered_map>
#include <vector>

#include <glog/logging.h>

#include "paddle/framework/feed_fetch_type.h"

namespace paddle {
namespace framework {

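// Ops whose behavior differs between training and inference; see
// InferenceOptimize below.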
const std::string kDropOutOpType = "dropout";
const std::string kBatchNormOpType = "batch_norm";

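// Returns true if any output argument of `op_desc` appears in
// `dependent_vars`, i.e. an op that is already marked to run consumes a
// variable this op produces.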
bool HasDependentVar(const proto::OpDesc& op_desc,
                     const std::set<std::string>& dependent_vars) {
  for (auto& var : op_desc.outputs()) {
    for (auto& argu : var.arguments()) {
      if (dependent_vars.count(argu) != 0) {
        return true;
      }
    }
  }
  return false;
}

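// Returns true if `op_desc` has been explicitly marked as a prune target.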
bool IsTarget(const proto::OpDesc& op_desc) {
  if (op_desc.has_is_target()) {
    return op_desc.is_target();
  }
  return false;
}

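// Copies `input` into `output`, then rewrites block `block_id` so that it
// keeps only the ops that are targets or that a kept op transitively
// depends on, together with the VarDescs those ops reference.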
void prune_impl(const proto::ProgramDesc& input, proto::ProgramDesc* output,
                int block_id) {
  // TODO(tonyyang-svail):
  //    - will change to use multiple blocks for RNN op and Cond Op

  auto& block = input.blocks(block_id);
  auto& ops = block.ops();

  bool expect_feed = true;
  for (auto& op_desc : ops) {
    PADDLE_ENFORCE(op_desc.type() != kFeedOpType || expect_feed,
                   "All FeedOps must be at the beginning of the ProgramDesc");
    expect_feed = (op_desc.type() == kFeedOpType);
  }

  bool expect_fetch = true;
  for (auto op_iter = ops.rbegin(); op_iter != ops.rend(); ++op_iter) {
    auto& op_desc = *op_iter;
    PADDLE_ENFORCE(op_desc.type() != kFetchOpType || expect_fetch,
                   "All FetchOps must be at the end of the ProgramDesc");
    expect_fetch = (op_desc.type() == kFetchOpType);
  }

  // walk the ops back to front: an op should run if it is a target or if an
  // op that already runs consumes one of its outputs
  std::set<std::string> dependent_vars;
  std::vector<bool> should_run;
  for (auto op_iter = ops.rbegin(); op_iter != ops.rend(); ++op_iter) {
    auto& op_desc = *op_iter;

    if (IsTarget(op_desc) || HasDependentVar(op_desc, dependent_vars)) {
      // insert its inputs into the set of dependent variables
      for (auto& var : op_desc.inputs()) {
        for (auto& argu : var.arguments()) {
          dependent_vars.insert(argu);
        }
      }

      should_run.push_back(true);
    } else {
      should_run.push_back(false);
    }
  }

  // since we are traversing the ProgramDesc in reverse order
  // we reverse the should_run vector
  std::reverse(should_run.begin(), should_run.end());

  *output = input;
  auto* op_field = output->mutable_blocks(block_id)->mutable_ops();
  op_field->Clear();
  for (size_t i = 0; i < should_run.size(); ++i) {
    if (should_run[i]) {
      *op_field->Add() = input.blocks(block_id).ops(i);
    }
  }

  // remove the VarDescs in BlockDesc that are not referenced in
  // the pruned OpDescs
  std::unordered_map<std::string, proto::VarDesc> var_map;
  auto* var_field = output->mutable_blocks(block_id)->mutable_vars();
  for (const auto& var : *var_field) {
    var_map[var.name()] = var;
  }

  var_field->Clear();
  // guard against adding the same VarDesc more than once when several ops
  // share a variable, and skip arguments that have no VarDesc in this block
  std::set<std::string> added_vars;
  for (const auto& op : *op_field) {
    // add VarDescs of all input arguments for each OpDesc
    auto& input_field = op.inputs();
    for (auto& input_var : input_field) {
      for (auto& arg : input_var.arguments()) {
        if (var_map.count(arg) != 0 && added_vars.insert(arg).second) {
          *var_field->Add() = var_map[arg];
        }
      }
    }
    // add VarDescs of all output arguments for each OpDesc
    auto& output_field = op.outputs();
    for (auto& output_var : output_field) {
      for (auto& arg : output_var.arguments()) {
        if (var_map.count(arg) != 0 && added_vars.insert(arg).second) {
          *var_field->Add() = var_map[arg];
        }
      }
    }
  }
}

// TODO(fengjiayi): Prune() could be done in place to avoid unnecessary copies
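// Entry point: prunes block 0 of `input` into `output`.
//
// A minimal usage sketch (assuming `program` is a proto::ProgramDesc whose
// fetch op has been marked with set_is_target(true)):
//
//   proto::ProgramDesc pruned;
//   Prune(program, &pruned);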
void Prune(const proto::ProgramDesc& input, proto::ProgramDesc* output) {
  prune_impl(input, output, 0);
}

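// Rewrites block `block_id` for inference: every dropout and batch_norm op
// has its `is_test` attribute forced to true so that it runs in evaluation
// mode.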
void inference_optimize_impl(const proto::ProgramDesc& input,
                             proto::ProgramDesc* output, int block_id) {
  *output = input;
  auto* op_field = output->mutable_blocks(block_id)->mutable_ops();
  for (auto& op_desc : *op_field) {
    if (op_desc.type() == kDropOutOpType ||
        op_desc.type() == kBatchNormOpType) {
      for (auto& attr : *op_desc.mutable_attrs()) {
        if (attr.name() == "is_test") {
          attr.set_b(true);
          break;
        }
      }
    }
  }
}

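// Entry point: applies the inference rewrite to block 0 of `input`.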
void InferenceOptimize(const proto::ProgramDesc& input,
                       proto::ProgramDesc* output) {
  inference_optimize_impl(input, output, 0);
}

}  // namespace framework
}  // namespace paddle