/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include "pass/memory_optimize.h"
16
#include <algorithm>
17
#include "framework/lod_tensor.h"
H
hjchen2 已提交
18 19 20 21

namespace paddle_mobile {
namespace pass {

// Caches every variable declared in `block`, keyed by name, for the
// persistability lookups below.
void MemoryOptPass::AppendBlockVars(const framework::BlockDesc *block) {
  // block_vars_.clear();
  for (const auto &var : block->Vars()) {
    block_vars_[var->Name()] = var.get();
  }
}

// A name missing from block_vars_ is treated as non-persistable.
bool MemoryOptPass::IsPersistable(const std::string name) {
  const auto it = block_vars_.find(name);
  if (it != block_vars_.end()) {
    return it->second->Persistable();
  }
  return false;
}

// Returns the node for `name`, creating it on first use. Each call bumps
// the reference count, so `count` ends up equal to the number of times
// the variable is pushed onto analysis_nodes_. Nodes are owned by
// created_nodes_ for the lifetime of the pass.
VarNode *MemoryOptPass::CreateNode(const std::string name) {
  auto it = created_nodes_.find(name);
  if (it != created_nodes_.end()) {
    ++(it->second->count);
    return it->second;
  }
  VarNode *var = new VarNode;
  var->name = name;
  var->count = 1;
  var->visited = false;
  created_nodes_[name] = var;
  return var;
}

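// Overview: each occurrence of a non-persistable variable is pushed onto
// analysis_nodes_ in program order (outputs, inputs, then outputs again
// per op). The stack is then unwound in reverse to greedily group
// variables with disjoint lifetimes into reuse lists, and every list is
// finally bound to a single shared allocation.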
void MemoryOptPass::operator()(
    const framework::ProgramDesc *program, framework::Scope *scope,
    MemoryOptimizationLevel memory_optimization_level) {
  const auto &blocks = program->Blocks();
  for (const auto &block : blocks) {
    // access all variables in each block
    AppendBlockVars(block.get());

    reused_nodes_.clear();
    // collect all non-persistable variables and accumulate
    // their reference counts
    std::stack<VarNode *> empty_var_nodes;
    analysis_nodes_.swap(empty_var_nodes);

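    // With MemoryOptimizationWithoutFeeds, variables consumed by feed ops
    // (presumably the externally provided inputs) are excluded from reuse
    // so their buffers are never handed to another variable.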
    std::vector<std::string> exclude_var_names;
    for (const auto &op : block->Ops()) {
      for (const auto &inputs : op->GetInputs()) {
        for (const auto &input : inputs.second) {
          if (!IsPersistable(input)) {
            if (memory_optimization_level == MemoryOptimizationWithoutFeeds) {
              if (op->Type() == "feed") {
                exclude_var_names.push_back(input);
              }
            }
          }
        }
      }
    }

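    // Variables read by fetch ops are handed back to the caller; they are
    // tracked here so that their memory is never donated to another
    // variable (see the reuse check below).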
    std::vector<VarNode *> fetch_var_nodes;
    for (const auto &op : block->Ops()) {
      DLOG << "op_desc->Type(): " << op->Type();
      for (const auto &outputs : op->GetOutputs()) {
        for (const auto &output : outputs.second) {
          if (!IsPersistable(output) &&
              std::find(exclude_var_names.begin(), exclude_var_names.end(),
                        output) == exclude_var_names.end()) {
            DLOG << "output: " << output;
            VarNode *node = CreateNode(output);
            analysis_nodes_.push(node);
          }
        }
      }
      for (const auto &inputs : op->GetInputs()) {
        for (const auto &input : inputs.second) {
          if (!IsPersistable(input) &&
              std::find(exclude_var_names.begin(), exclude_var_names.end(),
                        input) == exclude_var_names.end()) {
            DLOG << "input: " << input;
            VarNode *node = CreateNode(input);
            analysis_nodes_.push(node);
            if (op->Type() == "fetch") {
              fetch_var_nodes.push_back(node);
            }
          }
        }
      }
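      // The op's outputs are pushed a second time, after its inputs. During
      // the reverse unwinding below, this sandwiching keeps the counts of an
      // op's outputs (and inputs) above zero while its other operands are
      // being placed, so an input and an output of the same op can never
      // end up sharing a buffer, i.e. no kernel is silently forced to run
      // in place.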
      for (const auto &outputs : op->GetOutputs()) {
        for (const auto &output : outputs.second) {
          if (!IsPersistable(output) &&
              std::find(exclude_var_names.begin(), exclude_var_names.end(),
                        output) == exclude_var_names.end()) {
            DLOG << "output: " << output;
            VarNode *node = CreateNode(output);
            analysis_nodes_.push(node);
          }
        }
      }
    }
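
    // analysis_nodes_ is unwound in reverse program order. The first time a
    // node is popped corresponds to the variable's last use, which is when
    // it gets placed into a reuse list; a list whose tail has count == 0
    // belongs to a variable referenced only by later ops, so the two
    // lifetimes cannot overlap and the memory can be handed over.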

    // apply the optimization
    while (!analysis_nodes_.empty()) {
      auto *node = analysis_nodes_.top();
      analysis_nodes_.pop();
      // only a node that has not been visited yet still needs a placement;
      // it may reuse memory behind a node whose count has dropped to 0,
      // which indicates that variable will not be used any more
      if (!node->visited) {
        bool reused = false;
        // find a reuse list whose memory is already free
        for (auto &list : reused_nodes_) {
          if (list.back()->count == 0 &&
              std::find(fetch_var_nodes.begin(), fetch_var_nodes.end(),
                        list.back()) == fetch_var_nodes.end()) {
            list.push_back(node);
            reused = true;
            break;
          }
        }
        // start a new reuse list if no existing one can be reused
        if (!reused) {
          std::vector<VarNode *> list;
          list.push_back(node);
          reused_nodes_.push_back(std::move(list));
        }
      }
      node->visited = true;
      node->count -= 1;
    }

    // share one underlying allocation among all variables in the
    // same reuse list
    for (const auto &list : reused_nodes_) {
      DLOG << "\n";
      DLOG << "share memory within these variables";
      std::string name = list[0]->name;
      auto *reused_var = scope->Var(name);
      auto *reuse_tensor =
          reused_var->template GetMutable<framework::LoDTensor>();
      reuse_tensor->mutable_data<float>();
      for (const auto &node : list) {
        DLOG << node->name;
        auto *var = scope->Var(node->name);
        auto *tensor = var->template GetMutable<framework::LoDTensor>();
        tensor->ShareHolderWith(*reuse_tensor);
      }
    }
  }
}

}  // namespace pass
}  // namespace paddle_mobile
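
// A minimal usage sketch (the call site is assumed, not taken from this
// repo; in paddle-mobile the pass would typically run right after the
// program is loaded, before any op executes):
//
//   paddle_mobile::pass::MemoryOptPass()(
//       program_desc.get(), scope.get(),
//       paddle_mobile::pass::MemoryOptimizationWithoutFeeds);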