/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#pragma once

#include <map>
#include <memory>
#include <string>
#include <unordered_map>
#include <unordered_set>
#include <vector>

#include "paddle/fluid/framework/garbage_collector.h"
#include "paddle/fluid/framework/op_info.h"
#include "paddle/fluid/framework/program_desc.h"
#include "paddle/fluid/framework/scope.h"
#include "paddle/fluid/framework/tensor.h"
#include "paddle/fluid/platform/device_context.h"

namespace paddle {
namespace framework {

extern void InitializeVariable(Variable* var, proto::VarType::Type var_type);

// Threshold (in bytes) above which eager deletion of variables kicks in;
// defined in the corresponding .cc file.
int64_t GetEagerDeletionThreshold();

// Counts, for block `block_id` of `prog`, how many times each
// non-persistable LOD_TENSOR variable is referenced as an operator
// input or output. Variables of any other type, and persistable
// variables, are excluded from the result. The counts are used to
// decide when an intermediate tensor can be garbage-collected.
//
// T is the integral counter type stored in the returned map.
template <typename T>
std::unordered_map<std::string, T> GetNonPersistableReferenceCount(
    const ProgramDesc& prog, size_t block_id) {
  auto& block = prog.Block(block_id);
  std::unordered_set<std::string> ignored_vars;
  std::unordered_map<std::string, T> ref_cnts;

  // Ignore variables that are not LOD_TENSORs as well as persistable
  // ones; neither is eligible for eager deletion.
  for (auto* var_desc : block.AllVars()) {
    auto type = var_desc->Proto()->type().type();
    if (type != proto::VarType::LOD_TENSOR || var_desc->Persistable()) {
      ignored_vars.insert(var_desc->Name());
    }
  }

  for (auto* op_desc : block.AllOps()) {
    for (auto& input : op_desc->Inputs()) {
      // Take each name by const reference to avoid copying the string.
      for (const auto& input_name : input.second) {
        if (!ignored_vars.count(input_name)) {
          // operator[] value-initializes a missing count to 0, so a
          // single increment handles both first and repeated sightings.
          ++ref_cnts[input_name];
        }
      }
    }

    for (auto& output : op_desc->Outputs()) {
      // Was `auto output_name` (a per-iteration string copy); use a
      // const reference, matching the input loop above.
      for (const auto& output_name : output.second) {
        if (!ignored_vars.count(output_name)) {
          ++ref_cnts[output_name];
        }
      }
    }
  }
  return ref_cnts;
}

// Pre-built execution state for one block of a program: the operators
// are instantiated once in Prepare() and then reused across multiple
// RunPreparedContext() calls, avoiding repeated op creation.
struct ExecutorPrepareContext {
  ExecutorPrepareContext(const framework::ProgramDesc& prog, size_t block_id);
  ~ExecutorPrepareContext();

  // The program this context was prepared from (not owned).
  const framework::ProgramDesc& prog_;
  // Index of the block within prog_ that this context executes.
  size_t block_id_;
  // Operators instantiated from the block's op descriptions.
  std::vector<std::unique_ptr<OperatorBase>> ops_;

  // Per-variable reference counts (see GetNonPersistableReferenceCount)
  // used to drive eager garbage collection of intermediate tensors.
  std::unordered_map<std::string, int> ref_cnts_;
};

// Executes a ProgramDesc on a single device/place, operator by operator,
// under a given Scope.
class Executor {
 public:
  // TODO(dzhwinter) : Do not rely on this function, it will be removed
  explicit Executor(const platform::DeviceContext& device)
      : Executor(device.GetPlace()) {}

  explicit Executor(const platform::Place& place);

#ifdef PADDLE_WITH_DISTRIBUTE
  /*
   * Sending signal to pserver to mark current pass started.
   */
  void BeginPass();

  /*
   * Sending signal to pserver to mark current pass finished.
   */
  void EndPass();
#endif

  /* @Brief
   * Runtime evaluation of the given ProgramDesc under certain Scope
   *
   * @param
   *  ProgramDesc
   *  Scope
   */
  // Runs block `block_id` of `prog`. When create_local_scope is true a
  // child scope is created for execution; create_vars controls whether
  // the block's variables are created before running.
  void Run(const ProgramDesc& prog, Scope* scope, int block_id,
           bool create_local_scope = true, bool create_vars = true);

  // Same as Run above, but additionally wires user-provided feed inputs
  // and fetch outputs through the feed/fetch holder variables named by
  // feed_holder_name / fetch_holder_name.
  void Run(const ProgramDesc& program, Scope* scope,
           std::map<std::string, const LoDTensor*>* feed_targets,
           std::map<std::string, LoDTensor*>* fetch_targets,
           bool create_local_scope = true, bool create_vars = true,
           const std::string& feed_holder_name = "feed",
           const std::string& fetch_holder_name = "fetch");

  // Builds a reusable execution context for one block of `program`,
  // instantiating its operators once (see ExecutorPrepareContext).
  static std::unique_ptr<ExecutorPrepareContext> Prepare(
      const ProgramDesc& program, int block_id);

  // Batch variant: prepares one context per entry in block_ids.
  static std::vector<std::shared_ptr<ExecutorPrepareContext>> Prepare(
      const ProgramDesc& program, const std::vector<int>& block_ids);

  // Creates the variables of block `block_id` of `pdesc` inside `scope`.
  void CreateVariables(const ProgramDesc& pdesc, Scope* scope, int block_id);

  // Runs a previously Prepare()d context. keep_kids controls whether
  // child scopes created during the run are retained afterwards.
  void RunPreparedContext(ExecutorPrepareContext* ctx, Scope* scope,
                          bool create_local_scope = true,
                          bool create_vars = true, bool keep_kids = false);

  // Prepared-context variant that also wires feed/fetch targets, mirroring
  // the second Run() overload above.
  void RunPreparedContext(ExecutorPrepareContext* ctx, Scope* scope,
                          std::map<std::string, const LoDTensor*>* feed_targets,
                          std::map<std::string, LoDTensor*>* fetch_targets,
                          bool create_local_scope = true,
                          bool create_vars = true,
                          const std::string& feed_holder_name = "feed",
                          const std::string& fetch_holder_name = "fetch");

  // Switches eligible operators in `program` to their MKLDNN kernels.
  void EnableMKLDNN(const ProgramDesc& program);

 private:
  // The device/place all blocks are executed on.
  const platform::Place place_;
};

}  // namespace framework
}  // namespace paddle