/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#pragma once

#include <map>
#include <memory>
#include <string>
#include <unordered_map>
#include <vector>
#include "paddle/fluid/framework/op_info.h"
#include "paddle/fluid/framework/program_desc.h"
#include "paddle/fluid/framework/scope.h"
#include "paddle/fluid/framework/tensor.h"
#include "paddle/fluid/platform/device_context.h"
#ifndef _WIN32
#include "paddle/fluid/framework/garbage_collector.h"
#endif

namespace paddle {
namespace framework {
extern void InitializeVariable(Variable* var, proto::VarType::Type var_type);

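// For every non-persistable LOD_TENSOR / SELECTED_ROWS variable in the given
// block, counts how many operator inputs and outputs reference it. The result
// is used to seed the reference counts consumed by the garbage collector.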
template <typename T>
std::unordered_map<std::string, T> GetNonPersistableReferenceCount(
    const ProgramDesc& prog, size_t block_id) {
  auto& block = prog.Block(block_id);
  std::unordered_map<std::string, T> ref_cnts;

  auto update_ref_cnts = [&](OpDesc* op_desc, const VariableNameMap& name_map) {
    for (auto& name_pair : name_map) {
      for (auto& name : name_pair.second) {
        auto* var_desc = block.FindVar(name);
        if (var_desc == nullptr || var_desc->Persistable()) continue;
        auto type = var_desc->Proto()->type().type();
        if (type != proto::VarType::LOD_TENSOR &&
            type != proto::VarType::SELECTED_ROWS) {
          continue;
        }

        auto it = ref_cnts.find(name);
        if (it != ref_cnts.end()) {
          ++it->second;
        } else {
          ref_cnts[name] = 1;
        }
      }
    }
  };

  for (auto op_desc : block.AllOps()) {
    update_ref_cnts(op_desc, op_desc->Inputs());
    update_ref_cnts(op_desc, op_desc->Outputs());
  }
  return ref_cnts;
}

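// Caches the operators instantiated from one block of a ProgramDesc, together
// with the per-variable reference counts used for garbage collection, so the
// block can be run repeatedly via Executor::RunPreparedContext.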
struct ExecutorPrepareContext {
  ExecutorPrepareContext(const framework::ProgramDesc& prog, size_t block_id);
  ~ExecutorPrepareContext();

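  // Restores cur_ref_cnts_ from ref_cnts_ so that a new run starts from the
  // original reference counts.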
  void ResetReferenceCount() { cur_ref_cnts_ = ref_cnts_; }

  const framework::ProgramDesc& prog_;
  size_t block_id_;
  std::vector<std::unique_ptr<OperatorBase>> ops_;

  std::unordered_map<std::string, int> ref_cnts_;
  std::unordered_map<std::string, int> cur_ref_cnts_;
};

class Executor {
 public:
  // TODO(dzhwinter): Do not rely on this constructor; it will be removed.
  explicit Executor(const platform::DeviceContext& device)
      : Executor(device.GetPlace()) {}

  explicit Executor(const platform::Place& place);

  /*
   * Close this Executor.
   * Calling this method will send complete messages to all pserver instances.
   */
  void Close();

  /* @Brief
   * Runtime evaluation of the given ProgramDesc under a certain Scope.
   *
   * @param
   *  ProgramDesc
   *  Scope
   */
  void Run(const ProgramDesc& prog, Scope* scope, int block_id,
           bool create_local_scope = true, bool create_vars = true);

  // This API is very slow.
  void Run(const ProgramDesc& program, Scope* scope,
           std::map<std::string, const LoDTensor*>* feed_targets,
           std::map<std::string, LoDTensor*>* fetch_targets,
           bool create_local_scope = true, bool create_vars = true,
           const std::string& feed_holder_name = "feed",
           const std::string& fetch_holder_name = "fetch");
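  // Usage of the overload above (illustrative only; "x" and "out" are
  // hypothetical variable names in `program`):
  //   std::map<std::string, const LoDTensor*> feed = {{"x", &x_tensor}};
  //   std::map<std::string, LoDTensor*> fetch = {{"out", &out_tensor}};
  //   executor.Run(program, &scope, &feed, &fetch);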

  static std::unique_ptr<ExecutorPrepareContext> Prepare(
      const ProgramDesc& program, int block_id);

  static std::vector<std::shared_ptr<ExecutorPrepareContext>> Prepare(
      const ProgramDesc& program, const std::vector<int>& block_ids);

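  // Creates in `scope` all the variables declared in block `block_id` of
  // `pdesc`.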
  void CreateVariables(const ProgramDesc& pdesc, Scope* scope, int block_id);

  void RunPreparedContext(ExecutorPrepareContext* ctx, Scope* scope,
                          bool create_local_scope = true,
                          bool create_vars = true, bool keep_kids = false);

  // This API is very slow.
  void RunPreparedContext(ExecutorPrepareContext* ctx, Scope* scope,
                          std::map<std::string, const LoDTensor*>* feed_targets,
                          std::map<std::string, LoDTensor*>* fetch_targets,
                          bool create_local_scope = true,
                          bool create_vars = true,
                          const std::string& feed_holder_name = "feed",
                          const std::string& fetch_holder_name = "fetch");

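  // Intended to switch operators in `program` to their MKL-DNN kernels where
  // such kernels are available; effectively a no-op in builds without MKL-DNN
  // support.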
  void EnableMKLDNN(const ProgramDesc& program);

 private:
  const platform::Place place_;
};
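
// A minimal usage sketch (illustrative only; `startup_prog` and `main_prog`
// stand for ProgramDescs built or deserialized by the caller):
//
//   platform::CPUPlace place;
//   Executor executor(place);
//   Scope scope;
//
//   // Run the startup program once to create and initialize parameters.
//   executor.Run(startup_prog, &scope, /*block_id=*/0);
//
//   // Prepare the main program once, then run it many times.
//   auto ctx = Executor::Prepare(main_prog, /*block_id=*/0);
//   for (int step = 0; step < num_steps; ++step) {
//     executor.RunPreparedContext(ctx.get(), &scope);
//   }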

}  // namespace framework
}  // namespace paddle