//   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#pragma once

#include <memory>
#include <string>
#include <unordered_map>
#include <unordered_set>
#include <utility>
#include <vector>
#include "paddle/fluid/framework/details/op_handle_base.h"
#include "paddle/fluid/framework/details/var_handle.h"

#include "paddle/fluid/framework/op_desc.h"
#include "paddle/fluid/framework/op_proto_maker.h"
#include "paddle/fluid/framework/program_desc.h"
#include "paddle/fluid/platform/place.h"

#include "paddle/fluid/framework/ir/graph.h"
#include "paddle/fluid/framework/ir/pass.h"

namespace paddle {
namespace framework {
namespace details {

// All variables on each device.
// The outer vector is indexed by device. Each element of this vector is a
// map from variable name to variables. Variables that share the same name
// have different versions; the offset in the
// `std::vector<VarHandle*>` is the version of the variable.
typedef std::vector<std::unordered_map<std::string, std::vector<VarHandle *>>>
    GraphVars;
constexpr char kGraphVars[] = "vars";
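// A hedged illustration (the `graph` variable and names below are
// hypothetical): the latest version of variable "w" on device 0 would be
// reached roughly as
//   auto &vars = graph->Get<GraphVars>(kGraphVars);
//   VarHandle *latest_w = vars[0].at("w").back();
// i.e. vars[device_id][var_name][version].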

constexpr char kNRanks[] = "nranks";

constexpr char kPlaces[] = "places";
constexpr char kGlobalScope[] = "global_scope";
constexpr char kLocalScopes[] = "local_scopes";
constexpr char kNCCLCtxs[] = "nccl_ctxs";
constexpr char kUseHierarchicalAllReduce[] = "use_hierarchical_allreduce";
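
// A hedged usage sketch (assumed, not taken from this header): build passes
// typically read these attributes back through ir::Graph, e.g.
//   auto &places = graph->Get<std::vector<platform::Place>>(kPlaces);
//   auto nranks = graph->Get<size_t>(kNRanks);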

// Auxiliary variables to represent dependencies. Useful to resolve data
// hazards.
typedef std::unordered_set<VarHandleBase *> GraphDepVars;
constexpr char kGraphDepVars[] = "dep_vars";

typedef std::unordered_set<std::string> FusedVars;
constexpr char kFusedVars[] = "fused_vars";
constexpr char kFusedVarNamePrefix[] = "@FUSEDVAR@";

typedef std::string FusedOptType;
constexpr char kFusedOptType[] = "fused_opt_type";

typedef std::vector<std::string> FusedGrads;
constexpr char kFusedGrads[] = "fused_gradients";

typedef std::vector<std::pair<std::string, std::string>> ParamsAndGrads;
constexpr char kParamsAndDenseGrads[] = "params_and_dense_grads";
constexpr char kParamsAndSparseGrads[] = "params_and_sparse_grads";

typedef std::vector<ProgramDesc> ProgramDescs;
constexpr char kProgramDescs[] = "program_descs";

typedef std::unordered_set<std::string> PinnedVars;
constexpr char kPinnedVars[] = "pinned_vars";

typedef std::vector<std::vector<std::pair<std::string, std::string>>>
    GroupParamsAndGrads;
constexpr char kGroupParamsAndDenseGrads[] = "group_params_dense_grads";

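// Returns true if `op` carries the given role bit in its OpRole attribute.
// A hedged usage example (assumed, not from this file):
//   if (IsOpRole(*op_desc, OpRole::kBackward)) { /* e.g. collect its grads */ }
// The role attribute is a bit mask, so an op may match more than one role.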
inline bool IsOpRole(const OpDesc &op, OpRole role) {
  const auto &attrs = op.GetAttrMap();
  auto iter = attrs.find(OpProtoAndCheckerMaker::OpRoleAttrName());
  if (iter == attrs.end()) return false;
  return static_cast<bool>(boost::get<int>(iter->second) &
                           static_cast<int>(role));
}

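// Returns the OpRoleVar attribute, which alternates parameter and gradient
// names, or an empty vector when the attribute is absent. A hedged sketch of
// consuming it (the `op_desc` name is illustrative):
//   auto role_vars = GetOpRoleVarsOrEmpty(*op_desc);
//   for (size_t i = 0; i < role_vars.size(); i += 2) {
//     const std::string &param = role_vars[i];
//     const std::string &grad = role_vars[i + 1];
//   }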
inline std::vector<std::string> GetOpRoleVarsOrEmpty(const OpDesc &op) {
  const auto &attrs = op.GetAttrMap();
  auto iter = attrs.find(OpProtoAndCheckerMaker::OpRoleVarAttrName());
  if (iter == attrs.end()) return {};
  auto &ret = boost::get<std::vector<std::string>>(iter->second);
  PADDLE_ENFORCE_EQ(
      ret.size() % 2, 0,
      platform::errors::InvalidArgument(
          "The size of attribute %s must be an even number, but got %d",
          OpProtoAndCheckerMaker::OpRoleVarAttrName(), ret.size()));
  return ret;
}

std::vector<std::unique_ptr<ir::Graph>> TrySeparateToMultipleSingleDeviceGraphs(
    ir::Graph *graph);

bool HasDropLastReadOp(const ir::Graph &graph);

bool HasKeepLastReadOp(const ir::Graph &graph);

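// Copies the graph attribute `name` of type T from `src` to `dst` when it is
// present. A hedged usage sketch (assumed, not from this file):
//   CopyGraphAttrIfExists<ProgramDescs>(multi_dev_graph, single_dev_graph,
//                                       kProgramDescs);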
template <typename T>
void CopyGraphAttrIfExists(const ir::Graph &src, ir::Graph *dst,
                           const std::string &name) {
  if (src.Has(name)) {
    auto &attr = src.Get<T>(name);
    dst->Set(name, new T(attr));
  }
}

}  // namespace details
}  // namespace framework
}  // namespace paddle