// Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//

#pragma once

#include <atomic>
#include <functional>
#include <memory>
#include <queue>
#include <string>
#include <unordered_map>
#include <vector>

#include "paddle/fluid/eager/hooks.h"
#include "paddle/fluid/eager/type_defs.h"
#include "paddle/fluid/imperative/tracer.h"
#include "paddle/phi/api/ext/op_meta_info.h"
#include "paddle/utils/small_vector.h"

namespace egr {
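// Produces names that are unique per generator instance by appending a
// monotonically increasing counter to an optional prefix.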
class UniqueNameGenerator {
 public:
  explicit UniqueNameGenerator(std::string prefix = "") : prefix_(prefix) {}
  std::string Generate(std::string key = "eager_tmp") {
    return prefix_ + key + "_" + std::to_string(id_++);
  }

 private:
  std::atomic<int> id_{0};
  std::string prefix_;
};
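// Example (illustrative):
//   UniqueNameGenerator gen("layer_");
//   auto n0 = gen.Generate();          // "layer_eager_tmp_0"
//   auto n1 = gen.Generate("weight");  // "layer_weight_1"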

// Global eager-mode state.
// TODO(jiabin): We currently rely on the imperative Tracer; move its
// functionality here once imperative mode is deprecated.

class GradNodeBase;

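// Per-process singleton holding eager-mode global state. Most members are
// currently thin wrappers that forward to the underlying imperative Tracer.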
class Controller {
 public:
  static Controller& Instance() { return *controller_; }
  paddle::platform::Place GetExpectedPlace() const {
    return tracer_->ExpectedPlace();
  }
  void SetExpectedPlace(const paddle::platform::Place& place) {
    tracer_->SetExpectedPlace(place);
  }
  void SetAMPLevel(paddle::imperative::AmpLevel level) {
    tracer_->SetAmpLevel(level);
  }
  paddle::imperative::AmpLevel GetAMPLevel() const {
    return tracer_->GetAmpLevel();
  }

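  // Layout autotuning is only consulted when the binary is built with CUDA
  // and the expected place is a GPU place; otherwise it is reported as off.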
  bool UseLayoutAutoTune() {
    bool use_autotune = false;
#if defined(PADDLE_WITH_CUDA)
    auto place = tracer_->ExpectedPlace();
    bool is_gpu_place = paddle::platform::is_gpu_place(place);
    if (is_gpu_place) {
      use_autotune = tracer_->UseLayoutAutoTune();
    }
#endif
    return use_autotune;
  }

  void DisableLayoutAutoTune() { tracer_->DisableLayoutAutoTune(); }

  void EnableLayoutAutoTune() { tracer_->EnableLayoutAutoTune(); }

  bool HasGrad() const { return tracer_->HasGrad(); }
  void SetHasGrad(bool has_grad) { tracer_->SetHasGrad(has_grad); }
  std::string GenerateUniqueName(std::string key = "eager_in_tmp") {
    return tracer_->GenerateUniqueName(key);
  }
  const std::shared_ptr<paddle::imperative::Tracer>& GetCurrentTracer() {
    return tracer_;
  }
  void SetCurrentTracer(
      const std::shared_ptr<paddle::imperative::Tracer>& tracer) {
    tracer_ = tracer;
    VLOG(6) << "Set current tracer for Controller: " << tracer_;
  }

  const std::unordered_map<std::string, std::vector<paddle::OpMetaInfo>>&
  GetOpMetaInfoMap() {
    return op_meta_info_map_;
  }

  void MergeOpMetaInfoMap(
      const std::unordered_map<std::string, std::vector<paddle::OpMetaInfo>>&
          map) {
    op_meta_info_map_.insert(map.begin(), map.end());
  }

  std::unordered_map<std::string,
                     std::vector<std::vector<std::unordered_map<int, int>>>>&
  GetCustomEdgesSlotMap() {
    return custom_edges_slot_map_;
  }
  // For Cpp Hook
  void RegisterBackwardFinalHook(const std::function<void()>& call_back) {
    VLOG(6) << "RegisterBackwardFinalHook";
    final_backward_hooks_.emplace_back(
        std::make_shared<CppVoidHook>(call_back));
    VLOG(6) << "Size: " << final_backward_hooks_.size();
  }
  // For Python hook
  void RegisterBackwardFinalHook(const std::shared_ptr<VoidHook>& call_back) {
    final_backward_hooks_.emplace_back(call_back);
  }
  const std::vector<std::shared_ptr<VoidHook>>& FinalBackwardHooks() const {
    return final_backward_hooks_;
  }

  void ClearFinalBackwardHooks() { final_backward_hooks_.clear(); }

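  // Grad nodes pushed here are expected to run in the order in which they
  // were queued during the backward pass, rather than being scheduled freely.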
  void ClearForceSequentialNodes() {
    while (!force_sequential_nodes_.empty()) {
      force_sequential_nodes_.pop();
    }
  }
  void PushBackForceSequentialNodes(GradNodeBase* node) {
    force_sequential_nodes_.push(node);
  }
  std::queue<GradNodeBase*> GetForceSequentialNodes() {
    return force_sequential_nodes_;
  }

 private:
  Controller() = default;
  static Controller* controller_;
  std::shared_ptr<paddle::imperative::Tracer> tracer_{
      new paddle::imperative::Tracer()};
  std::unordered_map<std::string, std::vector<paddle::OpMetaInfo>>
      op_meta_info_map_;
  /* op_type : {{{grad_outputs}, {grad_inputs}, {input}, {output}, {attrs}},
   * {{grad_outputs}, {grad_inputs}, {input}, {output}, {attrs}}}*/
  std::unordered_map<std::string,
                     std::vector<std::vector<std::unordered_map<int, int>>>>
      custom_edges_slot_map_;
  std::vector<std::shared_ptr<VoidHook>> final_backward_hooks_;
  std::queue<GradNodeBase*> force_sequential_nodes_;
  DISABLE_COPY_AND_ASSIGN(Controller);
};
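// Example usage (illustrative sketch):
//   auto& ctrl = egr::Controller::Instance();
//   ctrl.SetHasGrad(true);
//   auto tmp_name = ctrl.GenerateUniqueName();  // e.g. "eager_in_tmp_0"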

}  // namespace egr