// Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//

#pragma once

#include <atomic>
#include <functional>
#include <memory>
#include <queue>
#include <string>
#include <unordered_map>
#include <vector>

#include "paddle/fluid/eager/hooks.h"
#include "paddle/fluid/eager/type_defs.h"
#include "paddle/fluid/imperative/tracer.h"
#include "paddle/phi/api/ext/op_meta_info.h"
#include "paddle/utils/small_vector.h"
namespace egr {
// Generates unique names of the form "<prefix><key>_<id>", where <id> is a
// per-instance counter starting at 0. The counter is a std::atomic<int>, so
// concurrent Generate() calls on the same instance never yield the same id
// (the surrounding string concatenation itself carries no extra locking).
class UniqueNameGenerator {
 public:
  // prefix: prepended verbatim to every generated name; taken by value and
  // moved into the member to avoid an extra copy (sink parameter).
  explicit UniqueNameGenerator(std::string prefix = "")
      : prefix_(std::move(prefix)) {}

  // Returns "<prefix_><key>_<n>" with n = 0, 1, 2, ... for this instance.
  std::string Generate(std::string key = "eager_tmp") {
    return prefix_ + key + "_" + std::to_string(id_++);
  }

 private:
  std::atomic<int> id_{0};  // next id to hand out; fetch-and-increment
  std::string prefix_;
};

// Global
// TODO(jiabin): Now we are using imperative tracer, move it here when we
// deprecate imperative.

class GradNodeBase;
45 46 47
class Controller {
 public:
  static Controller& Instance() { return *controller_; }
J
Jiabin Yang 已提交
48 49
  paddle::platform::Place GetExpectedPlace() const {
    return tracer_->ExpectedPlace();
50 51
  }
  void SetExpectedPlace(const paddle::platform::Place& place) {
J
Jiabin Yang 已提交
52 53 54 55
    tracer_->SetExpectedPlace(place);
  }
  void SetAMPLevel(paddle::imperative::AmpLevel level) {
    tracer_->SetAmpLevel(level);
56
  }
J
Jiabin Yang 已提交
57 58 59
  paddle::imperative::AmpLevel GetAMPLevel() const {
    return tracer_->GetAmpLevel();
  }
60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76

  bool UseLayoutAutoTune() {
    bool use_autotune = false;
#if defined(PADDLE_WITH_CUDA)
    auto place = tracer_->ExpectedPlace();
    bool is_gpu_place = paddle::platform::is_gpu_place(place);
    if (is_gpu_place) {
      use_autotune = tracer_->UseLayoutAutoTune();
    }
#endif
    return use_autotune;
  }

  void DisableLayoutAutoTune() { tracer_->DisableLayoutAutoTune(); }

  void EnableLayoutAutoTune() { tracer_->EnableLayoutAutoTune(); }

77 78 79 80 81 82
  void SetPythonStack(std::string stack_str) {
    tracer_->SetPythonStack(stack_str);
  }

  std::string GetPythonStack() { return tracer_->GetPythonStack(); }

J
Jiabin Yang 已提交
83 84
  bool HasGrad() const { return tracer_->HasGrad(); }
  void SetHasGrad(bool has_grad) { tracer_->SetHasGrad(has_grad); }
85
  std::string GenerateUniqueName(std::string key = "eager_in_tmp") {
J
Jiabin Yang 已提交
86 87 88 89
    return tracer_->GenerateUniqueName(key);
  }
  const std::shared_ptr<paddle::imperative::Tracer>& GetCurrentTracer() {
    return tracer_;
90
  }
J
Jiabin Yang 已提交
91 92 93
  void SetCurrentTracer(
      const std::shared_ptr<paddle::imperative::Tracer>& tracer) {
    tracer_ = tracer;
94
    VLOG(6) << "Set current tracer for Controller: " << tracer_;
J
Jiabin Yang 已提交
95 96
  }

97 98 99 100 101
  const std::unordered_map<std::string, std::vector<paddle::OpMetaInfo>>&
  GetOpMetaInfoMap() {
    return op_meta_info_map_;
  }

102 103 104
  void MergeOpMetaInfoMap(
      const std::unordered_map<std::string, std::vector<paddle::OpMetaInfo>>&
          map) {
105 106 107
    op_meta_info_map_.insert(map.begin(), map.end());
  }

108 109
  std::unordered_map<std::string,
                     std::vector<std::vector<std::unordered_map<int, int>>>>&
110 111 112
  GetCustomEdgesSlotMap() {
    return custom_edges_slot_map_;
  }
113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128
  // For Cpp Hook
  void RegisterBackwardFinalHook(const std::function<void()>& call_back) {
    VLOG(6) << "RegisterBackwardFinalHook";
    final_backward_hooks_.emplace_back(
        std::make_shared<CppVoidHook>(std::move(call_back)));
    VLOG(6) << "Size: " << final_backward_hooks_.size();
  }
  // For Python hook
  void RegisterBackwardFinalHook(const std::shared_ptr<VoidHook>& call_back) {
    final_backward_hooks_.emplace_back(call_back);
  }
  const std::vector<std::shared_ptr<VoidHook>>& FinalBackwardHooks() const {
    return final_backward_hooks_;
  }

  void ClearFinalBackwardHooks() { final_backward_hooks_.clear(); }
129

130 131 132 133 134 135 136 137 138 139 140 141
  void ClearForceSequentialNodes() {
    while (!force_sequential_nodes_.empty()) {
      force_sequential_nodes_.pop();
    }
  }
  void PushBackForceSequentialNodes(GradNodeBase* node) {
    force_sequential_nodes_.push(node);
  }
  std::queue<GradNodeBase*> GetForceSequentialNodes() {
    return force_sequential_nodes_;
  }

142 143 144
 private:
  Controller() = default;
  static Controller* controller_;
J
Jiabin Yang 已提交
145 146
  std::shared_ptr<paddle::imperative::Tracer> tracer_{
      new paddle::imperative::Tracer()};
147 148
  std::unordered_map<std::string, std::vector<paddle::OpMetaInfo>>
      op_meta_info_map_;
149 150 151 152
  /* op_type : {{{grad_outputs}, {grad_inputs}, {input}, {output}, {attrs}},
   * {{grad_outputs}, {grad_inputs}, {input}, {output}, {attrs}}}*/
  std::unordered_map<std::string,
                     std::vector<std::vector<std::unordered_map<int, int>>>>
153
      custom_edges_slot_map_;
154
  std::vector<std::shared_ptr<VoidHook>> final_backward_hooks_;
155
  std::queue<GradNodeBase*> force_sequential_nodes_;
156 157 158 159
  DISABLE_COPY_AND_ASSIGN(Controller);
};

}  // namespace egr