// Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#pragma once

#include <algorithm>
#include <atomic>
#include <functional>
#include <map>
#include <memory>
#include <string>
#include <utility>
#include <vector>
#include "paddle/fluid/framework/type_defs.h"
#include "paddle/fluid/imperative/saved_variable_wrapper_list.h"
#include "paddle/fluid/imperative/type_defs.h"
#include "paddle/fluid/imperative/variable_wrapper.h"
#include "paddle/fluid/platform/place.h"

namespace paddle {
namespace imperative {

// TODO(zjl): to support py_func layer
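// OpBase wraps one operator recorded on the imperative (dygraph) backward
// graph. It owns the input/output VariableWrapper maps and the attribute
// map (plus a pointer to the op's default attributes), and lazily holds the
// framework::OperatorBase that is run to compute gradients.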
class OpBase {
 public:
  OpBase() = default;

  OpBase(const OpBase&) = delete;

  OpBase(OpBase&&) = default;

  OpBase& operator=(const OpBase&) = delete;

  OpBase& operator=(OpBase&&) = default;

  ~OpBase() { VLOG(3) << "Destruct Op: " << Type(); }

  const std::string& Type() const {
    return op_ ? op_->Type() : UnknownOpType();
  }

  const framework::AttributeMap& Attrs() const { return attrs_; }

  const framework::AttributeMap& DefaultAttrsMap() const {
    return *default_attrs_;
  }

  const framework::OpInfo& Info() const {
    PADDLE_ENFORCE_NOT_NULL(op_, platform::errors::PreconditionNotMet(
                                     "OpBase::Info() should be called after "
                                     "OpBase::SetType() is called"));
    return op_->Info();
  }

  const framework::OperatorBase& InnerOp() const {
    PADDLE_ENFORCE_NOT_NULL(op_, platform::errors::PreconditionNotMet(
                                     "OpBase::InnerOp() should be called after "
                                     "OpBase::SetType() is called"));
    return *op_;
  }

  void ClearBackwardTrace();

  NameVarMap<VariableWrapper>* GetMutableOutsMap() { return &outs_; }

  NameVarMap<VariableWrapper>* GetMutableInsMap() { return &ins_; }

  const NameVarMap<VariableWrapper>& GetInsMap() const { return ins_; }

  const NameVarMap<VariableWrapper>& GetOutsMap() const { return outs_; }

  void SetType(const std::string& type);

  void CheckAttrs() {
    auto& info = Info();
    if (info.Checker() != nullptr) {
      info.Checker()->Check(&attrs_, true);
    }
  }
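  // NOTE: Check(&attrs_, true) validates only the attrs that are present;
  // missing ones are expected to be served from DefaultAttrsMap().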

  void SetInput(const std::string& name, VariableWrapperList vars,
                bool is_grad) {
    auto& in_vars = ins_[name];
    *(in_vars.MutableVarList()) = std::move(vars);
    in_vars.SetIsGrad(is_grad);
  }

  void SetOutput(const std::string& name, VariableWrapperList vars,
                 bool is_grad) {
    auto& out_vars = outs_[name];
    *(out_vars.MutableVarList()) = std::move(vars);
    out_vars.SetIsGrad(is_grad);
  }
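  // A tracer typically populates an OpBase like this (names are
  // illustrative, not a real op):
  //   op->SetType("matmul_grad");
  //   op->SetInput("X", std::move(x_wrappers), /*is_grad=*/false);
  //   op->SetOutput(framework::GradVarName("X"), std::move(dx_wrappers),
  //                 /*is_grad=*/true);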

  void SetAttrMap(const framework::AttributeMap& attrs) { attrs_ = attrs; }

  void SetDefaultAttrsMap(const framework::AttributeMap& default_attrs) {
    default_attrs_ = &default_attrs;
  }

  void SetAttr(const std::string& name, const framework::Attribute& v) {
    attrs_[name] = v;
  }

  void SetBlockAttr(const std::string& name, framework::BlockDesc* block) {
    PADDLE_THROW(platform::errors::PermissionDenied(
        "SetBlockAttr is not support in dygraph OpBase"));
  }

  const framework::AttributeMap& Attrs() { return attrs_; }

  const framework::AttributeMap& DefaultAttrsMap() { return *default_attrs_; }

  bool HasAttr(const std::string& name) const {
    VLOG(6) << "Default attrs: " << default_attrs_;
    VLOG(6) << "attrs: " << &attrs_;
    return attrs_.count(name) > 0 || default_attrs_->count(name) > 0;
  }

  const framework::Attribute& GetAttr(const std::string& name) const {
    auto it = attrs_.find(name);
    if (it != attrs_.end()) {
      return it->second;
    } else {
      auto it_default = default_attrs_->find(name);
      PADDLE_ENFORCE_NE(
          it_default, default_attrs_->end(),
          platform::errors::NotFound("cannot find attribute [%s]", name));
      return it_default->second;
    }
  }

  template <typename T>
  inline const T& Attr(const std::string& name) const {
    return BOOST_GET_CONST(T, GetAttr(name));
  }
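  // Example lookup (attribute name is hypothetical); GetAttr() falls back
  // to the default attribute map when the attr was never set explicitly:
  //   bool trans_x = op.Attr<bool>("trans_x");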

  size_t id() const { return id_; }

  void SetId(size_t id) { id_ = id; }

  const platform::Place& place() const { return place_; }

  void SetPlace(const platform::Place& place) { place_ = place; }

  void EnforceHasInOut() const {
    PADDLE_ENFORCE_NE(
        ins_.empty() && outs_.empty(), true,
        platform::errors::NotFound(
            "Inputs and outputs of %s do not exist. This may be because:\n"
            "1. You use some output variables of the previous batch as the "
            "inputs of the current batch. Please try to call \"stop_gradient "
            "= True\" or \"detach()\" for these variables.\n"
            "2. You calculate backward twice for the same subgraph without "
            "setting retain_graph=True. Please set retain_graph=True in the "
            "first backward call.\n\n",
            Type()));
  }

  static size_t GenerateUniqueId() {
    static std::atomic<size_t> unique_id{0};
    return unique_id.fetch_add(1);
  }

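  // Each Run() overload executes `op` once under `place` with the given
  // input/output maps; one overload exists per variable container type
  // (VarBase, VariableWrapper, egr::EagerTensor).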
  static void Run(const framework::OperatorBase& op,
                  const NameVarMap<VarBase>& ins,
                  const NameVarMap<VarBase>& outs,
                  const framework::AttributeMap& attrs,
                  const framework::AttributeMap& default_attrs,
                  const platform::Place& place);

  static void Run(const framework::OperatorBase& op,
                  const NameVarMap<VariableWrapper>& ins,
                  const NameVarMap<VariableWrapper>& outs,
                  const framework::AttributeMap& attrs,
                  const framework::AttributeMap& default_attrs,
                  const platform::Place& place);
  static void Run(const framework::OperatorBase& op,
                  const NameVarMap<egr::EagerTensor>& ins,
                  const NameVarMap<egr::EagerTensor>& outs,
                  const framework::AttributeMap& attrs,
                  const framework::AttributeMap& default_attrs,
                  const platform::Place& place);

  bool HasVoidFunctionPostHook() const {
    return !void_function_post_hooks_.empty();
  }

  void AddVoidFunctionPostHook(std::shared_ptr<std::function<void()>>&& hook) {
    void_function_post_hooks_.emplace_back(std::move(hook));
  }

  const std::vector<std::shared_ptr<std::function<void()>>>&
  GetVoidFunctionPostHooks() const {
    return void_function_post_hooks_;
  }
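  // Example: register a callback to run after this op (the hook body is
  // only a sketch):
  //   op.AddVoidFunctionPostHook(std::make_shared<std::function<void()>>(
  //       [] { VLOG(3) << "op finished"; }));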

 private:
  static const std::string& UnknownOpType() {
    static std::string kUnknownOpType{"unknown"};
    return kUnknownOpType;
  }

 private:
  NameVarMap<VariableWrapper> ins_;
  NameVarMap<VariableWrapper> outs_;
  framework::AttributeMap attrs_;
  const framework::AttributeMap* default_attrs_ = nullptr;
  std::unique_ptr<framework::OperatorBase> op_;
  platform::Place place_;
  size_t id_{-1UL};
  // To reduce performance overhead during the compatibility phase,
  // temporarily cache the KernelContext.
  static pten::KernelContext pt_kernel_context_;
  std::vector<std::shared_ptr<std::function<void()>>> void_function_post_hooks_;
};

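// GradOpNode is one node of the backward graph: a vector of OpBases that
// run together, plus edges (grad_pending_nodes_) to the nodes that become
// ready once this node has executed.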
class GradOpNode {
 public:
  GradOpNode() = default;

  void reserve(size_t size) { ops_.reserve(size); }

  size_t size() const { return ops_.size(); }

  bool empty() const { return ops_.empty(); }

  void clear() { ops_.clear(); }

  void pop_back() { ops_.pop_back(); }

  template <typename... ARGS>
  OpBase& emplace_back(ARGS&&... args) {  // NOLINT
    ops_.emplace_back(std::forward<ARGS>(args)...);
    return ops_.back();
  }

  const OpBase& back() const { return ops_.back(); }

  OpBase& back() { return ops_.back(); }

  OpBase& operator[](size_t idx) { return ops_[idx]; }

  const OpBase& operator[](size_t idx) const { return ops_[idx]; }

  /* Iterator related */
  using Iterator = std::vector<OpBase>::iterator;
  using ConstIterator = std::vector<OpBase>::const_iterator;

  Iterator begin() { return ops_.begin(); }

  Iterator end() { return ops_.end(); }

  ConstIterator begin() const { return ops_.begin(); }

  ConstIterator end() const { return ops_.end(); }

  void InsertGradPendingNode(const std::shared_ptr<GradOpNode>& node) {
    if (node &&
        std::find(grad_pending_nodes_.begin(), grad_pending_nodes_.end(),
                  node) == grad_pending_nodes_.end()) {
      grad_pending_nodes_.emplace_back(node);
    }
  }
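  // NOTE: the deduplication above is a linear std::find, which is cheap for
  // the small fan-out of typical grad nodes.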

  void SetInplaceGradNameMap(
      const std::map<std::string, std::string>& inplace_input_map) {
    for (auto& pair : inplace_input_map) {
      VLOG(10) << "Set mapping relationship ("
               << framework::GradVarName(pair.first) << ", "
               << framework::GradVarName(pair.second)
               << ") for Inplace grad node.";
      inplace_grad_name_map_[framework::GradVarName(pair.first)] =
          framework::GradVarName(pair.second);
    }
  }
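  // Example: for an inplace op whose input "X" is reused as output "Out",
  // the recorded mapping is "X@GRAD" -> "Out@GRAD"
  // (framework::GradVarName() appends the "@GRAD" suffix).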

  const std::map<std::string, std::string>& InplaceGradNameMap() const {
    return inplace_grad_name_map_;
  }

  const std::vector<std::shared_ptr<GradOpNode>>& GradPendingNodes() const {
    return grad_pending_nodes_;
  }

 private:
  DISABLE_COPY_AND_ASSIGN(GradOpNode);

 private:
  std::vector<OpBase> ops_;
  std::vector<std::shared_ptr<GradOpNode>> grad_pending_nodes_;
  // Mapping between the grad output and grad input names of the grad node
  // of an inplace op.
  std::map<std::string, std::string> inplace_grad_name_map_;
};

}  // namespace imperative
}  // namespace paddle