/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#pragma once

#include <functional>
#include <map>
#include <string>
#include <unordered_map>
#include <vector>

#include "paddle/fluid/framework/attribute.h"
#include "paddle/fluid/framework/no_need_buffer_vars_inference.h"
#include "paddle/fluid/framework/type_defs.h"
#include "paddle/fluid/platform/macros.h"

namespace paddle {
namespace framework {

// Functor interface for an operator's shape inference. Concrete
// implementations receive an InferShapeContext (inputs/outputs/attrs of the
// op) through operator() and are expected to derive the output shapes.
class InferShapeBase {
 public:
  virtual ~InferShapeBase() = default;
  // Performs shape inference against the given context.
  virtual void operator()(InferShapeContext*) const = 0;
};

class OpInfo {
 public:
Y
Yu Yang 已提交
38
  OpCreator creator_;
Y
Yu Yang 已提交
39
  GradOpMakerFN grad_op_maker_;
40
  proto::OpProto* proto_{nullptr};
41
  OpAttrChecker* checker_{nullptr};
Y
Yu Yang 已提交
42
  InferVarTypeFN infer_var_type_;
43
  InferShapeFN infer_shape_;
D
dzhwinter 已提交
44
  InferInplaceOpFN infer_inplace_;
S
sneaxiy 已提交
45
  InferNoNeedBufferVarsFN infer_no_need_buffer_vars_;
H
hong 已提交
46
  DygraphGradOpMakerFN dygraph_grad_op_maker_;
Y
Yu Yang 已提交
47

S
sneaxiy 已提交
48 49 50 51
  // NOTE(zjl): this flag is added to check whether
  // the grad maker is the default one.
  bool use_default_grad_op_desc_maker_{false};

52 53 54 55
  // NOTE(huihuangzheng): this flag is added to check whether
  // the grad maker is the empty one.
  bool use_empty_grad_op_desc_maker_{false};

Y
Yu Yang 已提交
56 57 58 59
  bool HasOpProtoAndChecker() const {
    return proto_ != nullptr && checker_ != nullptr;
  }

60
  const proto::OpProto& Proto() const {
61 62 63
    PADDLE_ENFORCE_NOT_NULL(
        proto_,
        platform::errors::NotFound("Operator's Proto has not been registered"));
64 65 66
    PADDLE_ENFORCE_EQ(proto_->IsInitialized(), true,
                      platform::errors::InvalidArgument(
                          "Operator's Proto in op info is not initialized."));
Y
Yu Yang 已提交
67 68 69 70
    return *proto_;
  }

  const OpCreator& Creator() const {
71
    PADDLE_ENFORCE_NOT_NULL(creator_,
72 73
                            platform::errors::NotFound(
                                "Operator's Creator has not been registered."));
Y
Yu Yang 已提交
74 75
    return creator_;
  }
Y
Yu Yang 已提交
76 77

  const GradOpMakerFN& GradOpMaker() const {
78 79 80 81 82
    // Normally, proto_ should not be null, except some special operators, such
    // as LeaklyReluDoubleGrad op.
    std::string type = proto_ ? proto_->type() : "unknown";
    PADDLE_ENFORCE_NOT_NULL(
        grad_op_maker_,
83 84 85 86 87 88
        platform::errors::NotFound(
            "Operator %s's GradOpMaker has not been "
            "registered.\nPlease check whether (%s) operator has "
            "gradient operator.\nIf not, please set stop_gradient to be True "
            "for its input and output variables using var.stop_gradient=True.",
            type.c_str(), type.c_str()));
Y
Yu Yang 已提交
89 90
    return grad_op_maker_;
  }
F
fengjiayi 已提交
91

92
  // some ops don't have grad_op_maker, add check before use GradOpMaker()
93
  bool HasGradOpMaker() const { return grad_op_maker_ != nullptr; }
94

95 96 97 98
  bool HasNonEmptyGradOpMaker() const {
    return grad_op_maker_ != nullptr && !use_empty_grad_op_desc_maker_;
  }

H
hong 已提交
99 100 101 102 103 104
  const DygraphGradOpMakerFN& DygraphGradOpMaker() const {
    // Normally, proto_ should not be null, except some special operators, such
    // as LeaklyReluDoubleGrad op.
    std::string type = proto_ ? proto_->type() : "unknown";
    PADDLE_ENFORCE_NOT_NULL(
        dygraph_grad_op_maker_,
105 106 107 108 109 110
        platform::errors::NotFound(
            "Operator %s's DygraphGradOpMaker has not been "
            "registered.\nPlease check whether (%s) operator has "
            "gradient operator.\nIf not, please set stop_gradient to be True "
            "for its input and output variables using var.stop_gradient=True.",
            type.c_str(), type.c_str()));
H
hong 已提交
111 112 113 114
    return dygraph_grad_op_maker_;
  }

  bool HasDygraphGradOpMaker() const {
115
    return dygraph_grad_op_maker_ != nullptr;
H
hong 已提交
116 117
  }

118
  bool HasInferInplace() const { return infer_inplace_ != nullptr; }
119

F
fengjiayi 已提交
120
  const OpAttrChecker* Checker() const { return checker_; }
S
sneaxiy 已提交
121 122 123 124

  const InferNoNeedBufferVarsFN& NoNeedBufferVarsInferer() const {
    return infer_no_need_buffer_vars_;
  }
Y
Yu Yang 已提交
125 126
};

class OpInfoMap {
 public:
  static OpInfoMap& Instance();

  bool Has(const std::string& op_type) const {
    return map_.find(op_type) != map_.end();
  }

135
  void Insert(const std::string& type, const OpInfo& info) {
136 137 138
    PADDLE_ENFORCE_NE(Has(type), true,
                      platform::errors::AlreadyExists(
                          "Operator (%s) has been registered.", type));
Y
Yu Yang 已提交
139 140 141 142
    map_.insert({type, info});
  }

  const OpInfo& Get(const std::string& type) const {
143
    auto op_info_ptr = GetNullable(type);
144 145 146
    PADDLE_ENFORCE_NOT_NULL(
        op_info_ptr,
        platform::errors::NotFound("Operator (%s) is not registered.", type));
147 148 149 150
    return *op_info_ptr;
  }

  const OpInfo* GetNullable(const std::string& type) const {
Y
Yu Yang 已提交
151
    auto it = map_.find(type);
152 153 154 155 156
    if (it == map_.end()) {
      return nullptr;
    } else {
      return &it->second;
    }
Y
Yu Yang 已提交
157 158
  }

159 160 161
  const std::unordered_map<std::string, OpInfo>& map() const { return map_; }

  std::unordered_map<std::string, OpInfo>* mutable_map() { return &map_; }
Y
Yu Yang 已提交
162

S
sneaxiy 已提交
163 164
  std::vector<std::string> GetUseDefaultGradOpDescMakerOps() const;

Y
Yu Yang 已提交
165 166
 private:
  OpInfoMap() = default;
167
  std::unordered_map<std::string, OpInfo> map_;
D
format  
dongzhihong 已提交
168 169

  DISABLE_COPY_AND_ASSIGN(OpInfoMap);
Y
Yu Yang 已提交
170
};

}  // namespace framework
}  // namespace paddle