/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#pragma once
#include <functional>
#include <map>
#include <string>
#include <unordered_map>
#include <vector>

#include "paddle/fluid/framework/attribute.h"
#include "paddle/fluid/framework/no_need_buffer_vars_inference.h"
#include "paddle/fluid/framework/type_defs.h"
#include "paddle/fluid/platform/enforce.h"
#include "paddle/fluid/platform/macros.h"
#include "paddle/utils/flat_hash_map.h"

namespace paddle {
namespace framework {

class InferShapeContext;
class OpAttrChecker;

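// Functor-style interface for shape inference: a concrete implementation is
// invoked with the operator's InferShapeContext and infers/sets the shapes of
// its outputs.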
class InferShapeBase {
 public:
  virtual ~InferShapeBase() = default;
  virtual void operator()(InferShapeContext*) const = 0;
};

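// Per-operator-type registration record. It bundles the creator functor, the
// OpProto and attribute checker, the optional inference hooks (var type,
// shape, inplace, no-need-buffer vars) and the static/dygraph grad-op makers.
// Hooks that were never registered stay null, so callers should use the
// Has*() helpers before the corresponding accessors.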
class OpInfo {
 public:
  OpCreator creator_;
  GradOpMakerFN grad_op_maker_;
  proto::OpProto* proto_{nullptr};
  OpAttrChecker* checker_{nullptr};
  InferVarTypeFN infer_var_type_;
  InferShapeFN infer_shape_;
  InferInplaceOpFN infer_inplace_;
  InferNoNeedBufferVarsFN infer_no_need_buffer_vars_;
  DygraphGradOpMakerFN dygraph_grad_op_maker_;

  // NOTE(zjl): this flag is added to check whether
  // the grad maker is the default one.
  bool use_default_grad_op_desc_maker_{false};

  // NOTE(huihuangzheng): this flag is added to check whether
  // the grad maker is the empty one.
  bool use_empty_grad_op_desc_maker_{false};

  bool HasOpProtoAndChecker() const {
    return proto_ != nullptr && checker_ != nullptr;
  }

  const proto::OpProto& Proto() const {
    PADDLE_ENFORCE_NOT_NULL(
        proto_,
        platform::errors::NotFound("Operator's Proto has not been registered."));
    PADDLE_ENFORCE_EQ(proto_->IsInitialized(), true,
                      platform::errors::InvalidArgument(
                          "Operator's Proto in op info is not initialized."));
    return *proto_;
  }

  const OpCreator& Creator() const {
    PADDLE_ENFORCE_NOT_NULL(creator_,
                            platform::errors::NotFound(
                                "Operator's Creator has not been registered."));
    return creator_;
  }

  const GradOpMakerFN& GradOpMaker() const {
    // Normally, proto_ should not be null, except for some special operators,
    // such as the LeakyReluDoubleGrad op.
    std::string type = proto_ ? proto_->type() : "unknown";
    PADDLE_ENFORCE_NOT_NULL(
        grad_op_maker_,
        platform::errors::NotFound(
            "Operator %s's GradOpMaker has not been "
            "registered.\nPlease check whether (%s) operator has "
            "gradient operator.\nIf not, please set stop_gradient to be True "
            "for its input and output variables using var.stop_gradient=True.",
            type.c_str(), type.c_str()));
    return grad_op_maker_;
  }

  // Some ops don't have a grad_op_maker; check HasGradOpMaker() before
  // calling GradOpMaker().
  bool HasGradOpMaker() const { return grad_op_maker_ != nullptr; }

  bool HasNonEmptyGradOpMaker() const {
    return grad_op_maker_ != nullptr && !use_empty_grad_op_desc_maker_;
  }

  const DygraphGradOpMakerFN& DygraphGradOpMaker() const {
    // Normally, proto_ should not be null, except for some special operators,
    // such as the LeakyReluDoubleGrad op.
    std::string type = proto_ ? proto_->type() : "unknown";
    PADDLE_ENFORCE_NOT_NULL(
        dygraph_grad_op_maker_,
        platform::errors::NotFound(
            "Operator %s's DygraphGradOpMaker has not been "
            "registered.\nPlease check whether (%s) operator has "
            "gradient operator.\nIf not, please set stop_gradient to be True "
            "for its input and output variables using var.stop_gradient=True.",
            type.c_str(), type.c_str()));
    return dygraph_grad_op_maker_;
  }

  bool HasDygraphGradOpMaker() const {
    return dygraph_grad_op_maker_ != nullptr;
  }

  bool HasInferInplace() const { return infer_inplace_ != nullptr; }

  const OpAttrChecker* Checker() const { return checker_; }

  const InferNoNeedBufferVarsFN& NoNeedBufferVarsInferer() const {
    return infer_no_need_buffer_vars_;
  }
};

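// Process-wide singleton registry mapping an operator type name to its
// OpInfo. Entries are typically added via Insert() during operator
// registration and looked up via Get()/GetNullable() when operators are
// instantiated.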
class OpInfoMap {
 public:
  static OpInfoMap& Instance();

  bool Has(const std::string& op_type) const {
    return map_.find(op_type) != map_.end();
  }

  void Insert(const std::string& type, const OpInfo& info) {
    PADDLE_ENFORCE_NE(Has(type), true,
                      platform::errors::AlreadyExists(
                          "Operator (%s) has been registered.", type));
    map_.insert({type, info});
  }

  const OpInfo& Get(const std::string& type) const {
    auto op_info_ptr = GetNullable(type);
    PADDLE_ENFORCE_NOT_NULL(
        op_info_ptr,
        platform::errors::NotFound("Operator (%s) is not registered.", type));
    return *op_info_ptr;
  }

  const OpInfo* GetNullable(const std::string& type) const {
    auto it = map_.find(type);
    if (it == map_.end()) {
      return nullptr;
    } else {
      return &it->second;
    }
  }

  const paddle::flat_hash_map<std::string, OpInfo>& map() const { return map_; }

  paddle::flat_hash_map<std::string, OpInfo>* mutable_map() { return &map_; }

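  // Returns the names of all registered operators whose grad op maker is the
  // default one (i.e. use_default_grad_op_desc_maker_ is true); defined out
  // of line.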
  std::vector<std::string> GetUseDefaultGradOpDescMakerOps() const;

 private:
  OpInfoMap() = default;
  paddle::flat_hash_map<std::string, OpInfo> map_;

  DISABLE_COPY_AND_ASSIGN(OpInfoMap);
};
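
// Illustrative usage (a sketch; "my_op" stands for a hypothetical, already
// registered operator type):
//
//   const OpInfo& info = OpInfoMap::Instance().Get("my_op");
//   if (info.HasGradOpMaker()) {
//     const GradOpMakerFN& grad_maker = info.GradOpMaker();
//     // ... use grad_maker to build the gradient op descriptions ...
//   }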

}  // namespace framework
}  // namespace paddle