/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#pragma once

#include <algorithm>
#include <atomic>
#include <functional>
#include <iterator>
#include <memory>
#include <string>
#include <type_traits>
#include <typeindex>
#include <typeinfo>
#include <unordered_map>
#include <unordered_set>

#include "paddle/framework/attribute.h"
#include "paddle/framework/framework.pb.h"
#include "paddle/framework/grad_op_builder.h"
#include "paddle/framework/operator.h"
#include "paddle/framework/scope.h"

namespace paddle {
namespace framework {

// this class not only make proto but also init attribute checkers.
class OpProtoAndCheckerMaker {
 public:
  OpProtoAndCheckerMaker(OpProto* proto, OpAttrChecker* op_checker)
      : proto_(proto), op_checker_(op_checker) {}

38 39 40 41 42 43 44 45
  ~OpProtoAndCheckerMaker() {
    PADDLE_ENFORCE(validated_, "should call Validate after build");
  }

  void Validate() {
    validated_ = true;
    CheckNoDuplicatedInOutAttrs();
  }
46

47
 protected:
Y
Yu Yang 已提交
48
  struct VariableBuilder {
Y
Yu Yang 已提交
49
    OpProto::Var* var_;
Y
Yu Yang 已提交
50

Y
Yu Yang 已提交
51
    VariableBuilder& AsDuplicable() {
Y
Yu Yang 已提交
52
      var_->set_duplicable(true);
Y
Yu Yang 已提交
53 54 55
      return *this;
    }

Y
Yu Yang 已提交
56
    VariableBuilder& AsIntermediate() {
Y
Yu Yang 已提交
57
      var_->set_intermediate(true);
Y
Yu Yang 已提交
58 59 60
      return *this;
    }

Y
Yu Yang 已提交
61 62 63 64
    // TODO(FengJiayi, yuyang18): `AsNoGradient` is a very bad name, because it
    // means that input/output is not needed when calculate gradient. It does
    // not mean no gradient when backward. It should be changed soon.
    VariableBuilder& AsNoGradient() {
Y
Yu Yang 已提交
65
      var_->set_no_gradient(true);
Y
Yu Yang 已提交
66 67 68 69 70 71
      return *this;
    }
  };

  VariableBuilder AddInput(const std::string& name,
                           const std::string& comment) {
72
    auto* input = proto_->add_inputs();
73 74
    input->set_name(name);
    input->set_comment(comment);
Y
Yu Yang 已提交
75
    return VariableBuilder{input};
76 77
  }

Y
Yu Yang 已提交
78 79
  VariableBuilder AddOutput(const std::string& name,
                            const std::string& comment) {
80
    auto* output = proto_->add_outputs();
81 82
    output->set_name(name);
    output->set_comment(comment);
Y
Yu Yang 已提交
83
    return VariableBuilder{output};
84 85 86 87
  }

  template <typename T>
  TypedAttrChecker<T>& AddAttr(const std::string& name,
88 89
                               const std::string& comment,
                               bool generated = false) {
90
    auto* attr = proto_->add_attrs();
91 92
    attr->set_name(name);
    attr->set_comment(comment);
93
    attr->set_generated(generated);
Y
Yi Wang 已提交
94
    attr->set_type(AttrTypeID<T>());
95 96 97
    return op_checker_->AddAttrChecker<T>(name);
  }

98
  void AddComment(const std::string& comment) { proto_->set_comment(comment); }
99

100
 private:
101
  void CheckNoDuplicatedInOutAttrs() {
102
    std::unordered_set<std::string> names;
103 104 105 106
    auto checker = [&](const std::string& name) {
      PADDLE_ENFORCE(!names.count(name), "[%s] is duplicated", name);
      names.insert(name);
    };
107
    for (auto& attr : proto_->attrs()) {
108 109 110 111 112 113 114
      checker(attr.name());
    }
    for (auto& input : proto_->inputs()) {
      checker(input.name());
    }
    for (auto& output : proto_->outputs()) {
      checker(output.name());
115 116 117
    }
  }

118 119
  OpProto* proto_;
  OpAttrChecker* op_checker_;
120
  bool validated_{false};
121 122
};

F
fengjiayi 已提交
123 124 125 126 127
// A maker that declares nothing: used for operators (e.g. auto-generated
// gradient ops) that need no OpProto and no attribute checker.  OpRegistry
// detects NOPMaker by type and stores null proto_/checker_ for such ops.
class NOPMaker : public OpProtoAndCheckerMaker {
 public:
  NOPMaker(framework::OpProto* proto, framework::OpAttrChecker* op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {}
};

class OpRegistry {
Y
Yu Yang 已提交
130 131 132 133
  using VarNameMap = OperatorBase::VarNameMap;
  using OpCreator = std::function<OperatorBase*(
      const std::string& /*type*/, const VarNameMap& /*inputs*/,
      const VarNameMap& /*outputs*/, const AttributeMap& /*attrs*/)>;
134 135

 public:
F
fengjiayi 已提交
136 137 138 139 140 141
  struct OpInfo {
    OpCreator creator_;
    std::string grad_op_type_;
    OpProto* proto_;
    OpAttrChecker* checker_;
  };
142

143
  template <typename OpType, typename ProtoMakerType, typename GradOpType>
F
WIP  
fengjiayi 已提交
144 145 146 147 148
  static void RegisterOp(const std::string& op_type,
                         const std::string& grad_op_type) {
    PADDLE_ENFORCE(op_info_map().count(op_type) == 0,
                   "'%s' is registered more than once.", op_type);
    OpInfo op_info;
149 150 151
    op_info.creator_ = [](const std::string& type, const VarNameMap& inputs,
                          const VarNameMap& outputs,
                          const AttributeMap& attrs) {
Y
Yu Yang 已提交
152 153
      return new OpType(type, inputs, outputs, attrs);
    };
F
WIP  
fengjiayi 已提交
154 155 156 157
    op_info.grad_op_type_ = grad_op_type;
    if (std::type_index(typeid(ProtoMakerType)) !=
        std::type_index(typeid(NOPMaker))) {
      op_info.proto_ = new OpProto;
F
fengjiayi 已提交
158 159
      op_info.checker_ = new OpAttrChecker;
      auto maker = ProtoMakerType(op_info.proto_, op_info.checker_);
F
WIP  
fengjiayi 已提交
160
      maker.Validate();
161
      op_info.proto_->set_type(op_type);
F
WIP  
fengjiayi 已提交
162 163 164 165
      PADDLE_ENFORCE(
          op_info.proto_->IsInitialized(),
          "Fail to initialize %s's OpProto, because %s is not initialized",
          op_type, op_info.proto_->InitializationErrorString());
F
fengjiayi 已提交
166 167 168
    } else {
      op_info.proto_ = nullptr;
      op_info.checker_ = nullptr;
Y
Yu Yang 已提交
169
    }
F
fengjiayi 已提交
170
    op_info_map().insert(std::make_pair(op_type, op_info));
171 172 173 174
    // register gradient op
    if (!grad_op_type.empty()) {
      RegisterOp<GradOpType, NOPMaker, NOP>(grad_op_type, "");
    }
F
fengjiayi 已提交
175 176
  }

Y
Yu Yang 已提交
177
  static std::unique_ptr<OperatorBase> CreateOp(const std::string& type,
Y
Yu Yang 已提交
178 179
                                                const VarNameMap& inputs,
                                                const VarNameMap& outputs,
Y
Yu Yang 已提交
180
                                                AttributeMap attrs) {
F
WIP  
fengjiayi 已提交
181
    auto it = op_info_map().find(type);
182 183 184 185
    PADDLE_ENFORCE(it != op_info_map().end(),
                   "Operator '%s' has not been registered.", type);
    it->second.checker_->Check(attrs);
    auto op = it->second.creator_(type, inputs, outputs, attrs);
Y
Yu Yang 已提交
186
    return std::unique_ptr<OperatorBase>(op);
187 188
  }

Y
Yu Yang 已提交
189 190 191 192 193 194
  static VarNameMap ConvertOpDescVarsToVarNameMap(
      const google::protobuf::RepeatedPtrField<OpDesc::Var>& op_desc_vars) {
    VarNameMap ret_val;
    for (auto& var : op_desc_vars) {
      auto& var_names = ret_val[var.parameter()];
      auto& var_names_in_proto = var.arguments();
Y
Yu Yang 已提交
195 196 197 198
      var_names.reserve(static_cast<size_t>(var_names_in_proto.size()));
      std::copy(var_names_in_proto.begin(), var_names_in_proto.end(),
                std::back_inserter(var_names));
    }
Y
Yu Yang 已提交
199 200
    return ret_val;
  }
Y
Yu Yang 已提交
201

Y
Yu Yang 已提交
202
  static std::unique_ptr<OperatorBase> CreateOp(const OpDesc& op_desc) {
Y
Yu Yang 已提交
203 204
    VarNameMap inputs = ConvertOpDescVarsToVarNameMap(op_desc.inputs());
    VarNameMap outputs = ConvertOpDescVarsToVarNameMap(op_desc.outputs());
Y
Yu Yang 已提交
205
    AttributeMap attrs;
206
    for (auto& attr : op_desc.attrs()) {
Y
Yi Wang 已提交
207
      attrs[attr.name()] = GetAttrValue(attr);
208
    }
Y
Yu Yang 已提交
209 210

    return CreateOp(op_desc.type(), inputs, outputs, attrs);
211 212
  }

Y
Yu Yang 已提交
213
  static std::unique_ptr<OperatorBase> CreateGradOp(const OperatorBase& op) {
Y
Yu Yang 已提交
214
    PADDLE_ENFORCE(!op.IsNetOp(),
Y
Yu Yang 已提交
215
                   "Use framework::Backward to get backward ops");
Y
Yu Yang 已提交
216
    return std::unique_ptr<OperatorBase>(BuildGradOp(&op));
D
dongzhihong 已提交
217 218
  }

F
fengjiayi 已提交
219 220
  static std::unordered_map<std::string, const OpInfo>& op_info_map() {
    static std::unordered_map<std::string, const OpInfo> op_info_map_;
F
WIP  
fengjiayi 已提交
221
    return op_info_map_;
L
liaogang 已提交
222
  }
223
};
class Registrar {
 public:
  // In our design, various kinds of classes, e.g., operators and kernels,
  // have their corresponding registry and registrar. The action of
  // registration is in the constructor of a global registrar variable, which,
  // however, is not used in the code that calls package framework, and would
  // be removed from the generated binary file by the linker. To avoid such
  // removal, we add Touch to all registrar classes and make USE_OP macros
  // call this method. So, as long as the callee code calls USE_OP, the global
  // registrar variable won't be removed by the linker.
  void Touch() {}
};
F
fengjiayi 已提交
237

238
template <typename OpType, typename ProtoMakerType, typename GradOpType>
F
fengjiayi 已提交
239
class OpRegistrar : public Registrar {
240
 public:
F
fengjiayi 已提交
241
  explicit OpRegistrar(const char* op_type) { OpRegistrar(op_type, ""); }
F
WIP  
fengjiayi 已提交
242
  OpRegistrar(const char* op_type, const char* grad_op_type) {
243 244
    OpRegistry::RegisterOp<OpType, ProtoMakerType, GradOpType>(op_type,
                                                               grad_op_type);
D
dongzhihong 已提交
245 246 247
  }
};

F
fengjiayi 已提交
248 249 250 251
template <typename PlaceType, typename KernelType>
class OpKernelRegistrar : public Registrar {
 public:
  explicit OpKernelRegistrar(const char* op_type) {
252
    OperatorWithKernel::OpKernelKey key;
F
fengjiayi 已提交
253
    key.place_ = PlaceType();
254
    OperatorWithKernel::AllOpKernels()[op_type][key].reset(new KernelType);
F
fengjiayi 已提交
255 256 257
  }
};

/**
 * Check if MACRO is used in GLOBAL NAMESPACE: defines a local struct and
 * asserts it is the same type as the identically named struct at :: scope,
 * which only holds when the macro expands at global namespace.
 */
#define STATIC_ASSERT_GLOBAL_NAMESPACE(uniq_name, msg)                        \
  struct __test_global_namespace_##uniq_name##__ {};                          \
  static_assert(std::is_same<::__test_global_namespace_##uniq_name##__,       \
                             __test_global_namespace_##uniq_name##__>::value, \
                msg)

/**
 * Macro to register an Operator together with its gradient operator.
 * Defines clone-able wrapper classes for both, a global OpRegistrar whose
 * constructor performs the registration, and a Touch function referenced by
 * the USE_OP macros to keep the registrar alive through linking.
 */
#define REGISTER_OP(op_type, op_class, op_maker_class, grad_op_type,          \
                    grad_op_class)                                            \
  STATIC_ASSERT_GLOBAL_NAMESPACE(                                             \
      __reg_op__##op_type, "REGISTER_OP must be called in global namespace"); \
  class _OpClass_##op_type##_ : public op_class {                             \
   public:                                                                    \
    DEFINE_OP_CLONE_METHOD(_OpClass_##op_type##_);                            \
    DEFINE_OP_CONSTRUCTOR(_OpClass_##op_type##_, op_class);                   \
  };                                                                          \
  class _OpGradClass_##op_type##_ : public grad_op_class {                    \
   public:                                                                    \
    DEFINE_OP_CLONE_METHOD(_OpGradClass_##op_type##_);                        \
    DEFINE_OP_CONSTRUCTOR(_OpGradClass_##op_type##_, grad_op_class);          \
  };                                                                          \
  static ::paddle::framework::OpRegistrar<                                    \
      _OpClass_##op_type##_, op_maker_class, _OpGradClass_##op_type##_>       \
      __op_registrar_##op_type##__(#op_type, #grad_op_type);                  \
  int TouchOpRegistrar_##op_type() {                                          \
    __op_registrar_##op_type##__.Touch();                                     \
    return 0;                                                                 \
  }

// Convenience form of REGISTER_OP for operators that have no gradient op.
#define REGISTER_OP_WITHOUT_GRADIENT(op_type, op_class, op_maker_class) \
  REGISTER_OP(op_type, op_class, op_maker_class, , ::paddle::framework::NOP)

/**
 * Macro to register an OperatorKernel for a given device type, analogous to
 * REGISTER_OP: a global OpKernelRegistrar registers the kernel, and a Touch
 * function keeps the registrar from being stripped by the linker.
 */
#define REGISTER_OP_KERNEL(op_type, DEVICE_TYPE, place_class, ...)        \
  STATIC_ASSERT_GLOBAL_NAMESPACE(                                         \
      __reg_op_kernel_##op_type##_##DEVICE_TYPE##__,                      \
      "REGISTER_OP_KERNEL must be called in global namespace");           \
  static ::paddle::framework::OpKernelRegistrar<place_class, __VA_ARGS__> \
      __op_kernel_registrar_##op_type##_##DEVICE_TYPE##__(#op_type);      \
  int TouchOpKernelRegistrar_##op_type##_##DEVICE_TYPE() {                \
    __op_kernel_registrar_##op_type##_##DEVICE_TYPE##__.Touch();          \
    return 0;                                                             \
  }

// Device-specific shorthands for REGISTER_OP_KERNEL.
#define REGISTER_OP_GPU_KERNEL(op_type, ...) \
  REGISTER_OP_KERNEL(op_type, GPU, ::paddle::platform::GPUPlace, __VA_ARGS__)

#define REGISTER_OP_CPU_KERNEL(op_type, ...) \
  REGISTER_OP_KERNEL(op_type, CPU, ::paddle::platform::CPUPlace, __VA_ARGS__)

/**
 * Macros to mark which Operators and Kernels we will use, forcing the
 * compiler to reference their Touch functions so the linker keeps the
 * corresponding global registrar variables in the target binary.
 */
#define USE_OP_ITSELF(op_type)                                    \
  STATIC_ASSERT_GLOBAL_NAMESPACE(                                 \
      __use_op_itself_##op_type,                                  \
      "USE_OP_ITSELF must be called in global namespace");        \
  extern int TouchOpRegistrar_##op_type();                        \
  static int use_op_itself_##op_type##_ __attribute__((unused)) = \
      TouchOpRegistrar_##op_type()

#define USE_OP_DEVICE_KERNEL(op_type, DEVICE_TYPE)               \
  STATIC_ASSERT_GLOBAL_NAMESPACE(                                \
      __use_op_kernel_##op_type##_##DEVICE_TYPE##__,             \
      "USE_OP_DEVICE_KERNEL must be in global namespace");       \
  extern int TouchOpKernelRegistrar_##op_type##_##DEVICE_TYPE(); \
  static int use_op_kernel_##op_type##_##DEVICE_TYPE##_          \
      __attribute__((unused)) =                                  \
          TouchOpKernelRegistrar_##op_type##_##DEVICE_TYPE()

// TODO(fengjiayi): The following macros
// seem ugly, do we have a better method?

// USE_OP_KERNEL pulls in all device kernels available in this build.
#ifdef PADDLE_ONLY_CPU
#define USE_OP_KERNEL(op_type) USE_OP_DEVICE_KERNEL(op_type, CPU)
#else
#define USE_OP_KERNEL(op_type)        \
  USE_OP_DEVICE_KERNEL(op_type, CPU); \
  USE_OP_DEVICE_KERNEL(op_type, GPU)
#endif

// Use the op plus its CPU kernel only.  The trailing semicolon the macro
// used to carry is dropped for consistency with USE_OP; invocations are
// expected to supply their own semicolon.
#define USE_CPU_ONLY_OP(op_type) \
  USE_OP_ITSELF(op_type);        \
  USE_OP_DEVICE_KERNEL(op_type, CPU)

// Use the op plus every kernel compiled for it (CPU and, if built, GPU).
#define USE_OP(op_type)   \
  USE_OP_ITSELF(op_type); \
  USE_OP_KERNEL(op_type)

}  // namespace framework
}  // namespace paddle