/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#pragma once

#include <algorithm>
#include <atomic>
#include <type_traits>
#include <unordered_map>
#include <unordered_set>
#include "paddle/framework/attribute.h"
#include "paddle/framework/framework.pb.h"
#include "paddle/framework/grad_op_builder.h"
#include "paddle/framework/operator.h"
#include "paddle/framework/scope.h"

namespace paddle {
namespace framework {

// This class not only builds the op's OpProto, but also initializes its
// attribute checkers.
class OpProtoAndCheckerMaker {
 public:
  OpProtoAndCheckerMaker(OpProto* proto, OpAttrChecker* op_checker)
      : proto_(proto), op_checker_(op_checker) {}

  ~OpProtoAndCheckerMaker() {
    PADDLE_ENFORCE(validated_, "should call Validate after build");
  }

  void Validate() {
    validated_ = true;
    CheckNoDuplicatedInOutAttrs();
  }

 protected:
  struct VariableBuilder {
    OpProto::Var* var_;

    VariableBuilder& SetMultiple() {
      var_->set_duplicable(true);
      return *this;
    }

    VariableBuilder& SetTemporary() {
      var_->set_intermediate(true);
      return *this;
    }

    VariableBuilder& IgnoreGradient() {
      var_->set_no_gradient(true);
      return *this;
    }
  };

  VariableBuilder AddInput(const std::string& name,
                           const std::string& comment) {
    auto* input = proto_->add_inputs();
    input->set_name(name);
    input->set_comment(comment);
    return VariableBuilder{input};
  }

  VariableBuilder AddOutput(const std::string& name,
                            const std::string& comment) {
    auto* output = proto_->add_outputs();
    output->set_name(name);
    output->set_comment(comment);
    return VariableBuilder{output};
  }

  template <typename T>
  TypedAttrChecker<T>& AddAttr(const std::string& name,
                               const std::string& comment,
                               bool generated = false) {
    auto* attr = proto_->add_attrs();
    attr->set_name(name);
    attr->set_comment(comment);
    attr->set_generated(generated);
    attr->set_type(AttrTypeID<T>());
    return op_checker_->AddAttrChecker<T>(name);
  }

  void AddComment(const std::string& comment) { proto_->set_comment(comment); }

 private:
  void CheckNoDuplicatedInOutAttrs() {
    std::unordered_set<std::string> names;
    auto checker = [&](const std::string& name) {
      PADDLE_ENFORCE(!names.count(name), "[%s] is duplicated", name);
      names.insert(name);
    };
    for (auto& attr : proto_->attrs()) {
      checker(attr.name());
    }
    for (auto& input : proto_->inputs()) {
      checker(input.name());
    }
    for (auto& output : proto_->outputs()) {
      checker(output.name());
    }
  }

  OpProto* proto_;
  OpAttrChecker* op_checker_;
  bool validated_{false};
};
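
// A minimal sketch of a concrete maker (illustrative only; the operator name
// "my_op" and its inputs/outputs/attributes are hypothetical):
//
//   class MyOpMaker : public OpProtoAndCheckerMaker {
//    public:
//     MyOpMaker(OpProto* proto, OpAttrChecker* op_checker)
//         : OpProtoAndCheckerMaker(proto, op_checker) {
//       AddInput("X", "The input tensor of my_op").SetMultiple();
//       AddOutput("Out", "The output tensor of my_op");
//       AddAttr<float>("scale", "A scaling factor applied to X");
//       AddComment("my_op scales every element of X and writes it to Out.");
//     }
//   };
//
// OpRegistry::RegisterOp constructs the maker and then calls Validate(), which
// rejects duplicated input/output/attribute names.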

class OpRegistry {
  using OpCreator = std::function<OperatorBase*()>;
  using VarNameMap = std::unordered_map<std::string, std::vector<std::string>>;

 public:
  template <typename OpType, typename ProtoMakerType>
  static void RegisterOp(const std::string& op_type) {
    op_creators()[op_type] = [] { return new OpType; };
    OpAttrChecker& op_checker = op_checkers()[op_type];
    OpProto& op_proto = OpProtos()[op_type];
    auto maker = ProtoMakerType(&op_proto, &op_checker);
    maker.Validate();
    op_proto.set_type(op_type);
    PADDLE_ENFORCE(
        op_proto.IsInitialized(),
        "Fail to initialize %s's OpProto, because %s is not initialized",
        op_type, op_proto.InitializationErrorString());
  }

  template <typename GradOpType>
  static void RegisterGradOp(const std::string& op_type,
                             const std::string& grad_op_type) {
    op_creators()[grad_op_type] = [] { return new GradOpType; };
    grad_ops()[op_type] = grad_op_type;
  }

  static std::shared_ptr<OperatorBase> CreateOp(const std::string& type,
                                                const VarNameMap& inputs,
                                                const VarNameMap& outputs,
                                                const AttributeMap& attrs) {
    auto op_create_it = op_creators().find(type);
    PADDLE_ENFORCE(op_create_it != op_creators().end(),
                   "Operator %s cannot be found.", type);

    auto op = op_create_it->second();
    op->type_ = type;
    op->inputs_ = inputs;
    op->outputs_ = outputs;

    op->attrs_ = attrs;
    op_checkers().at(type).Check(op->attrs_);

    GenerateTempVariableName(op);

    op->Init();
    return std::shared_ptr<OperatorBase>(op);
  }

  static std::shared_ptr<OperatorBase> CreateOp(const OpDesc& op_desc) {
    VarNameMap inputs;
    for (auto& input : op_desc.inputs()) {
      auto& var_names = inputs[input.parameter()];
      auto& var_names_in_proto = input.arguments();
      var_names.reserve(static_cast<size_t>(var_names_in_proto.size()));
      std::copy(var_names_in_proto.begin(), var_names_in_proto.end(),
                std::back_inserter(var_names));
    }

    VarNameMap outputs;
    for (auto& output : op_desc.outputs()) {
      auto& var_names = outputs[output.parameter()];
      auto& var_names_in_proto = output.arguments();
      var_names.reserve(static_cast<size_t>(var_names_in_proto.size()));
      std::copy(var_names_in_proto.begin(), var_names_in_proto.end(),
                std::back_inserter(var_names));
    }

    AttributeMap attrs;
    for (auto& attr : op_desc.attrs()) {
      attrs[attr.name()] = GetAttrValue(attr);
    }

    return CreateOp(op_desc.type(), inputs, outputs, attrs);
  }

  static std::shared_ptr<OperatorBase> CreateGradOp(const OperatorBase& op) {
    PADDLE_ENFORCE(!op.IsNetOp(),
                   "Use framework::Backward to get backward ops");
    std::shared_ptr<OperatorBase> grad_op(BuildGradOp(&op));
    grad_op->Init();
    return grad_op;
  }

  static std::unordered_map<std::string, std::string>& grad_ops() {
    static std::unordered_map<std::string, std::string> grad_ops_;
    return grad_ops_;
  }

  static std::unordered_map<std::string, OpCreator>& op_creators() {
    static std::unordered_map<std::string, OpCreator> op_creators_;
    return op_creators_;
  }

 private:
  static std::unordered_map<std::string, OpAttrChecker>& op_checkers() {
    static std::unordered_map<std::string, OpAttrChecker> op_checkers_;
    return op_checkers_;
  }

  static void GenerateTempVariableName(OperatorBase* op) {
    static std::atomic<size_t> gUniqId(0UL);
    for (auto& output : op->outputs_) {
      for (auto& output_name : output.second) {
        if (output_name == kTempVarName) {
          output_name += op->type_;
          output_name += "@";
          output_name += std::to_string(gUniqId.fetch_add(1));
        }
      }
    }
  }
};
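
// A rough usage sketch (the OpDesc is assumed to come from a deserialized
// program; "my_op" is a hypothetical operator type):
//
//   OpDesc op_desc;
//   op_desc.set_type("my_op");
//   // ... fill in inputs(), outputs() and attrs() ...
//   std::shared_ptr<OperatorBase> op = OpRegistry::CreateOp(op_desc);
//
// CreateOp looks up the creator registered for the type, copies the
// inputs/outputs/attrs into the new operator, runs the registered attribute
// checkers, replaces outputs named kTempVarName with unique generated names,
// and finally calls op->Init().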

class Registrar {
 public:
  // In our design, various kinds of classes, e.g., operators and kernels, have
  // their corresponding registries and registrars. Registration happens in the
  // constructor of a global registrar variable. However, such a variable is
  // never referenced by the code that links against the framework package, so
  // the linker would strip it from the generated binary. To prevent this
  // removal, we add a Touch method to every registrar class and make the
  // USE_OP macros call it. As long as the client code invokes USE_OP, the
  // global registrar variable is kept by the linker.
  void Touch() {}
};

template <typename OpType, typename ProtoMakerType>
class OpRegistrar : public Registrar {
 public:
  explicit OpRegistrar(const char* op_type) {
    OpRegistry::RegisterOp<OpType, ProtoMakerType>(op_type);
  }
};

template <typename GradOpType>
class GradOpRegistrar : public Registrar {
 public:
  GradOpRegistrar(const char* op_type, const char* grad_op_type) {
    OpRegistry::RegisterGradOp<GradOpType>(op_type, grad_op_type);
  }
};

template <typename PlaceType, typename KernelType>
class OpKernelRegistrar : public Registrar {
 public:
  explicit OpKernelRegistrar(const char* op_type) {
    OperatorWithKernel::OpKernelKey key;
    key.place_ = PlaceType();
    OperatorWithKernel::AllOpKernels()[op_type][key].reset(new KernelType);
  }
};

/**
 * check if MACRO is used in GLOBAL NAMESPACE.
 */
#define STATIC_ASSERT_GLOBAL_NAMESPACE(uniq_name, msg)                        \
  struct __test_global_namespace_##uniq_name##__ {};                          \
  static_assert(std::is_same<::__test_global_namespace_##uniq_name##__,       \
                             __test_global_namespace_##uniq_name##__>::value, \
                msg)
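
// Illustrative only: used inside a namespace, the helper struct defined by the
// macro no longer matches its ::-qualified (global) counterpart, so code like
// the following fails to compile:
//
//   namespace foo {
//   STATIC_ASSERT_GLOBAL_NAMESPACE(bad, "must be called in global namespace");
//   }  // namespace foo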

/**
 * Macro to register an Operator.
 */
#define REGISTER_OP(op_type, op_class, op_maker_class)                        \
  STATIC_ASSERT_GLOBAL_NAMESPACE(                                             \
      __reg_op__##op_type, "REGISTER_OP must be called in global namespace"); \
  static ::paddle::framework::OpRegistrar<op_class, op_maker_class>           \
      __op_registrar_##op_type##__(#op_type);                                 \
  int TouchOpRegistrar_##op_type() {                                          \
    __op_registrar_##op_type##__.Touch();                                     \
    return 0;                                                                 \
  }
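
// Typical use (sketch; "my_op", MyOp and MyOpMaker are hypothetical names
// defined in the operator's own source file):
//
//   REGISTER_OP(my_op, paddle::operators::MyOp,
//               paddle::operators::MyOpMaker);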

/**
 * Macro to register a Gradient Operator.
 */
#define REGISTER_GRADIENT_OP(op_type, grad_op_type, grad_op_class)           \
  STATIC_ASSERT_GLOBAL_NAMESPACE(                                            \
      __reg_gradient_op__##op_type##_##grad_op_type,                         \
      "REGISTER_GRADIENT_OP must be called in global namespace");            \
  static ::paddle::framework::GradOpRegistrar<grad_op_class>                 \
      __op_gradient_registrar_##op_type##_##grad_op_type##__(#op_type,       \
                                                             #grad_op_type); \
  int TouchOpGradientRegistrar_##op_type() {                                 \
    __op_gradient_registrar_##op_type##_##grad_op_type##__.Touch();          \
    return 0;                                                                \
  }
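
// Typical use (sketch; the hypothetical names mirror the REGISTER_OP example
// above and would normally sit next to that call):
//
//   REGISTER_GRADIENT_OP(my_op, my_op_grad, paddle::operators::MyOpGrad);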

/**
 * Macro to register an OperatorKernel.
 */
#define REGISTER_OP_KERNEL(op_type, DEVICE_TYPE, place_class, ...)        \
  STATIC_ASSERT_GLOBAL_NAMESPACE(                                         \
      __reg_op_kernel_##op_type##_##DEVICE_TYPE##__,                      \
      "REGISTER_OP_KERNEL must be called in global namespace");           \
  static ::paddle::framework::OpKernelRegistrar<place_class, __VA_ARGS__> \
      __op_kernel_registrar_##op_type##_##DEVICE_TYPE##__(#op_type);      \
  int TouchOpKernelRegistrar_##op_type##_##DEVICE_TYPE() {                \
    __op_kernel_registrar_##op_type##_##DEVICE_TYPE##__.Touch();          \
    return 0;                                                             \
  }

/**
 * Macro to forbid users from registering a Gradient Operator.
 */
#define NO_GRADIENT(op_type)                           \
  STATIC_ASSERT_GLOBAL_NAMESPACE(                      \
      __reg_gradient_op__##op_type##_##op_type##_grad, \
      "NO_GRADIENT must be called in global namespace")

#define REGISTER_OP_GPU_KERNEL(op_type, ...) \
  REGISTER_OP_KERNEL(op_type, GPU, ::paddle::platform::GPUPlace, __VA_ARGS__)

#define REGISTER_OP_CPU_KERNEL(op_type, ...) \
  REGISTER_OP_KERNEL(op_type, CPU, ::paddle::platform::CPUPlace, __VA_ARGS__)
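
// Typical use (sketch; MyOpKernel is a hypothetical OpKernel subclass):
//
//   REGISTER_OP_CPU_KERNEL(my_op,
//                          paddle::operators::MyOpKernel<
//                              paddle::platform::CPUPlace, float>);
//
// The variadic arguments are forwarded as the kernel type, so a template
// argument list containing commas does not break the macro.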

/**
 * Macros to mark which Operator and Kernel we will use and to tell the
 * compiler to link them into the target.
 */
#define USE_OP_ITSELF(op_type)                                    \
  STATIC_ASSERT_GLOBAL_NAMESPACE(                                 \
      __use_op_itself_##op_type,                                  \
      "USE_OP_ITSELF must be called in global namespace");        \
  extern int TouchOpRegistrar_##op_type();                        \
  static int use_op_itself_##op_type##_ __attribute__((unused)) = \
      TouchOpRegistrar_##op_type()

// TODO(fengjiayi): Most ops' gradient ops have not been completed, so we use
// `NO_GRAD` to disable the macro USE_OP_GRADIENT(op_type). Otherwise the code
// can't be compiled. `NO_GRAD` should be removed once all gradient ops are
// completed.
#define NO_GRAD
#ifndef NO_GRAD
#define USE_OP_GRADIENT(op_type)                                    \
  STATIC_ASSERT_GLOBAL_NAMESPACE(                                   \
      __use_op_gradient_##op_type,                                  \
      "USE_OP_GRADIENT must be called in global namespace");        \
  extern int TouchOpGradientRegistrar_##op_type();                  \
  static int use_op_gradient_##op_type##_ __attribute__((unused)) = \
      TouchOpGradientRegistrar_##op_type()
#else
#define USE_OP_GRADIENT(op_type)
#endif

#define USE_OP_DEVICE_KERNEL(op_type, DEVICE_TYPE)               \
  STATIC_ASSERT_GLOBAL_NAMESPACE(                                \
      __use_op_kernel_##op_type##_##DEVICE_TYPE##__,             \
      "USE_OP_DEVICE_KERNEL must be in global namespace");       \
  extern int TouchOpKernelRegistrar_##op_type##_##DEVICE_TYPE(); \
  static int use_op_kernel_##op_type##_##DEVICE_TYPE##_          \
      __attribute__((unused)) =                                  \
          TouchOpKernelRegistrar_##op_type##_##DEVICE_TYPE()

// TODO(fengjiayi): The following macros seem ugly; do we have a better method?

#ifdef PADDLE_ONLY_CPU
#define USE_OP_KERNEL(op_type) USE_OP_DEVICE_KERNEL(op_type, CPU)
#else
#define USE_OP_KERNEL(op_type)        \
  USE_OP_DEVICE_KERNEL(op_type, CPU); \
  USE_OP_DEVICE_KERNEL(op_type, GPU)
#endif

#define USE_NO_GRAD_OP(op_type) \
  USE_OP_ITSELF(op_type);       \
  USE_OP_KERNEL(op_type)

#define USE_CPU_OP(op_type)           \
  USE_OP_ITSELF(op_type);             \
  USE_OP_DEVICE_KERNEL(op_type, CPU); \
  USE_OP_GRADIENT(op_type)

#define USE_OP(op_type)    \
  USE_NO_GRAD_OP(op_type); \
  USE_OP_GRADIENT(op_type)
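
// Typical use in code that links the operator library but never names the
// operator classes directly (sketch; "my_op" is hypothetical):
//
//   USE_OP(my_op);          // op, its gradient op, and all kernels
//   USE_CPU_OP(my_op);      // op, gradient op, and the CPU kernel only
//   USE_NO_GRAD_OP(my_op);  // op and kernels, without the gradient op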

}  // namespace framework
}  // namespace paddle