/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#pragma once

#include <algorithm>
#include <atomic>
#include <type_traits>
#include <unordered_map>
#include <unordered_set>

#include "paddle/framework/attribute.h"
#include "paddle/framework/grad_op_builder.h"
#include "paddle/framework/op_desc.pb.h"
#include "paddle/framework/scope.h"

namespace paddle {
namespace framework {

// This class not only builds the OpProto but also initializes the attribute
// checkers.
class OpProtoAndCheckerMaker {
 public:
  OpProtoAndCheckerMaker(OpProto* proto, OpAttrChecker* op_checker)
      : proto_(proto), op_checker_(op_checker) {}

  ~OpProtoAndCheckerMaker() {
    PADDLE_ENFORCE(validated_, "should call Validate after build");
  }

  void Validate() {
    validated_ = true;
    CheckNoDuplicatedInOutAttrs();
  }

 protected:
  struct VariableBuilder {
    VarProto* var_;
    std::function<void()> on_multiple_;
    std::function<void()> on_temporary_;

    VariableBuilder& SetMultiple() {
      var_->set_multiple(true);
      on_multiple_();
      return *this;
    }

    VariableBuilder& SetTemporary() {
      PADDLE_ENFORCE(bool(on_temporary_), "Cannot set temporary");
      var_->set_temporary(true);
      on_temporary_();
      return *this;
    }

    VariableBuilder& IgnoreGradient() {
      var_->set_ignore_gradient(true);
      return *this;
    }
  };
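
  // A usage sketch inside a maker, chaining VariableBuilder (hypothetical
  // variable names):
  //   AddInput("X", "the inputs of this op").SetMultiple();
  //   AddOutput("Tmp", "a scratch output").SetTemporary().IgnoreGradient();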

  VariableBuilder AddInput(const std::string& name,
                           const std::string& comment) {
    VarProto* input = proto_->add_inputs();
    input->set_name(name);
    input->set_comment(comment);
    return VariableBuilder{input, [=] { this->SetHasMultipleInput(); },
                           nullptr};
  }

  VariableBuilder AddOutput(const std::string& name,
                            const std::string& comment) {
    VarProto* output = proto_->add_outputs();
    output->set_name(name);
    output->set_comment(comment);
    return VariableBuilder{output, [=] { this->SetHasMultipleOutput(); },
                           [=] { this->SetHasTemporaryOutput(); }};
  }

  template <typename T>
  TypedAttrChecker<T>& AddAttr(const std::string& name,
                               const std::string& comment,
                               bool generated = false) {
    AttrProto* attr = proto_->add_attrs();
    attr->set_name(name);
    attr->set_comment(comment);
    attr->set_generated(generated);
    attr->set_type(AttrTypeID<T>());
    return op_checker_->AddAttrChecker<T>(name);
  }

  void AddComment(const std::string& comment) { proto_->set_comment(comment); }

 private:
  void SetHasMultiple(const std::string& in_out, bool* flag) {
    if (!*flag) {
      AddAttr<std::vector<int>>(in_out + "_format",
                                "The multiple index of " + in_out +
                                    "\n"
                                    R"DOC(
This attribute is used by the Paddle core framework. Each input or output of a
Paddle Op can be a list of variables; this attribute describes how that list is
organized.

e.g.
  input = ["a", "b", "c", "d", "e", "f"]
  input_format = [0, 4, 5, 6]

means
  This op has six input variables in total, and they are segmented into
  three inputs.

  The first input is input[0:4], the second is input[4:5], and the third is
  input[5:6].
)DOC",
                                /*generated*/ true);
      *flag = true;
    }
  }

  void SetHasMultipleInput() { SetHasMultiple("input", &has_multiple_input_); }
  void SetHasMultipleOutput() {
    SetHasMultiple("output", &has_multiple_output_);
  }

  void SetHasTemporaryOutput() {
    if (!has_temporary_output_) {
      AddAttr<std::vector<int>>("temporary_index",
                                R"DOC(The temporary index of output.

Not every output of a Paddle Op is used by the user. For faster computation, an
op may expose some of its internal state as extra outputs that other ops can
consume directly, which speeds up their computation.

Marking which outputs are temporary is helpful for future optimization.
)DOC",
                                /*generated*/ true)
          .SetDefault(std::vector<int>());
      has_temporary_output_ = true;
    }
  }

  void CheckNoDuplicatedInOutAttrs() {
    std::unordered_set<std::string> names;
    auto checker = [&](const std::string& name) {
      PADDLE_ENFORCE(!names.count(name), "[%s] is duplicated", name);
      names.insert(name);
    };
    for (auto& attr : proto_->attrs()) {
      checker(attr.name());
    }
    for (auto& input : proto_->inputs()) {
      checker(input.name());
    }
    for (auto& output : proto_->outputs()) {
      checker(output.name());
    }
  }

  OpProto* proto_;
  OpAttrChecker* op_checker_;
  bool validated_{false};
  bool has_multiple_input_{false};
  bool has_multiple_output_{false};
  bool has_temporary_output_{false};
};
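
// A minimal sketch of a custom ProtoMaker (hypothetical operator and names;
// real makers are defined alongside each operator rather than in this header):
//
//   class CosineOpProtoAndCheckerMaker : public OpProtoAndCheckerMaker {
//    public:
//     CosineOpProtoAndCheckerMaker(OpProto* proto, OpAttrChecker* op_checker)
//         : OpProtoAndCheckerMaker(proto, op_checker) {
//       AddInput("X", "The input of cosine op");
//       AddOutput("Out", "The output of cosine op");
//       AddAttr<float>("scale", "The scale of cosine op").SetDefault(1.0);
//       AddComment("This is the cosine op");
//     }
//   };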

class OpRegistry {
  using OpCreator = std::function<OperatorBase*()>;
  using VarIndexMap = std::unordered_map<std::string, int>;
  using VarNameList = std::vector<std::string>;

 public:
  template <typename OpType, typename ProtoMakerType>
  static void RegisterOp(const std::string& op_type) {
    op_creators()[op_type] = [] { return new OpType; };
    OpAttrChecker& op_checker = op_checkers()[op_type];
    OpProto& op_proto = protos()[op_type];
    auto maker = ProtoMakerType(&op_proto, &op_checker);
    maker.Validate();
    op_proto.set_type(op_type);
    PADDLE_ENFORCE(
        op_proto.IsInitialized(),
        "Fail to initialize %s's OpProto, because %s is not initialized",
        op_type, op_proto.InitializationErrorString());

    VarIndexMaps()[op_type].reset(new VarIndexMap());
    auto& varmap = *VarIndexMaps()[op_type];
    int idx = 0;
    for (auto& var : op_proto.inputs()) {
      varmap[var.name()] = idx++;
    }
    idx = 0;
    for (auto& var : op_proto.outputs()) {
      varmap[var.name()] = idx++;
    }
  }

  template <typename GradOpType>
  static void RegisterGradOp(const std::string& op_type,
                             const std::string& grad_op_type) {
    op_creators()[grad_op_type] = [] { return new GradOpType; };
    grad_ops()[op_type] = grad_op_type;
  }

  static std::shared_ptr<OperatorBase> CreateOp(const std::string& type,
                                                const VarNameList& inputs,
                                                const VarNameList& outputs,
                                                const AttributeMap& attrs) {
    auto op_create_it = op_creators().find(type);
    PADDLE_ENFORCE(op_create_it != op_creators().end(),
                   "Operator %s cannot be found.", type);

    auto op = op_create_it->second();
    op->type_ = type;
    op->inputs_ = inputs;
    op->outputs_ = outputs;

    op->attrs_ = attrs;
    op_checkers().at(type).Check(op->attrs_);

    GenerateTempVariableName(op);

    {
      auto var_index_it = VarIndexMaps().find(type);
      if (var_index_it != VarIndexMaps().end()) {
        op->in_out_idxs_ = var_index_it->second;
      }
    }

    op->Init();
    return std::shared_ptr<OperatorBase>(op);
  }

  static std::shared_ptr<OperatorBase> CreateOp(const OpDesc& op_desc) {
    std::vector<std::string> inputs;
    inputs.reserve((size_t)op_desc.inputs_size());
    std::copy(op_desc.inputs().begin(), op_desc.inputs().end(),
              std::back_inserter(inputs));

    std::vector<std::string> outputs;
    outputs.reserve((size_t)op_desc.outputs_size());
    std::copy(op_desc.outputs().begin(), op_desc.outputs().end(),
              std::back_inserter(outputs));

    AttributeMap attrs;
    for (auto& attr : op_desc.attrs()) {
      attrs[attr.name()] = GetAttrValue(attr);
    }

    return CreateOp(op_desc.type(), inputs, outputs, attrs);
  }
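
  // A usage sketch (assuming an op named "add_two" with inputs X, Y and
  // output Out has already been registered):
  //
  //   auto add_op =
  //       OpRegistry::CreateOp("add_two", {"X", "Y"}, {"Out"}, AttributeMap{});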

  static std::shared_ptr<OperatorBase> CreateGradOp(const OperatorBase& op) {
    PADDLE_ENFORCE(!op.IsNetOp(),
                   "Use framework::Backward to get backward ops");
    std::shared_ptr<OperatorBase> grad_op(BuildGradOp(&op));
    grad_op->Init();
    return grad_op;
  }

  static std::unordered_map<std::string, OpProto>& protos() {
    static std::unordered_map<std::string, OpProto> protos_;
    return protos_;
  }

  static std::unordered_map<std::string, std::string>& grad_ops() {
    static std::unordered_map<std::string, std::string> grad_ops_;
    return grad_ops_;
  }

  static std::unordered_map<std::string, std::shared_ptr<VarIndexMap>>&
  VarIndexMaps() {
    static std::unordered_map<std::string, std::shared_ptr<VarIndexMap>> maps_;
    return maps_;
  }

  static std::unordered_map<std::string, OpCreator>& op_creators() {
    static std::unordered_map<std::string, OpCreator> op_creators_;
    return op_creators_;
  }

 private:
  static std::unordered_map<std::string, OpAttrChecker>& op_checkers() {
    static std::unordered_map<std::string, OpAttrChecker> op_checkers_;
    return op_checkers_;
  }

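  // Rename each output whose name equals kTempVarName to a unique generated
  // name of the form <kTempVarName><op_type>@<id>.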
  static void GenerateTempVariableName(OperatorBase* op) {
    static std::atomic<size_t> gUniqId(0UL);
    for (auto& outname : op->outputs_) {
      if (outname == kTempVarName) {
        outname += op->type_;
        outname += "@";
        outname += std::to_string(gUniqId.fetch_add(1));
      }
    }
  }
};

class Registrar {
 public:
  // In our design, various kinds of classes, e.g., operators and kernels,
  // have their corresponding registry and registrar. The action of
  // registration is in the constructor of a global registrar variable, which,
  // however, is not referenced by the code that uses the framework package,
  // so the linker would remove it from the generated binary. To avoid such
  // removal, we add Touch to all registrar classes and make the USE_OP macros
  // call this method. So, as long as the caller invokes USE_OP, the global
  // registrar variable won't be removed by the linker.
  void Touch() {}
};

template <typename OpType, typename ProtoMakerType>
class OpRegistrar : public Registrar {
 public:
  explicit OpRegistrar(const char* op_type) {
    OpRegistry::RegisterOp<OpType, ProtoMakerType>(op_type);
  }
};

template <typename GradOpType>
class GradOpRegistrar : public Registrar {
 public:
  GradOpRegistrar(const char* op_type, const char* grad_op_type) {
    OpRegistry::RegisterGradOp<GradOpType>(op_type, grad_op_type);
  }
};

template <typename PlaceType, typename KernelType>
class OpKernelRegistrar : public Registrar {
 public:
  explicit OpKernelRegistrar(const char* op_type) {
    OperatorWithKernel::OpKernelKey key;
    key.place_ = PlaceType();
    OperatorWithKernel::AllOpKernels()[op_type][key].reset(new KernelType);
  }
};

/**
 * Check if a MACRO is used in the GLOBAL NAMESPACE.
 */
#define STATIC_ASSERT_GLOBAL_NAMESPACE(uniq_name, msg)                        \
  struct __test_global_namespace_##uniq_name##__ {};                          \
  static_assert(std::is_same<::__test_global_namespace_##uniq_name##__,       \
                             __test_global_namespace_##uniq_name##__>::value, \
                msg)

/**
 * Macro to register an Operator.
 */
#define REGISTER_OP(op_type, op_class, op_maker_class)                        \
  STATIC_ASSERT_GLOBAL_NAMESPACE(                                             \
      __reg_op__##op_type, "REGISTER_OP must be called in global namespace"); \
  static ::paddle::framework::OpRegistrar<op_class, op_maker_class>           \
      __op_registrar_##op_type##__(#op_type);                                 \
  int TouchOpRegistrar_##op_type() {                                          \
    __op_registrar_##op_type##__.Touch();                                     \
    return 0;                                                                 \
  }
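
// A usage sketch (hypothetical operator and maker classes):
//
//   REGISTER_OP(cos_sim, ::paddle::framework::CosineOp,
//               ::paddle::framework::CosineOpProtoAndCheckerMaker);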

/**
 * Macro to register a Gradient Operator.
 */
#define REGISTER_GRADIENT_OP(op_type, grad_op_type, grad_op_class)           \
  STATIC_ASSERT_GLOBAL_NAMESPACE(                                            \
      __reg_gradient_op__##op_type##_##grad_op_type,                         \
      "REGISTER_GRADIENT_OP must be called in global namespace");            \
  static ::paddle::framework::GradOpRegistrar<grad_op_class>                 \
      __op_gradient_registrar_##op_type##_##grad_op_type##__(#op_type,       \
                                                             #grad_op_type); \
  int TouchOpGradientRegistrar_##op_type() {                                 \
    __op_gradient_registrar_##op_type##_##grad_op_type##__.Touch();          \
    return 0;                                                                \
  }
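
// A usage sketch (hypothetical gradient operator class):
//
//   REGISTER_GRADIENT_OP(cos_sim, cos_sim_grad,
//                        ::paddle::framework::CosineGradOp);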

/**
 * Macro to register an OperatorKernel.
 */
#define REGISTER_OP_KERNEL(op_type, DEVICE_TYPE, place_class, ...)        \
  STATIC_ASSERT_GLOBAL_NAMESPACE(                                         \
      __reg_op_kernel_##op_type##_##DEVICE_TYPE##__,                      \
      "REGISTER_OP_KERNEL must be called in global namespace");           \
  static ::paddle::framework::OpKernelRegistrar<place_class, __VA_ARGS__> \
      __op_kernel_registrar_##op_type##_##DEVICE_TYPE##__(#op_type);      \
  int TouchOpKernelRegistrar_##op_type##_##DEVICE_TYPE() {                \
    __op_kernel_registrar_##op_type##_##DEVICE_TYPE##__.Touch();          \
    return 0;                                                             \
  }

/**
 * Macro to forbid users from registering a Gradient Operator.
 */
#define NO_GRADIENT(op_type)                           \
  STATIC_ASSERT_GLOBAL_NAMESPACE(                      \
      __reg_gradient_op__##op_type##_##op_type##_grad, \
      "NO_GRADIENT must be called in global namespace")

#define REGISTER_OP_GPU_KERNEL(op_type, ...) \
  REGISTER_OP_KERNEL(op_type, GPU, ::paddle::platform::GPUPlace, __VA_ARGS__)

#define REGISTER_OP_CPU_KERNEL(op_type, ...) \
  REGISTER_OP_KERNEL(op_type, CPU, ::paddle::platform::CPUPlace, __VA_ARGS__)
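
// A usage sketch (hypothetical kernel class templated on place and data type):
//
//   REGISTER_OP_CPU_KERNEL(
//       cos_sim,
//       ::paddle::framework::CosineOpKernel<::paddle::platform::CPUPlace, float>);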

/**
 * Macros to mark which Operators and Kernels we will use and tell the
 * compiler to link them into the target.
 */
#define USE_OP_ITSELF(op_type)                                    \
  STATIC_ASSERT_GLOBAL_NAMESPACE(                                 \
      __use_op_itself_##op_type,                                  \
      "USE_OP_ITSELF must be called in global namespace");        \
  extern int TouchOpRegistrar_##op_type();                        \
  static int use_op_itself_##op_type##_ __attribute__((unused)) = \
      TouchOpRegistrar_##op_type()

// TODO(fengjiayi): Most ops' gradient ops have not been completed, so we use
// `NO_GRAD` to disable the macro USE_OP_GRADIENT(op_type). Otherwise the code
// can't be compiled. `NO_GRAD` should be removed after all gradient ops are
// completed.
#define NO_GRAD
#ifndef NO_GRAD
#define USE_OP_GRADIENT(op_type)                                    \
  STATIC_ASSERT_GLOBAL_NAMESPACE(                                   \
      __use_op_gradient_##op_type,                                  \
      "USE_OP_GRADIENT must be called in global namespace");        \
  extern int TouchOpGradientRegistrar_##op_type();                  \
  static int use_op_gradient_##op_type##_ __attribute__((unused)) = \
      TouchOpGradientRegistrar_##op_type()
#else
#define USE_OP_GRADIENT(op_type)
#endif

#define USE_OP_DEVICE_KERNEL(op_type, DEVICE_TYPE)               \
  STATIC_ASSERT_GLOBAL_NAMESPACE(                                \
      __use_op_kernel_##op_type##_##DEVICE_TYPE##__,             \
      "USE_OP_DEVICE_KERNEL must be in global namespace");       \
  extern int TouchOpKernelRegistrar_##op_type##_##DEVICE_TYPE(); \
  static int use_op_kernel_##op_type##_##DEVICE_TYPE##_          \
      __attribute__((unused)) =                                  \
          TouchOpKernelRegistrar_##op_type##_##DEVICE_TYPE()

// TODO(fengjiayi): The following macros seem ugly; do we have a better method?

#ifdef PADDLE_ONLY_CPU
#define USE_OP_KERNEL(op_type) USE_OP_DEVICE_KERNEL(op_type, CPU)
#else
#define USE_OP_KERNEL(op_type)        \
  USE_OP_DEVICE_KERNEL(op_type, CPU); \
  USE_OP_DEVICE_KERNEL(op_type, GPU)
#endif

#define USE_NO_GRAD_OP(op_type) \
  USE_OP_ITSELF(op_type);       \
  USE_OP_KERNEL(op_type)

#define USE_CPU_OP(op_type)           \
  USE_OP_ITSELF(op_type);             \
  USE_OP_DEVICE_KERNEL(op_type, CPU); \
  USE_OP_GRADIENT(op_type)

#define USE_OP(op_type)    \
  USE_NO_GRAD_OP(op_type); \
  USE_OP_GRADIENT(op_type)
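
// A usage sketch, e.g. in a translation unit that only links against the op
// library (hypothetical op name):
//
//   USE_OP(cos_sim);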

}  // namespace framework
}  // namespace paddle