op_registry.h 15.3 KB
Newer Older
F
fengjiayi 已提交
1 2 3 4 5 6 7 8 9 10 11 12 13 14
/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

15 16
#pragma once

#include <algorithm>
#include <atomic>
#include <functional>
#include <memory>
#include <string>
#include <type_traits>
#include <typeindex>
#include <typeinfo>
#include <unordered_map>
#include <unordered_set>
#include <vector>

#include "paddle/framework/attribute.h"
#include "paddle/framework/grad_op_builder.h"
#include "paddle/framework/op_desc.pb.h"
#include "paddle/framework/scope.h"
27 28 29 30 31 32 33 34 35 36

namespace paddle {
namespace framework {

// This class not only builds the OpProto message describing an operator's
// inputs/outputs/attributes, but also registers the corresponding attribute
// checkers so attributes can be validated when an operator is created.
class OpProtoAndCheckerMaker {
 public:
  OpProtoAndCheckerMaker(OpProto* proto, OpAttrChecker* op_checker)
      : proto_(proto), op_checker_(op_checker) {}

  // A maker must be validated before destruction; otherwise a proto could
  // silently be registered without the duplicate-name sanity check.
  ~OpProtoAndCheckerMaker() {
    PADDLE_ENFORCE(validated_, "should call Validate after build");
  }

  // Marks the proto as fully built and verifies that no input, output, or
  // attribute name is declared more than once.
  void Validate() {
    validated_ = true;
    CheckNoDuplicatedInOutAttrs();
  }

 protected:
  // Fluent helper returned by AddInput/AddOutput; lets op makers tag a
  // declared variable via chained calls (SetMultiple().SetTemporary()...).
  struct VariableBuilder {
    VarProto* var_;  // not owned; points into proto_
    // Invoked when a variable is marked "multiple"; lazily adds the
    // "{input,output}_format" attribute to the proto (see SetHasMultiple).
    std::function<void()> on_multiple_;
    // Invoked when an output is marked temporary; AddInput passes nullptr
    // here because only outputs may be temporary.
    std::function<void()> on_temporary_;

    VariableBuilder& SetMultiple() {
      var_->set_multiple(true);
      on_multiple_();
      return *this;
    }

    VariableBuilder& SetTemporary() {
      // Inputs carry a null on_temporary_, so this enforces "outputs only".
      PADDLE_ENFORCE(bool(on_temporary_), "Cannot set temporary");
      var_->set_temporary(true);
      on_temporary_();
      return *this;
    }

    VariableBuilder& IgnoreGradient() {
      var_->set_ignore_gradient(true);
      return *this;
    }
  };

  // Declares an input variable of the operator.
  VariableBuilder AddInput(const std::string& name,
                           const std::string& comment) {
    VarProto* input = proto_->add_inputs();
    input->set_name(name);
    input->set_comment(comment);
    return VariableBuilder{input, [=] { this->SetHasMultipleInput(); },
                           nullptr};
  }

  // Declares an output variable of the operator.
  VariableBuilder AddOutput(const std::string& name,
                            const std::string& comment) {
    VarProto* output = proto_->add_outputs();
    output->set_name(name);
    output->set_comment(comment);
    return VariableBuilder{output, [=] { this->SetHasMultipleOutput(); },
                           [=] { this->SetHasTemporaryOutput(); }};
  }

  // Declares an attribute of type T and returns its typed checker so the
  // caller can chain constraints (e.g. SetDefault). `generated` marks
  // attributes added by the framework itself rather than by the op maker.
  template <typename T>
  TypedAttrChecker<T>& AddAttr(const std::string& name,
                               const std::string& comment,
                               bool generated = false) {
    AttrProto* attr = proto_->add_attrs();
    attr->set_name(name);
    attr->set_comment(comment);
    attr->set_generated(generated);
    attr->set_type(AttrTypeID<T>());
    return op_checker_->AddAttrChecker<T>(name);
  }

  void AddComment(const std::string& comment) { proto_->set_comment(comment); }

 private:
  // Adds the "<in_out>_format" attribute exactly once per direction; *flag
  // remembers whether it has already been added.
  void SetHasMultiple(const std::string& in_out, bool* flag) {
    if (!*flag) {
      AddAttr<std::vector<int>>(in_out + "_format",
                                "The multiple index of " + in_out +
                                    "\n"
                                    R"DOC(
This attribute is used by Paddle core framework. Paddle's Op support each input
or output could be a list of variable. This attribute is used to show how that
list organized.

e.g.
  input = ["a", "b", "c", "d", "e", "f"]
  input_format = [0, 4, 5, 6]

means
  The number of all input variables this op is six, and they are segmented into
  three inputs.

  The first input is input[0:4], second is input[4:5], third is input[5:6].
)DOC",
                                /*generated*/ true);
      *flag = true;
    }
  }

  void SetHasMultipleInput() { SetHasMultiple("input", &has_multiple_input_); }
  void SetHasMultipleOutput() {
    SetHasMultiple("output", &has_multiple_output_);
  }

  // Adds the "temporary_index" attribute once; its default (empty vector)
  // means no outputs are temporary.
  void SetHasTemporaryOutput() {
    if (!has_temporary_output_) {
      AddAttr<std::vector<int>>("temporary_index",
                                R"DOC(The temporary index of output.

Not all output of Paddle Op is used by user. For faster computation, each op
could output some its internal state to other op, other op could take that
output to make compute faster.

Add a mark to which output is temporary is helpful for future optimization.
)DOC",
                                /*generated*/ true)
          .SetDefault(std::vector<int>());
      has_temporary_output_ = true;
    }
  }

  // Ensures attribute, input and output names are mutually unique; throws
  // (via PADDLE_ENFORCE) on the first duplicate found.
  void CheckNoDuplicatedInOutAttrs() {
    std::unordered_set<std::string> names;
    auto checker = [&](const std::string& name) {
      PADDLE_ENFORCE(!names.count(name), "[%s] is duplicated", name);
      names.insert(name);
    };
    for (auto& attr : proto_->attrs()) {
      checker(attr.name());
    }
    for (auto& input : proto_->inputs()) {
      checker(input.name());
    }
    for (auto& output : proto_->outputs()) {
      checker(output.name());
    }
  }

  OpProto* proto_;             // not owned; filled in by this maker
  OpAttrChecker* op_checker_;  // not owned; collects attribute constraints
  bool validated_{false};
  bool has_multiple_input_{false};
  bool has_multiple_output_{false};
  bool has_temporary_output_{false};
};

F
fengjiayi 已提交
176 177 178 179 180
// A proto maker that declares no inputs, outputs, or attributes. Used by
// OpRegistry::RegisterOp for automatically registered gradient operators,
// whose protos are not exposed to users (see the NOPMaker check there).
class NOPMaker : public OpProtoAndCheckerMaker {
 public:
  NOPMaker(framework::OpProto* proto, framework::OpAttrChecker* op_checker)
      : OpProtoAndCheckerMaker(proto, op_checker) {}
};
F
WIP  
fengjiayi 已提交
181 182

// Per-op-type entry stored in OpRegistry::op_info_map().
struct OpInfo {
  // Factory producing a raw operator instance; OpRegistry::CreateOp wraps
  // the result in a shared_ptr.
  std::function<OperatorBase*()> creator_;
  // Type name of the gradient op; empty when the op has no gradient.
  std::string grad_op_type_;
  // Heap-allocated at registration time and never freed (they live for the
  // whole process). Both are nullptr for ops registered with NOPMaker
  // (e.g. gradient ops) — see OpRegistry::RegisterOp.
  OpProto* proto_;
  OpAttrChecker* checker_;
};

189
class OpRegistry {
Y
Yu Yang 已提交
190
  using VarIndexMap = std::unordered_map<std::string, int>;
Y
Yu Yang 已提交
191
  using VarNameList = std::vector<std::string>;
192 193

 public:
194
  template <typename OpType, typename ProtoMakerType, typename GradOpType>
F
WIP  
fengjiayi 已提交
195 196 197 198 199 200 201 202 203 204
  static void RegisterOp(const std::string& op_type,
                         const std::string& grad_op_type) {
    PADDLE_ENFORCE(op_info_map().count(op_type) == 0,
                   "'%s' is registered more than once.", op_type);
    OpInfo op_info;
    op_info.creator_ = [] { return new OpType; };
    op_info.grad_op_type_ = grad_op_type;
    if (std::type_index(typeid(ProtoMakerType)) !=
        std::type_index(typeid(NOPMaker))) {
      op_info.proto_ = new OpProto;
F
fengjiayi 已提交
205 206
      op_info.checker_ = new OpAttrChecker;
      auto maker = ProtoMakerType(op_info.proto_, op_info.checker_);
F
WIP  
fengjiayi 已提交
207
      maker.Validate();
208
      op_info.proto_->set_type(op_type);
F
WIP  
fengjiayi 已提交
209 210 211 212
      PADDLE_ENFORCE(
          op_info.proto_->IsInitialized(),
          "Fail to initialize %s's OpProto, because %s is not initialized",
          op_type, op_info.proto_->InitializationErrorString());
F
fengjiayi 已提交
213
      // ======will be refactored in following PRs============ //
F
WIP  
fengjiayi 已提交
214 215 216
      VarIndexMaps()[op_type].reset(new VarIndexMap());
      auto& varmap = *VarIndexMaps()[op_type];
      int idx = 0;
F
fengjiayi 已提交
217
      for (auto& var : op_info.proto_->inputs()) {
F
WIP  
fengjiayi 已提交
218 219 220
        varmap[var.name()] = idx++;
      }
      idx = 0;
F
fengjiayi 已提交
221
      for (auto& var : op_info.proto_->outputs()) {
F
WIP  
fengjiayi 已提交
222 223
        varmap[var.name()] = idx++;
      }
F
fengjiayi 已提交
224
      // ================================================ //
F
fengjiayi 已提交
225 226 227
    } else {
      op_info.proto_ = nullptr;
      op_info.checker_ = nullptr;
Y
Yu Yang 已提交
228
    }
F
fengjiayi 已提交
229
    op_info_map().insert(std::make_pair(op_type, op_info));
230 231 232 233
    // register gradient op
    if (!grad_op_type.empty()) {
      RegisterOp<GradOpType, NOPMaker, NOP>(grad_op_type, "");
    }
F
fengjiayi 已提交
234 235
  }

Y
Yu Yang 已提交
236 237 238 239
  static std::shared_ptr<OperatorBase> CreateOp(const std::string& type,
                                                const VarNameList& inputs,
                                                const VarNameList& outputs,
                                                const AttributeMap& attrs) {
F
WIP  
fengjiayi 已提交
240 241 242
    auto it = op_info_map().find(type);
    PADDLE_ENFORCE(it != op_info_map().end(), "'%s' has not been registered.",
                   type);
243

F
WIP  
fengjiayi 已提交
244
    auto op = it->second.creator_();
Y
Yu Yang 已提交
245 246 247
    op->type_ = type;
    op->inputs_ = inputs;
    op->outputs_ = outputs;
F
fengjiayi 已提交
248

Y
Yu Yang 已提交
249
    op->attrs_ = attrs;
F
WIP  
fengjiayi 已提交
250
    it->second.checker_->Check(op->attrs_);
251

Y
Yu Yang 已提交
252
    GenerateTempVariableName(op);
253

Y
Yu Yang 已提交
254
    {
Y
Yu Yang 已提交
255
      auto var_index_it = VarIndexMaps().find(type);
Y
Yu Yang 已提交
256 257 258 259
      if (var_index_it != VarIndexMaps().end()) {
        op->in_out_idxs_ = var_index_it->second;
      }
    }
Y
Yu Yang 已提交
260

Q
Qiao Longfei 已提交
261
    op->Init();
Y
Yu Yang 已提交
262
    return std::shared_ptr<OperatorBase>(op);
263 264
  }

Y
Yu Yang 已提交
265
  static std::shared_ptr<OperatorBase> CreateOp(const OpDesc& op_desc) {
Y
Yu Yang 已提交
266 267
    std::vector<std::string> inputs;
    inputs.reserve((size_t)op_desc.inputs_size());
268
    std::copy(op_desc.inputs().begin(), op_desc.inputs().end(),
Y
Yu Yang 已提交
269 270 271 272
              std::back_inserter(inputs));

    std::vector<std::string> outputs;
    outputs.reserve((size_t)op_desc.outputs_size());
273
    std::copy(op_desc.outputs().begin(), op_desc.outputs().end(),
Y
Yu Yang 已提交
274 275 276
              std::back_inserter(outputs));

    AttributeMap attrs;
277
    for (auto& attr : op_desc.attrs()) {
Y
Yi Wang 已提交
278
      attrs[attr.name()] = GetAttrValue(attr);
279
    }
Y
Yu Yang 已提交
280 281

    return CreateOp(op_desc.type(), inputs, outputs, attrs);
282 283
  }

Y
Yu Yang 已提交
284 285
  static std::shared_ptr<OperatorBase> CreateGradOp(const OperatorBase& op) {
    PADDLE_ENFORCE(!op.IsNetOp(),
Y
Yu Yang 已提交
286
                   "Use framework::Backward to get backward ops");
287
    std::shared_ptr<OperatorBase> grad_op(BuildGradOp(&op));
F
fengjiayi 已提交
288 289
    grad_op->Init();
    return grad_op;
D
dongzhihong 已提交
290 291
  }

F
fengjiayi 已提交
292 293
  static std::unordered_map<std::string, const OpInfo>& op_info_map() {
    static std::unordered_map<std::string, const OpInfo> op_info_map_;
F
WIP  
fengjiayi 已提交
294
    return op_info_map_;
295 296
  }

Y
Yu Yang 已提交
297 298 299 300 301 302
  static std::unordered_map<std::string, std::shared_ptr<VarIndexMap>>&
  VarIndexMaps() {
    static std::unordered_map<std::string, std::shared_ptr<VarIndexMap>> maps_;
    return maps_;
  }

303
 private:
304
  static void GenerateTempVariableName(OperatorBase* op) {
305 306
    static std::atomic<size_t> gUniqId(0UL);
    for (auto& outname : op->outputs_) {
307
      if (outname == kTempVarName) {
308
        outname += op->type_;
309 310 311 312 313
        outname += "@";
        outname += std::to_string(gUniqId.fetch_add(1));
      }
    }
  }
314
};
315

F
Fix bug  
fengjiayi 已提交
316 317
class Registrar {
 public:
  // In our design, various kinds of classes, e.g., operators and kernels, have
  // their corresponding registry and registrar. The action of registration is
  // in the constructor of a global registrar variable, which, however, are not
  // used in the code that calls package framework, and would be removed from
  // the generated binary file by the linker. To avoid such removal, we add
  // Touch to all registrar classes and make USE_OP macros to call this
  // method. So, as long as the callee code calls USE_OP, the global
  // registrar variable won't be removed by the linker.
  //
  // Touch is deliberately a no-op: merely calling it forces an ODR-use of
  // the registrar object.
  void Touch() {}
};
F
fengjiayi 已提交
328

329
template <typename OpType, typename ProtoMakerType, typename GradOpType>
F
fengjiayi 已提交
330
class OpRegistrar : public Registrar {
331
 public:
F
fengjiayi 已提交
332
  explicit OpRegistrar(const char* op_type) { OpRegistrar(op_type, ""); }
F
WIP  
fengjiayi 已提交
333
  OpRegistrar(const char* op_type, const char* grad_op_type) {
334 335
    OpRegistry::RegisterOp<OpType, ProtoMakerType, GradOpType>(op_type,
                                                               grad_op_type);
D
dongzhihong 已提交
336 337 338
  }
};

F
fengjiayi 已提交
339 340 341 342
// Registers KernelType as the kernel of `op_type` for the device described
// by PlaceType, as a side effect of constructing a global variable of this
// type; see the REGISTER_OP_KERNEL macro below.
template <typename PlaceType, typename KernelType>
class OpKernelRegistrar : public Registrar {
 public:
  explicit OpKernelRegistrar(const char* op_type) {
    OperatorWithKernel::OpKernelKey key;
    key.place_ = PlaceType();
    // The global kernel map takes ownership of the new kernel instance.
    OperatorWithKernel::AllOpKernels()[op_type][key].reset(new KernelType);
  }
};

349 350 351
/**
 * check if MACRO is used in GLOBAL NAMESPACE.
 *
 * Declares a dummy struct and asserts that its unqualified name and its
 * fully-qualified (::-prefixed) name denote the same type — which is only
 * true when the macro expands at global scope.
 */
#define STATIC_ASSERT_GLOBAL_NAMESPACE(uniq_name, msg)                        \
  struct __test_global_namespace_##uniq_name##__ {};                          \
  static_assert(std::is_same<::__test_global_namespace_##uniq_name##__,       \
                             __test_global_namespace_##uniq_name##__>::value, \
                msg)

358
/**
 * Macro to register Operator.
 *
 * Also registers grad_op_class under grad_op_type when the latter is
 * non-empty (handled inside OpRegistry::RegisterOp), and defines
 * TouchOpRegistrar_<op_type>() for the USE_OP* macros to reference so the
 * linker cannot strip the registrar object.
 */
#define REGISTER_OP(op_type, op_class, op_maker_class, grad_op_type,          \
                    grad_op_class)                                            \
  STATIC_ASSERT_GLOBAL_NAMESPACE(                                             \
      __reg_op__##op_type, "REGISTER_OP must be called in global namespace"); \
  static ::paddle::framework::OpRegistrar<op_class, op_maker_class,           \
                                          grad_op_class>                      \
      __op_registrar_##op_type##__(#op_type, #grad_op_type);                  \
  int TouchOpRegistrar_##op_type() {                                          \
    __op_registrar_##op_type##__.Touch();                                     \
    return 0;                                                                 \
  }
Y
Yu Yang 已提交
372

F
WIP  
fengjiayi 已提交
373
// Registers an operator with no gradient: the empty grad_op_type argument
// disables gradient registration, and NOP serves as the placeholder class.
#define REGISTER_OP_WITHOUT_GRADIENT(op_type, op_class, op_maker_class) \
  REGISTER_OP(op_type, op_class, op_maker_class, , ::paddle::framework::NOP)
D
dongzhihong 已提交
375

D
dongzhihong 已提交
376
/**
 * Macro to register OperatorKernel.
 *
 * Defines TouchOpKernelRegistrar_<op_type>_<DEVICE_TYPE>() for the
 * USE_OP_DEVICE_KERNEL macro to reference so the linker keeps the
 * kernel registrar object.
 */
#define REGISTER_OP_KERNEL(op_type, DEVICE_TYPE, place_class, ...)        \
  STATIC_ASSERT_GLOBAL_NAMESPACE(                                         \
      __reg_op_kernel_##op_type##_##DEVICE_TYPE##__,                      \
      "REGISTER_OP_KERNEL must be called in global namespace");           \
  static ::paddle::framework::OpKernelRegistrar<place_class, __VA_ARGS__> \
      __op_kernel_registrar_##op_type##_##DEVICE_TYPE##__(#op_type);      \
  int TouchOpKernelRegistrar_##op_type##_##DEVICE_TYPE() {                \
    __op_kernel_registrar_##op_type##_##DEVICE_TYPE##__.Touch();          \
    return 0;                                                             \
  }
D
dongzhihong 已提交
389

390
/**
 * Macro to Forbid user register Gradient Operator.
 */
/*
#define NO_GRADIENT(op_type)                           \
  STATIC_ASSERT_GLOBAL_NAMESPACE(                      \
      __reg_gradient_op__##op_type##_##op_type##_grad, \
      "NO_GRADIENT must be called in global namespace")
*/
F
fengjiayi 已提交
399

F
fengjiayi 已提交
400 401
// Convenience wrappers that fix the device type for REGISTER_OP_KERNEL.
#define REGISTER_OP_GPU_KERNEL(op_type, ...) \
  REGISTER_OP_KERNEL(op_type, GPU, ::paddle::platform::GPUPlace, __VA_ARGS__)

#define REGISTER_OP_CPU_KERNEL(op_type, ...) \
  REGISTER_OP_KERNEL(op_type, CPU, ::paddle::platform::CPUPlace, __VA_ARGS__)
Y
Yu Yang 已提交
405

406 407 408 409
/**
 * Macro to mark what Operator and Kernel we will use and tell the compiler to
 * link them into target.
 */
// Pulls in the operator registration only (no kernels): references the
// Touch function defined by REGISTER_OP so the registrar's translation
// unit is linked in.
#define USE_OP_ITSELF(op_type)                                    \
  STATIC_ASSERT_GLOBAL_NAMESPACE(                                 \
      __use_op_itself_##op_type,                                  \
      "USE_OP_ITSELF must be called in global namespace");        \
  extern int TouchOpRegistrar_##op_type();                        \
  static int use_op_itself_##op_type##_ __attribute__((unused)) = \
      TouchOpRegistrar_##op_type()

// Pulls in one device kernel of an op via the Touch function defined by
// REGISTER_OP_KERNEL.
#define USE_OP_DEVICE_KERNEL(op_type, DEVICE_TYPE)               \
  STATIC_ASSERT_GLOBAL_NAMESPACE(                                \
      __use_op_kernel_##op_type##_##DEVICE_TYPE##__,             \
      "USE_OP_DEVICE_KERNEL must be in global namespace");       \
  extern int TouchOpKernelRegistrar_##op_type##_##DEVICE_TYPE(); \
  static int use_op_kernel_##op_type##_##DEVICE_TYPE##_          \
      __attribute__((unused)) =                                  \
          TouchOpKernelRegistrar_##op_type##_##DEVICE_TYPE()
Y
Yu Yang 已提交
426

427
// TODO(fengjiayi): The following macros seems ugly, do we have better method?

// Pulls in an op's kernels: CPU always; GPU too unless this is a CPU-only
// build (PADDLE_ONLY_CPU).
#ifdef PADDLE_ONLY_CPU
#define USE_OP_KERNEL(op_type) USE_OP_DEVICE_KERNEL(op_type, CPU)
#else
#define USE_OP_KERNEL(op_type)        \
  USE_OP_DEVICE_KERNEL(op_type, CPU); \
  USE_OP_DEVICE_KERNEL(op_type, GPU)
#endif
436

F
WIP  
fengjiayi 已提交
437 438 439
// Pulls in an op and only its CPU kernel.
// NOTE(review): this macro ends with a trailing semicolon while USE_OP does
// not — looks unintentional, but existing call sites may rely on it; confirm
// before normalizing.
#define USE_CPU_ONLY_OP(op_type) \
  USE_OP_ITSELF(op_type);        \
  USE_OP_DEVICE_KERNEL(op_type, CPU);

// Pulls in an op together with all of its available device kernels.
#define USE_OP(op_type)   \
  USE_OP_ITSELF(op_type); \
  USE_OP_KERNEL(op_type)
444

445 446
}  // namespace framework
}  // namespace paddle