/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#pragma once

#include <algorithm>
#include <atomic>
#include <type_traits>
#include <typeindex>
#include <typeinfo>
#include <unordered_map>
#include <unordered_set>

#include "paddle/framework/attribute.h"
#include "paddle/framework/grad_op_builder.h"
#include "paddle/framework/op_desc.pb.h"
#include "paddle/framework/scope.h"

namespace paddle {
namespace framework {

// This class not only builds the op proto but also initializes the attribute
// checkers.
class OpProtoAndCheckerMaker {
 public:
  OpProtoAndCheckerMaker(OpProto* proto, OpAttrChecker* op_checker)
      : proto_(proto), op_checker_(op_checker) {}

  ~OpProtoAndCheckerMaker() {
    PADDLE_ENFORCE(validated_, "should call Validate after build");
  }

  void Validate() {
    validated_ = true;
    CheckNoDuplicatedInOutAttrs();
  }

 protected:
  struct VariableBuilder {
    VarProto* var_;
    std::function<void()> on_multiple_;
    std::function<void()> on_temporary_;

    VariableBuilder& SetMultiple() {
      var_->set_multiple(true);
      on_multiple_();
      return *this;
    }

    VariableBuilder& SetTemporary() {
      PADDLE_ENFORCE(bool(on_temporary_), "Cannot set temporary");
      var_->set_temporary(true);
      on_temporary_();
      return *this;
    }

    VariableBuilder& IgnoreGradient() {
      var_->set_ignore_gradient(true);
      return *this;
    }
  };

  VariableBuilder AddInput(const std::string& name,
                           const std::string& comment) {
    auto input = proto_->mutable_inputs()->Add();
    *input->mutable_name() = name;
    *input->mutable_comment() = comment;
    return VariableBuilder{input, [=] { this->SetHasMultipleInput(); },
                           nullptr};
  }

  VariableBuilder AddOutput(const std::string& name,
                            const std::string& comment) {
    auto output = proto_->mutable_outputs()->Add();
    *output->mutable_name() = name;
    *output->mutable_comment() = comment;
    return VariableBuilder{output, [=] { this->SetHasMultipleOutput(); },
                           [=] { this->SetHasTemporaryOutput(); }};
  }

  template <typename T>
  TypedAttrChecker<T>& AddAttr(const std::string& name,
                               const std::string& comment,
                               bool generated = false) {
    auto attr = proto_->mutable_attrs()->Add();
    *attr->mutable_name() = name;
    *attr->mutable_comment() = comment;
    attr->set_generated(generated);
    attr->set_type(AttrTypeID<T>());
    return op_checker_->AddAttrChecker<T>(name);
  }

  void AddComment(const std::string& comment) {
    *(proto_->mutable_comment()) = comment;
  }

 private:
  void SetHasMultiple(const std::string& in_out, bool* flag) {
    if (!*flag) {
      AddAttr<std::vector<int>>(in_out + "_format",
                                "The multiple index of " + in_out +
                                    "\n"
                                    R"DOC(
This attribute is used by the Paddle core framework. Each input or output of a
Paddle op can be a list of variables. This attribute describes how such a list
is organized.

e.g.
  input = ["a", "b", "c", "d", "e", "f"]
  input_format = [0, 4, 5, 6]

means
  This op has six input variables in total, and they are segmented into
  three inputs.

  The first input is input[0:4], the second is input[4:5], and the third is
  input[5:6].
)DOC",
                                /*generated*/ true);
      *flag = true;
    }
  }

  void SetHasMultipleInput() { SetHasMultiple("input", &has_multiple_input_); }
  void SetHasMultipleOutput() {
    SetHasMultiple("output", &has_multiple_output_);
  }

  void SetHasTemporaryOutput() {
    if (!has_temporary_output_) {
      AddAttr<std::vector<int>>("temporary_index",
                                R"DOC(The temporary index of output.

Not every output of a Paddle op is used by the user. For faster computation,
an op may expose some of its internal state as extra outputs, which other ops
can consume to speed up their own computation.

Marking which outputs are temporary is helpful for future optimization.
)DOC",
                                /*generated*/ true)
          .SetDefault(std::vector<int>());
      has_temporary_output_ = true;
    }
  }

  void CheckNoDuplicatedInOutAttrs() {
    std::unordered_set<std::string> names;
    auto checker = [&](const std::string& name) {
      PADDLE_ENFORCE(!names.count(name), "[%s] is duplicated", name);
      names.insert(name);
    };
    for (auto& attr : proto_->attrs()) {
      checker(attr.name());
    }
    for (auto& input : proto_->inputs()) {
      checker(input.name());
    }
    for (auto& output : proto_->outputs()) {
      checker(output.name());
    }
  }

  OpProto* proto_;
  OpAttrChecker* op_checker_;
  bool validated_{false};
  bool has_multiple_input_{false};
  bool has_multiple_output_{false};
  bool has_temporary_output_{false};
};
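
// A minimal usage sketch (a hypothetical "scale" op; all names below are
// illustrative, not part of this header):
//
//   class ScaleOpMaker : public OpProtoAndCheckerMaker {
//    public:
//     ScaleOpMaker(OpProto* proto, OpAttrChecker* op_checker)
//         : OpProtoAndCheckerMaker(proto, op_checker) {
//       AddInput("X", "The input of scale op");
//       AddOutput("Out", "The output of scale op");
//       AddAttr<float>("scale", "The scaling factor").SetDefault(1.0f);
//       AddComment("Out = scale * X");
//     }
//   };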

// A maker that defines nothing. Ops registered with NOPMaker (e.g. gradient
// ops via REGISTER_GRADIENT_OP) do not expose an OpProto to users.
class NOPMaker : public OpProtoAndCheckerMaker {};

struct OpInfo {
  std::function<OperatorBase*()> creator_;
  std::string grad_op_type_;
  OpProto* proto_{nullptr};
  OpAttrChecker* checker_{nullptr};
};

class OpRegistry {
  using OpCreator = std::function<OperatorBase*()>;
  using VarIndexMap = std::unordered_map<std::string, int>;
  using VarNameList = std::vector<std::string>;

 public:
  template <typename OpType, typename ProtoMakerType>
  static void RegisterOp(const std::string& op_type,
                         const std::string& grad_op_type) {
    PADDLE_ENFORCE(op_info_map().count(op_type) == 0,
                   "'%s' is registered more than once.", op_type);
    OpInfo op_info;
    op_info.creator_ = [] { return new OpType; };
    op_info.grad_op_type_ = grad_op_type;
    if (std::type_index(typeid(ProtoMakerType)) !=
        std::type_index(typeid(NOPMaker))) {
      op_info.proto_ = new OpProto;
      op_info.checker_ = new OpAttrChecker;
      auto maker = ProtoMakerType(op_info.proto_, op_info.checker_);
      maker.Validate();
      *op_info.proto_->mutable_type() = op_type;
      PADDLE_ENFORCE(
          op_info.proto_->IsInitialized(),
          "Failed to initialize %s's OpProto, because %s is not initialized.",
          op_type, op_info.proto_->InitializationErrorString());
      //======will be refactored in following PRs============//
      VarIndexMaps()[op_type].reset(new VarIndexMap());
      auto& varmap = *VarIndexMaps()[op_type];
      int idx = 0;
      for (auto& var : op_info.proto_->inputs()) {
        varmap[var.name()] = idx++;
      }
      idx = 0;
      for (auto& var : op_info.proto_->outputs()) {
        varmap[var.name()] = idx++;
      }
      //================================================//
    }
    op_info_map().insert(std::make_pair(op_type, op_info));
  }

  static std::shared_ptr<OperatorBase> CreateOp(const std::string& type,
                                                const VarNameList& inputs,
                                                const VarNameList& outputs,
                                                const AttributeMap& attrs) {
    auto it = op_info_map().find(type);
    PADDLE_ENFORCE(it != op_info_map().end(), "'%s' has not been registered.",
                   type);

    auto op = it->second.creator_();
    op->type_ = type;
    op->inputs_ = inputs;
    op->outputs_ = outputs;

    op->attrs_ = attrs;
    // Ops registered with NOPMaker (e.g. gradient ops) have no checker.
    if (it->second.checker_ != nullptr) {
      it->second.checker_->Check(op->attrs_);
    }

    GenerateTempVariableName(op);

    {
      auto var_index_it = VarIndexMaps().find(type);
      if (var_index_it != VarIndexMaps().end()) {
        op->in_out_idxs_ = var_index_it->second;
      }
    }

    op->Init();
    return std::shared_ptr<OperatorBase>(op);
  }

  static std::shared_ptr<OperatorBase> CreateOp(const OpDesc& op_desc) {
    std::vector<std::string> inputs;
    inputs.reserve((size_t)op_desc.inputs_size());
    std::copy(op_desc.inputs().begin(), op_desc.inputs().end(),
              std::back_inserter(inputs));

    std::vector<std::string> outputs;
    outputs.reserve((size_t)op_desc.outputs_size());
    std::copy(op_desc.outputs().begin(), op_desc.outputs().end(),
              std::back_inserter(outputs));

    AttributeMap attrs;
    for (auto& attr : op_desc.attrs()) {
      attrs[attr.name()] = GetAttrValue(attr);
    }

    return CreateOp(op_desc.type(), inputs, outputs, attrs);
  }
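
  // A usage sketch (assumes an "add_two" op has been registered and its
  // registrar linked in, e.g. via USE_OP(add_two); names are illustrative):
  //
  //   auto add_op = OpRegistry::CreateOp("add_two", /*inputs=*/{"X", "Y"},
  //                                      /*outputs=*/{"Out"}, /*attrs=*/{});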

  static std::shared_ptr<OperatorBase> CreateGradOp(const OperatorBase& op) {
    PADDLE_ENFORCE(!op.IsNetOp(),
                   "Use framework::Backward to get backward ops");
    std::shared_ptr<OperatorBase> grad_op(BuildGradOp(&op));
    grad_op->Init();
    return grad_op;
  }
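
  // A sketch of obtaining the backward op of a single (non-network) op,
  // continuing the example above:
  //
  //   auto grad_add_op = OpRegistry::CreateGradOp(*add_op);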

  static std::unordered_map<std::string, OpInfo>& op_info_map() {
    static std::unordered_map<std::string, OpInfo> op_info_map_;
    return op_info_map_;
  }

  static std::unordered_map<std::string, std::shared_ptr<VarIndexMap>>&
  VarIndexMaps() {
    static std::unordered_map<std::string, std::shared_ptr<VarIndexMap>> maps_;
    return maps_;
  }

 private:
  static void GenerateTempVariableName(OperatorBase* op) {
    static std::atomic<size_t> gUniqId(0UL);
    for (auto& outname : op->outputs_) {
      if (outname == kTempVarName) {
        outname += op->type_;
        outname += "@";
        outname += std::to_string(gUniqId.fetch_add(1));
      }
    }
  }
};

class Registrar {
 public:
  // In our design, various kinds of classes, e.g., operators and kernels,
  // have their corresponding registry and registrar. The registration happens
  // in the constructor of a global registrar variable, which, however, is not
  // referenced by the code that uses the framework, so the linker would strip
  // it from the generated binary. To avoid such removal, we add a Touch
  // method to all registrar classes and make the USE_OP macros call it. As
  // long as the caller invokes USE_OP, the global registrar variable won't be
  // removed by the linker.
  void Touch() {}
};

template <typename OpType, typename ProtoMakerType>
class OpRegistrar : public Registrar {
 public:
  explicit OpRegistrar(const char* op_type) : OpRegistrar(op_type, "") {}
  OpRegistrar(const char* op_type, const char* grad_op_type) {
    OpRegistry::RegisterOp<OpType, ProtoMakerType>(op_type, grad_op_type);
  }
};

template <typename PlaceType, typename KernelType>
class OpKernelRegistrar : public Registrar {
 public:
  explicit OpKernelRegistrar(const char* op_type) {
    OperatorWithKernel::OpKernelKey key;
    key.place_ = PlaceType();
    OperatorWithKernel::AllOpKernels()[op_type][key].reset(new KernelType);
  }
};

/**
 * check if MACRO is used in GLOBAL NAMESPACE.
 */
#define STATIC_ASSERT_GLOBAL_NAMESPACE(uniq_name, msg)                        \
  struct __test_global_namespace_##uniq_name##__ {};                          \
  static_assert(std::is_same<::__test_global_namespace_##uniq_name##__,       \
                             __test_global_namespace_##uniq_name##__>::value, \
                msg)

/**
 * Macro to register Operator.
 */
#define REGISTER_OP(op_type, op_class, op_maker_class, grad_op_type)          \
  STATIC_ASSERT_GLOBAL_NAMESPACE(                                             \
      __reg_op__##op_type, "REGISTER_OP must be called in global namespace"); \
  static ::paddle::framework::OpRegistrar<op_class, op_maker_class>           \
      __op_registrar_##op_type##__(#op_type, #grad_op_type);                  \
  int TouchOpRegistrar_##op_type() {                                          \
    __op_registrar_##op_type##__.Touch();                                     \
    return 0;                                                                 \
  }
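
// For example, registering a hypothetical "add_two" op together with its
// gradient op (all names are illustrative):
//
//   REGISTER_OP(add_two, AddOp, AddOpMaker, add_two_grad);
//   REGISTER_GRADIENT_OP(add_two_grad, AddGradOp);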

#define REGISTER_OP_WITHOUT_GRADIENT(op_type, op_class, op_maker_class) \
  REGISTER_OP(op_type, op_class, op_maker_class, )

#define REGISTER_GRADIENT_OP(op_type, op_class) \
  REGISTER_OP(op_type, op_class, ::paddle::framework::NOPMaker, )

/**
 * Macro to register OperatorKernel.
 */
#define REGISTER_OP_KERNEL(op_type, DEVICE_TYPE, place_class, ...)        \
  STATIC_ASSERT_GLOBAL_NAMESPACE(                                         \
      __reg_op_kernel_##op_type##_##DEVICE_TYPE##__,                      \
      "REGISTER_OP_KERNEL must be called in global namespace");           \
  static ::paddle::framework::OpKernelRegistrar<place_class, __VA_ARGS__> \
      __op_kernel_registrar_##op_type##_##DEVICE_TYPE##__(#op_type);      \
  int TouchOpKernelRegistrar_##op_type##_##DEVICE_TYPE() {                \
    __op_kernel_registrar_##op_type##_##DEVICE_TYPE##__.Touch();          \
    return 0;                                                             \
  }

/**
 * Macro to forbid users from registering a gradient operator.
 */
/*
#define NO_GRADIENT(op_type)                           \
  STATIC_ASSERT_GLOBAL_NAMESPACE(                      \
      __reg_gradient_op__##op_type##_##op_type##_grad, \
      "NO_GRADIENT must be called in global namespace")
*/

#define REGISTER_OP_GPU_KERNEL(op_type, ...) \
  REGISTER_OP_KERNEL(op_type, GPU, ::paddle::platform::GPUPlace, __VA_ARGS__)

#define REGISTER_OP_CPU_KERNEL(op_type, ...) \
  REGISTER_OP_KERNEL(op_type, CPU, ::paddle::platform::CPUPlace, __VA_ARGS__)
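
// For example (a sketch; "add_two" and AddKernel are illustrative names):
//
//   REGISTER_OP_CPU_KERNEL(add_two,
//                          AddKernel<::paddle::platform::CPUPlace, float>);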

/**
 * Macros to mark which Operators and Kernels we will use, so that the linker
 * includes them in the target binary.
 */
#define USE_OP_ITSELF(op_type)                                    \
  STATIC_ASSERT_GLOBAL_NAMESPACE(                                 \
      __use_op_itself_##op_type,                                  \
      "USE_OP_ITSELF must be called in global namespace");        \
  extern int TouchOpRegistrar_##op_type();                        \
  static int use_op_itself_##op_type##_ __attribute__((unused)) = \
      TouchOpRegistrar_##op_type()

#define USE_OP_DEVICE_KERNEL(op_type, DEVICE_TYPE)               \
  STATIC_ASSERT_GLOBAL_NAMESPACE(                                \
      __use_op_kernel_##op_type##_##DEVICE_TYPE##__,             \
      "USE_OP_DEVICE_KERNEL must be in global namespace");       \
  extern int TouchOpKernelRegistrar_##op_type##_##DEVICE_TYPE(); \
  static int use_op_kernel_##op_type##_##DEVICE_TYPE##_          \
      __attribute__((unused)) =                                  \
          TouchOpKernelRegistrar_##op_type##_##DEVICE_TYPE()

// TODO(fengjiayi): The following macros seem ugly; do we have a better
// approach?

#ifdef PADDLE_ONLY_CPU
#define USE_OP_KERNEL(op_type) USE_OP_DEVICE_KERNEL(op_type, CPU)
#else
#define USE_OP_KERNEL(op_type)        \
  USE_OP_DEVICE_KERNEL(op_type, CPU); \
  USE_OP_DEVICE_KERNEL(op_type, GPU)
#endif

#define USE_CPU_ONLY_OP(op_type) \
  USE_OP_ITSELF(op_type);        \
  USE_OP_DEVICE_KERNEL(op_type, CPU)

#define USE_OP(op_type)   \
  USE_OP_ITSELF(op_type); \
  USE_OP_KERNEL(op_type)
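
// For example, in a translation unit that needs the "add_two" op and its
// kernels (an illustrative name):
//
//   USE_OP(add_two);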

}  // namespace framework
}  // namespace paddle