/* Copyright (c) 2017 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#pragma once

#include <map>
#include <memory>
#include <string>
#include <tuple>
#include <type_traits>
#include <unordered_map>
#include <unordered_set>
#include <vector>

#include "paddle/fluid/framework/grad_op_desc_maker.h"
#include "paddle/fluid/framework/inplace_op_inference.h"
#include "paddle/fluid/framework/no_need_buffer_vars_inference.h"
#include "paddle/fluid/framework/op_info.h"
#include "paddle/fluid/framework/op_proto_maker.h"
#include "paddle/fluid/framework/operator.h"
#include "paddle/fluid/framework/var_type_inference.h"
#include "paddle/fluid/imperative/dygraph_grad_maker.h"
#include "paddle/fluid/imperative/type_defs.h"
#include "paddle/fluid/prim/utils/static/composite_grad_desc_maker.h"

namespace paddle {
namespace framework {
namespace details {

enum OpInfoFillType {
  kOperator = 0,
  kOpProtoAndCheckerMaker = 1,
  kGradOpDescMaker = 2,
  kVarTypeInference = 3,
  kShapeInference = 4,
  kInplaceOpInference = 5,
  kNoNeedBufferVarsInference = 6,
  kGradOpBaseMaker = 7,
  kGradCompOpDescMaker = 8,
  kUnknown = -1
};

namespace internal {
template <typename T, OpInfoFillType kType>
struct TypePair {
  using Type = T;
  static constexpr OpInfoFillType kFillType = kType;
};

// Maps each registrable base class to the OpInfo field it fills. A type passed
// to the registrar is matched against this list via std::is_base_of to select
// the OpInfoFiller specialization that applies to it.
using OpRegistryClasses = std::tuple<                                // NOLINT
    TypePair<OperatorBase, kOperator>,                               // NOLINT
    TypePair<OpProtoAndCheckerMaker, kOpProtoAndCheckerMaker>,       // NOLINT
    TypePair<GradOpDescMakerBase, kGradOpDescMaker>,                 // NOLINT
    TypePair<imperative::GradOpBaseMakerBase, kGradOpBaseMaker>,     // NOLINT
    TypePair<prim::CompositeGradOpMakerBase, kGradCompOpDescMaker>,  // NOLINT
    TypePair<VarTypeInference, kVarTypeInference>,                   // NOLINT
    TypePair<InferShapeBase, kShapeInference>,                       // NOLINT
    TypePair<InplaceOpInference, kInplaceOpInference>,               // NOLINT
    TypePair<NoNeedBufferVarsInference, kNoNeedBufferVarsInference>  // NOLINT
    >;

static constexpr int kOpRegistryClassNumber =
    std::tuple_size<OpRegistryClasses>::value;

template <typename T, int kPos, bool kIsBounded>
struct IsMatchedBaseTypeImpl {
  using PairType = typename std::tuple_element<kPos, OpRegistryClasses>::type;
  static constexpr bool kValue =
      std::is_base_of<typename PairType::Type, T>::value;
};

template <typename T, int kPos>
struct IsMatchedBaseTypeImpl<T, kPos, false> {
  static constexpr bool kValue = false;
};

template <typename T, int kPos>
static inline constexpr bool IsMatchedBaseType() {
  return IsMatchedBaseTypeImpl<
      T, kPos, (kPos >= 0 && kPos < kOpRegistryClassNumber)>::kValue;
}

template <typename T, int kStart, int kEnd, bool kIsEnd, bool kIsMatched>
struct OpInfoFillTypeGetterImpl {};

// This case should not happen
template <typename T, int kStart, int kEnd>
struct OpInfoFillTypeGetterImpl<T, kStart, kEnd, true, true> {};

template <typename T, int kStart, int kEnd>
struct OpInfoFillTypeGetterImpl<T, kStart, kEnd, true, false> {
  static constexpr OpInfoFillType kType = kUnknown;
};

template <typename T, int kStart, int kEnd>
struct OpInfoFillTypeGetterImpl<T, kStart, kEnd, false, false> {
  static constexpr OpInfoFillType kType =
      OpInfoFillTypeGetterImpl<T, kStart + 1, kEnd, kStart + 1 == kEnd,
                               IsMatchedBaseType<T, kStart + 1>()>::kType;
};

template <typename T, int kStart, int kEnd>
struct OpInfoFillTypeGetterImpl<T, kStart, kEnd, false, true> {
  using PairType = typename std::tuple_element<kStart, OpRegistryClasses>::type;
  static constexpr OpInfoFillType kType = PairType::kFillType;
};

template <typename T>
using OpInfoFillTypeGetter =
    OpInfoFillTypeGetterImpl<T, 0, kOpRegistryClassNumber,
                             kOpRegistryClassNumber == 0,
                             IsMatchedBaseType<T, 0>()>;

}  // namespace internal

template <typename T>
struct OpInfoFillTypeID {
  static constexpr OpInfoFillType ID() {
    return internal::OpInfoFillTypeGetter<T>::kType;
  }
};

template <typename T, OpInfoFillType = OpInfoFillTypeID<T>::ID()>
struct OpInfoFiller;

// Walks the registrar's type pack and applies the matching OpInfoFiller for
// each type, recursing until the end of the pack is reached.
template <size_t I, bool at_end, typename... ARGS>
class OperatorRegistrarRecursive;

template <size_t I, typename... ARGS>
class OperatorRegistrarRecursive<I, false, ARGS...> {
 public:
  using T = typename std::tuple_element<I, std::tuple<ARGS...>>::type;
  OperatorRegistrarRecursive(const char* op_type, OpInfo* info) {
    OpInfoFiller<T> fill;
    fill(op_type, info);
    constexpr auto size = sizeof...(ARGS);
    OperatorRegistrarRecursive<I + 1, I + 1 == size, ARGS...> reg(op_type,
                                                                  info);
    (void)(reg);
  }
};

template <size_t I, typename... ARGS>
class OperatorRegistrarRecursive<I, true, ARGS...> {
 public:
  OperatorRegistrarRecursive(const char* op_type, OpInfo* info) {}
};

template <typename T>
struct OpInfoFiller<T, kOperator> {
  void operator()(const char* op_type, OpInfo* info) const {
    PADDLE_ENFORCE_EQ(info->creator_, nullptr,
                      platform::errors::AlreadyExists(
                          "OpCreator of %s has been registered", op_type));
    info->creator_ = [](const std::string& type, const VariableNameMap& inputs,
                        const VariableNameMap& outputs,
                        const AttributeMap& attrs) {
      return new T(type, inputs, outputs, attrs);
    };

    if (std::is_base_of<OperatorWithKernel, T>::value) {
      PADDLE_ENFORCE_EQ(
          info->infer_shape_, nullptr,
          platform::errors::AlreadyExists(
              "Duplicate InferShapeFN of %s has been registered", op_type));
      OperatorWithKernel* op = dynamic_cast<OperatorWithKernel*>(info->creator_(
          std::string{}, VariableNameMap{}, VariableNameMap{}, AttributeMap{}));
      PADDLE_ENFORCE_NOT_NULL(
          op,
          platform::errors::InvalidArgument("%s should have kernels", op_type));
      info->infer_shape_ = [op](InferShapeContext* ctx) {
        op->InferShape(ctx);
      };
    }
  }
};

template <typename T>
struct OpInfoFiller<T, kOpProtoAndCheckerMaker> {
  void operator()(const char* op_type, OpInfo* info) const {
    PADDLE_ENFORCE_EQ(info->proto_, nullptr,
                      platform::errors::AlreadyExists(
                          "OpProto of %s has been registered.", op_type));
    PADDLE_ENFORCE_EQ(info->checker_, nullptr,
                      platform::errors::AlreadyExists(
                          "OpAttrChecker of %s has been registered.", op_type));
    info->proto_ = new proto::OpProto;
    info->checker_ = new OpAttrChecker();
    info->proto_->set_type(op_type);
    T maker;
    maker(info->proto_, info->checker_);
    PADDLE_ENFORCE_EQ(
        info->proto_->IsInitialized(), true,
        platform::errors::PreconditionNotMet(
            "Fail to initialize %s's OpProto, because %s is not initialized.",
            op_type, info->proto_->InitializationErrorString()));
  }
};

template <typename T>
struct OpInfoFiller<T, kGradOpDescMaker> {
  void operator()(const char* op_type, OpInfo* info) const {
    PADDLE_ENFORCE_EQ(
        info->grad_op_maker_, nullptr,
        platform::errors::AlreadyExists(
            "GradOpDescMaker of %s has been registered", op_type));
    info->grad_op_maker_ =
        [](const OpDesc& fwd_op,
           const std::unordered_set<std::string>& no_grad_set,
           std::unordered_map<std::string, std::string>* grad_to_var,
           const std::vector<BlockDesc*>& grad_block) {
          T maker(fwd_op, no_grad_set, grad_to_var, grad_block);
          return maker();
        };

    info->use_default_grad_op_desc_maker_ =
        std::is_base_of<DefaultGradOpMaker<OpDesc, true>, T>::value ||
        std::is_base_of<DefaultGradOpMaker<OpDesc, false>, T>::value ||
        std::is_base_of<DefaultGradOpMaker<imperative::OpBase, true>,
                        T>::value ||
        std::is_base_of<DefaultGradOpMaker<imperative::OpBase, false>,
                        T>::value;

    info->use_empty_grad_op_desc_maker_ =
        std::is_base_of<EmptyGradOpMaker<OpDesc>, T>::value ||
        std::is_base_of<EmptyGradOpMaker<imperative::OpBase>, T>::value;
  }
};

template <typename T>
struct OpInfoFiller<T, kGradCompOpDescMaker> {
  void operator()(const char* op_type, OpInfo* info) const {
    PADDLE_ENFORCE_EQ(
        info->grad_comp_op_maker_, nullptr,
        platform::errors::AlreadyExists(
            "CompositeGradOpMakerBase of %s has been registered", op_type));
    info->grad_comp_op_maker_ =
        [](const OpDesc& fwd_op,
           const std::unordered_set<std::string>& no_grad_set,
           std::unordered_map<std::string, std::string>* grad_to_var,
           const BlockDesc* current_block,
           const std::vector<BlockDesc*>& grad_block) {
          T maker(fwd_op, no_grad_set, grad_to_var, current_block, grad_block);
          return maker();
        };
    // TODO(jiabin): Support this later or just not.
    info->use_default_grad_op_desc_maker_ = false;
    info->use_empty_grad_op_desc_maker_ = false;
  }
};

template <typename T>
struct OpInfoFiller<T, kGradOpBaseMaker> {
  void operator()(const char* op_type, OpInfo* info) const {
    PADDLE_ENFORCE_EQ(
        info->dygraph_grad_op_maker_, nullptr,
        platform::errors::AlreadyExists(
            "GradOpBaseMaker of %s has been registered", op_type));
    info->dygraph_grad_op_maker_ =
        [](const std::string& type,
           const imperative::NameVarBaseMap& var_base_map_in,
           const imperative::NameVarBaseMap& var_base_map_out,
           const framework::AttributeMap& attrs,
           const framework::AttributeMap& default_attrs,
           const std::map<std::string, std::string>& inplace_map) {
          T maker(type, var_base_map_in, var_base_map_out, attrs, inplace_map);
          maker.SetDygraphDefaultAttrsMap(default_attrs);
          return maker();
        };
  }
};

template <typename T>
struct OpInfoFiller<T, kVarTypeInference> {
  void operator()(const char* op_type, OpInfo* info) const {
    PADDLE_ENFORCE_EQ(
        info->infer_var_type_, nullptr,
        platform::errors::AlreadyExists(
            "VarTypeInference of %s has been registered", op_type));
    info->infer_var_type_ = [](InferVarTypeContext* context) {
      T inference;
      inference(context);
    };
  }
};

template <typename T>
struct OpInfoFiller<T, kShapeInference> {
  void operator()(const char* op_type, OpInfo* info) const {
    // Note: if fill InferShapeFN by this Filler, the infershape here
    // will overwrite the op->InferShape func registered in kOperator Filler
    info->infer_shape_ = [](InferShapeContext* ctx) {
      T inference;
      inference(ctx);
    };
  }
};

template <typename T>
struct OpInfoFiller<T, kInplaceOpInference> {
  void operator()(const char* op_type, OpInfo* info) const {
    PADDLE_ENFORCE_EQ(
        info->infer_inplace_, nullptr,
        platform::errors::AlreadyExists(
            "InplaceOpInference of %s has been registered", op_type));
    info->infer_inplace_ = [](bool use_cuda) {
      T infer;
      return infer(use_cuda);
    };
  }
};

template <typename T>
struct OpInfoFiller<T, kNoNeedBufferVarsInference> {
  void operator()(const char* op_type, OpInfo* info) const {
    PADDLE_ENFORCE_EQ(
        info->infer_no_need_buffer_vars_, nullptr,
        platform::errors::AlreadyExists(
            "NoNeedBufferVarsInference of %s has been registered", op_type));
    info->infer_no_need_buffer_vars_.Reset(std::make_shared<T>());
  }
};

// A fake OpInfoFiller of void
template <>
struct OpInfoFiller<void, kUnknown> {
  void operator()(const char* op_type, OpInfo* info) const {}
};

}  // namespace details
}  // namespace framework
}  // namespace paddle
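
// Usage sketch (illustrative only; MyOp, MyOpMaker, and MyGradOpMaker are
// hypothetical names). In practice registration goes through the
// REGISTER_OPERATOR macro in paddle/fluid/framework/op_registry.h, whose
// registrar walks its argument pack with OperatorRegistrarRecursive roughly
// like this:
//
//   paddle::framework::OpInfo info;
//   // Index 0 matches MyOp against OpRegistryClasses (kOperator) and fills
//   // info.creator_/infer_shape_, then the recursion moves to index 1
//   // (kOpProtoAndCheckerMaker), and so on until the terminating
//   // <I, true, ARGS...> specialization ends the pack.
//   paddle::framework::details::OperatorRegistrarRecursive<
//       0, false, MyOp, MyOpMaker, MyGradOpMaker>
//       reg("my_op", &info);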