/* Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#pragma once

#include <string>
#include <unordered_set>

#include "paddle/phi/core/compat/arg_map_context.h"
#include "paddle/phi/core/enforce.h"
#include "paddle/phi/core/infermeta_utils.h"
#include "paddle/phi/core/macros.h"
#include "paddle/phi/core/type_defs.h"
#include "paddle/utils/flat_hash_map.h"

namespace phi {

// Kernel name suffixes that denote variants of a standard kernel rather
// than independent kernels.
const std::unordered_set<std::string> standard_kernel_suffixs({
    "sr",  // SelectedRows kernel
    "raw"  // fallback kernel of original fluid op
});

/**
 * Some fluid ops are no longer used under the corresponding official API
 * system of 2.0. These names need to correspond to the official API names
 * after 2.0, and can no longer be occupied by the previously abandoned ops.
 * They are marked here uniformly.
 */
const std::unordered_set<std::string> deprecated_op_names({"diag",
                                                           "flatten",
                                                           "flatten_grad",
                                                           "isinf",
                                                           "isnan",
                                                           "unsqueeze",
                                                           "unsqueeze_grad",
                                                           "squeeze",
                                                           "squeeze_grad",
                                                           "isfinite",
                                                           "matmul",
                                                           "matmul_grad",
                                                           "matmul_grad_grad",
                                                           "mean",
                                                           "mean_grad",
                                                           "max",
                                                           "max_grad",
                                                           "min",
                                                           "min_grad",
                                                           "prod",
                                                           "prod_grad",
                                                           "any",
                                                           "all",
                                                           "reshape",
                                                           "reshape_grad",
                                                           "expand",
                                                           "expand_as",
                                                           "expand_grad",
                                                           "expand_as_grad",
                                                           "sum",
                                                           "one_hot",
                                                           "sum_grad",
                                                           "top_k",
                                                           "top_k_grad"});

75 76 77 78 79 80 81 82 83 84 85
class DefaultKernelSignatureMap {
 public:
  static DefaultKernelSignatureMap& Instance();

  bool Has(const std::string& op_type) const { return map_.count(op_type) > 0; }

  const KernelSignature& Get(const std::string& op_type) const {
    auto it = map_.find(op_type);
    PADDLE_ENFORCE_NE(
        it,
        map_.end(),
86
        phi::errors::NotFound(
87 88 89 90 91 92 93 94
            "Operator `%s`'s kernel signature is not registered.", op_type));
    return it->second;
  }

  void Insert(std::string op_type, KernelSignature signature) {
    PADDLE_ENFORCE_NE(
        Has(op_type),
        true,
95
        phi::errors::AlreadyExists(
96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112
            "Operator (%s)'s Kernel Siginature has been registered.", op_type));
    map_.insert({std::move(op_type), std::move(signature)});
  }

 private:
  DefaultKernelSignatureMap() = default;

  paddle::flat_hash_map<std::string, KernelSignature> map_;

  DISABLE_COPY_AND_ASSIGN(DefaultKernelSignatureMap);
};

class OpUtilsMap {
 public:
  static OpUtilsMap& Instance();

  bool Contains(const std::string& op_type) const {
113 114
    return base_kernel_name_map_.count(op_type) ||
           arg_mapping_fn_map_.count(op_type);
115 116
  }

117
  void InsertBaseKernelName(std::string op_type, std::string base_kernel_name) {
118
    PADDLE_ENFORCE_EQ(
119
        base_kernel_name_map_.count(op_type),
120
        0UL,
121
        phi::errors::AlreadyExists(
122
            "Operator (%s)'s api name has been registered.", op_type));
123 124
    base_kernel_name_map_.insert(
        {std::move(op_type), std::move(base_kernel_name)});
125 126 127 128 129 130
  }

  void InsertArgumentMappingFn(std::string op_type, ArgumentMappingFn fn) {
    PADDLE_ENFORCE_EQ(
        arg_mapping_fn_map_.count(op_type),
        0UL,
131
        phi::errors::AlreadyExists(
132 133 134 135 136
            "Operator (%s)'s argu,emt mapping function has been registered.",
            op_type));
    arg_mapping_fn_map_.insert({std::move(op_type), std::move(fn)});
  }

137 138
  std::string GetBaseKernelName(const std::string& op_type) const {
    if (deprecated_op_names.find(op_type) != deprecated_op_names.end()) {
139
      return "deprecated";
140 141 142 143
    }
    auto it = base_kernel_name_map_.find(op_type);
    if (it == base_kernel_name_map_.end()) {
      return op_type;
144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161
    } else {
      return it->second;
    }
  }

  ArgumentMappingFn GetArgumentMappingFn(const std::string& op_type) const {
    auto it = arg_mapping_fn_map_.find(op_type);
    if (it == arg_mapping_fn_map_.end()) {
      auto func =
          [op_type](const ArgumentMappingContext& ctx) -> KernelSignature {
        return DefaultKernelSignatureMap::Instance().Get(op_type);
      };
      return func;
    } else {
      return it->second;
    }
  }

162 163 164 165 166
  const paddle::flat_hash_map<std::string, std::string>& base_kernel_name_map()
      const {
    return base_kernel_name_map_;
  }

167 168 169
 private:
  OpUtilsMap() = default;

170
  paddle::flat_hash_map<std::string, std::string> base_kernel_name_map_;
171 172 173 174 175
  paddle::flat_hash_map<std::string, ArgumentMappingFn> arg_mapping_fn_map_;

  DISABLE_COPY_AND_ASSIGN(OpUtilsMap);
};

176 177 178
struct BaseKernelNameRegistrar {
  BaseKernelNameRegistrar(const char* op_type, const char* base_kernel_name) {
    OpUtilsMap::Instance().InsertBaseKernelName(op_type, base_kernel_name);
179 180 181 182 183 184 185 186 187 188 189
  }
};

// Registers an op_type -> argument mapping function at static-initialization
// time; instantiated by PD_REGISTER_ARG_MAPPING_FN.
struct ArgumentMappingFnRegistrar {
  ArgumentMappingFnRegistrar(const char* op_type, ArgumentMappingFn fn) {
    OpUtilsMap::Instance().InsertArgumentMappingFn(op_type, std::move(fn));
  }
};

// Maps a fluid operator name to its phi base kernel name; also emits a
// Touch* symbol so PD_DECLARE_BASE_KERNEL_NAME can force-link the
// registration. Must be invoked at global namespace scope.
#define PD_REGISTER_BASE_KERNEL_NAME(op_type, base_kernel_name)                \
  PD_STATIC_ASSERT_GLOBAL_NAMESPACE(                                           \
      PD_REGISTER_base_kernel_name_ns_check_##op_type,                         \
      "PD_REGISTER_BASE_KERNEL_NAME must be called in global namespace.");     \
  static const ::phi::BaseKernelNameRegistrar                                  \
      __registrar_base_kernel_name_for_##op_type(#op_type, #base_kernel_name); \
  int TouchBaseKernelNameSymbol_##op_type() { return 0; }

// Declares that op_type's base kernel name is registered in another
// translation unit, referencing its Touch* symbol so the linker keeps the
// registration. Must be invoked at global namespace scope.
#define PD_DECLARE_BASE_KERNEL_NAME(op_type)                              \
  PD_STATIC_ASSERT_GLOBAL_NAMESPACE(                                      \
      PD_DECLARE_base_kernel_name_ns_check_##op_type,                     \
      "PD_DECLARE_BASE_KERNEL_NAME must be called in global namespace."); \
  extern int TouchBaseKernelNameSymbol_##op_type();                       \
  UNUSED static int __declare_base_kernel_name_symbol_for_##op_type =     \
      TouchBaseKernelNameSymbol_##op_type()

// Registers the argument mapping function for op_type; also emits a Touch*
// symbol so PD_DECLARE_ARG_MAPPING_FN can force-link the registration.
// Must be invoked at global namespace scope.
#define PD_REGISTER_ARG_MAPPING_FN(op_type, arg_mapping_fn)              \
  PD_STATIC_ASSERT_GLOBAL_NAMESPACE(                                     \
      PD_REGISTER_arg_map_fn_ns_check_##op_type,                         \
      "PD_REGISTER_ARG_MAPPING_FN must be called in global namespace."); \
  static const ::phi::ArgumentMappingFnRegistrar                         \
      __registrar_arg_map_fn_for_##op_type(#op_type, arg_mapping_fn);    \
  int TouchArgumentMappingFnSymbol_##op_type() { return 0; }

// Declares that op_type's argument mapping function is registered in
// another translation unit, referencing its Touch* symbol so the linker
// keeps the registration. Must be invoked at global namespace scope.
#define PD_DECLARE_ARG_MAPPING_FN(op_type)                              \
  PD_STATIC_ASSERT_GLOBAL_NAMESPACE(                                    \
      PD_DECLARE_arg_map_fn_ns_check_##op_type,                         \
      "PD_DECLARE_ARG_MAPPING_FN must be called in global namespace."); \
  extern int TouchArgumentMappingFnSymbol_##op_type();                  \
  UNUSED static int __declare_arg_map_fn_symbol_for_##op_type =         \
      TouchArgumentMappingFnSymbol_##op_type()

}  // namespace phi