/* Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#pragma once

#include <string>
#include <unordered_set>

#include "glog/logging.h"
#include "paddle/phi/core/compat/arg_map_context.h"
#include "paddle/phi/core/enforce.h"
#include "paddle/phi/core/infermeta_utils.h"
#include "paddle/phi/core/macros.h"
#include "paddle/phi/core/type_defs.h"

#include "paddle/utils/flat_hash_map.h"

namespace phi {

const static std::string deprecated_kernel_name = "deprecated";  // NOLINT

const std::unordered_set<std::string> standard_kernel_suffixs({
    "sr",  // SelectedRows kernel
    "raw"  // fallback kernel of original fluid op
});

/**
 * Some fluid ops are no longer used under the official 2.0 API system. Their
 * names must now correspond to the official 2.0 API names and can no longer
 * be occupied by the previously abandoned ops, so they are uniformly marked
 * as deprecated here.
 */
static const std::unordered_set<std::string> deprecated_op_names(
    {"diag",
     "flatten",
     "flatten_grad",
     "isinf",
     "isnan",
     "unsqueeze",
     "unsqueeze_grad",
     "squeeze",
     "squeeze_grad",
     "isfinite",
     "fill",
     "matmul",
     "matmul_grad",
     "matmul_grad_grad",
     "max",
     "max_grad",
     "min",
     "min_grad",
     "prod",
     "prod_grad",
     "any",
     "all",
     "reshape",
     "reshape_grad",
     "expand",
     "expand_as",
     "expand_grad",
     "expand_as_grad",
     "one_hot",
     "top_k",
     "top_k_grad",
     "linear_interp",
     "linear_interp_grad",
     "bilinear_interp",
     "bilinear_interp_grad",
     "trilinear_interp",
     "trilinear_interp_grad",
     "nearest_interp",
     "nearest_interp_grad",
     "bicubic_interp",
     "bicubic_interp_grad",
     "crop",
     "crop_grad"});

class DefaultKernelSignatureMap {
 public:
  static DefaultKernelSignatureMap& Instance();

  bool Has(const std::string& op_type) const { return map_.count(op_type) > 0; }

  const KernelSignature& Get(const std::string& op_type) const {
    auto it = map_.find(op_type);
    PADDLE_ENFORCE_NE(
        it,
        map_.end(),
        phi::errors::NotFound(
            "Operator `%s`'s kernel signature is not registered.", op_type));
    return it->second;
  }

  const KernelSignature* GetNullable(const std::string& op_type) const {
    auto it = map_.find(op_type);
    if (it != map_.end()) {
      return &it->second;
    }
    return nullptr;
  }

  void Insert(std::string op_type, KernelSignature signature) {
    PADDLE_ENFORCE_NE(
        Has(op_type),
        true,
        phi::errors::AlreadyExists(
            "Operator (%s)'s Kernel Signature has been registered.", op_type));
    map_.insert({std::move(op_type), std::move(signature)});
  }

 private:
  DefaultKernelSignatureMap() = default;

  paddle::flat_hash_map<std::string, KernelSignature> map_;

  DISABLE_COPY_AND_ASSIGN(DefaultKernelSignatureMap);
};
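
// Illustrative usage sketch (hypothetical op name and argument names, not
// part of this header); a KernelSignature is built from a kernel name plus
// input/attribute/output argument name lists:
//
//   auto& sig_map = phi::DefaultKernelSignatureMap::Instance();
//   sig_map.Insert("my_op",
//                  phi::KernelSignature("my_op", {"X"}, {}, {"Out"}));
//   if (const phi::KernelSignature* sig = sig_map.GetNullable("my_op")) {
//     // fall back to *sig when no ArgumentMappingFn is registered
//   }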

class OpUtilsMap {
 public:
  static OpUtilsMap& Instance();

  bool Contains(const std::string& op_type) const {
    return fluid_op_to_phi_kernel_.count(op_type) ||
           arg_mapping_fn_map_.count(op_type);
  }

  void InsertBaseKernelName(const std::string& op_type,
                            const std::string& base_kernel_name) {
    fluid_op_to_phi_kernel_.insert({op_type, base_kernel_name});
  }
  void InsertFluidOplName(std::string op_type, std::string base_kernel_name) {
    PADDLE_ENFORCE_EQ(
        phi_kernel_to_fluid_op_.count(base_kernel_name),
        0UL,
        phi::errors::AlreadyExists(
            "Operator (%s)'s kernel name (%s) has been registered.",
            op_type,
            base_kernel_name));
    phi_kernel_to_fluid_op_.insert({base_kernel_name, op_type});
  }

  bool HasArgumentMappingFn(const std::string& op_type) const {
    return arg_mapping_fn_map_.count(op_type);
  }

  void InsertArgumentMappingFn(std::string op_type, ArgumentMappingFn fn) {
    PADDLE_ENFORCE_EQ(
        arg_mapping_fn_map_.count(op_type),
        0UL,
        phi::errors::AlreadyExists(
            "Operator (%s)'s argument mapping function has been registered.",
            op_type));
    arg_mapping_fn_map_.insert({std::move(op_type), std::move(fn)});
  }

  const std::string& GetBaseKernelName(const std::string& op_type) const {
    if (deprecated_op_names.find(op_type) != deprecated_op_names.end()) {
      return deprecated_kernel_name;
    }
    auto it = fluid_op_to_phi_kernel_.find(op_type);
    if (it == fluid_op_to_phi_kernel_.end()) {
      return op_type;
    } else {
      return it->second;
    }
  }

  const ArgumentMappingFn* GetArgumentMappingFn(
      const std::string& op_type) const {
    auto it = arg_mapping_fn_map_.find(op_type);
    if (it == arg_mapping_fn_map_.end()) {
      return nullptr;
    } else {
      return &it->second;
    }
  }

  const paddle::flat_hash_map<std::string, std::string>&
  fluid_op_to_phi_kernel() const {
    return fluid_op_to_phi_kernel_;
  }

  const paddle::flat_hash_map<std::string, std::string>&
  phi_kernel_to_fluid_op() const {
    return phi_kernel_to_fluid_op_;
  }

 private:
  OpUtilsMap() = default;

  paddle::flat_hash_map<std::string, std::string> fluid_op_to_phi_kernel_;

  paddle::flat_hash_map<std::string, std::string> phi_kernel_to_fluid_op_;

  paddle::flat_hash_map<std::string, ArgumentMappingFn> arg_mapping_fn_map_;

  DISABLE_COPY_AND_ASSIGN(OpUtilsMap);
};
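
// Illustrative lookup sketch (op names are examples only): GetBaseKernelName
// returns the registered phi kernel name, the op name itself when nothing is
// registered, or the "deprecated" marker for names in deprecated_op_names.
//
//   const auto& utils = phi::OpUtilsMap::Instance();
//   utils.GetBaseKernelName("flatten2");  // mapped phi kernel name, if any
//   utils.GetBaseKernelName("reshape");   // "deprecated" (listed above)
//   if (const phi::ArgumentMappingFn* fn =
//           utils.GetArgumentMappingFn("scale")) {
//     // invoke (*fn)(ctx) with an ArgumentMappingContext to get a signature
//   }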

struct BaseKernelNameRegistrar {
  BaseKernelNameRegistrar(const char* op_type, const char* base_kernel_name) {
    OpUtilsMap::Instance().InsertBaseKernelName(op_type, base_kernel_name);
    OpUtilsMap::Instance().InsertFluidOplName(op_type, base_kernel_name);
  }
};

struct ArgumentMappingFnRegistrar {
  ArgumentMappingFnRegistrar(const char* op_type,
                             ArgumentMappingFn arg_mapping_fn) {
    OpUtilsMap::Instance().InsertArgumentMappingFn(op_type,
                                                   std::move(arg_mapping_fn));
  }
};

#define PD_REGISTER_BASE_KERNEL_NAME(op_type, base_kernel_name)                \
  PD_STATIC_ASSERT_GLOBAL_NAMESPACE(                                           \
      PD_REGISTER_base_kernel_name_ns_check_##op_type,                         \
      "PD_REGISTER_BASE_KERNEL_NAME must be called in global namespace.");     \
  static const ::phi::BaseKernelNameRegistrar                                  \
      __registrar_base_kernel_name_for_##op_type(#op_type, #base_kernel_name); \
  int TouchBaseKernelNameSymbol_##op_type() { return 0; }
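
// Illustrative registration (example mapping, written in a .cc file in the
// global namespace); it maps a legacy fluid op name to a phi base kernel
// name:
//
//   PD_REGISTER_BASE_KERNEL_NAME(flatten2, flatten);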

#define PD_DECLARE_BASE_KERNEL_NAME(op_type)                              \
  PD_STATIC_ASSERT_GLOBAL_NAMESPACE(                                      \
      PD_DECLARE_ai_name_ns_check_##op_type,                              \
      "PD_DECLARE_BASE_KERNEL_NAME must be called in global namespace."); \
  extern int TouchBaseKernelNameSymbol_##op_type();                       \
  UNUSED static int __declare_base_kernel_name_symbol_for_##op_type =     \
      TouchBaseKernelNameSymbol_##op_type()

#define PD_REGISTER_ARG_MAPPING_FN(op_type, arg_mapping_fn)              \
  PD_STATIC_ASSERT_GLOBAL_NAMESPACE(                                     \
      PD_REGISTER_arg_map_fn_ns_check_##op_type,                         \
      "PD_REGISTER_ARG_MAPPING_FN must be called in global namespace."); \
  static const ::phi::ArgumentMappingFnRegistrar                         \
      __registrar_arg_map_fn_for_##op_type(#op_type, arg_mapping_fn);    \
  int TouchArgumentMappingFnSymbol_##op_type() { return 0; }
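
// Illustrative registration (ScaleOpArgumentMapping is an assumed example
// function, written in a .cc file in the global namespace); it binds an
// ArgumentMappingFn to a legacy fluid op name:
//
//   PD_REGISTER_ARG_MAPPING_FN(scale, phi::ScaleOpArgumentMapping);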

#define PD_DECLARE_ARG_MAPPING_FN(op_type)                              \
  PD_STATIC_ASSERT_GLOBAL_NAMESPACE(                                    \
      PD_DECLARE_arg_map_fn_ns_check_##op_type,                         \
      "PD_DECLARE_ARG_MAPPING_FN must be called in global namespace."); \
  extern int TouchArgumentMappingFnSymbol_##op_type();                  \
  UNUSED static int __declare_arg_map_fn_symbol_for_##op_type =         \
      TouchArgumentMappingFnSymbol_##op_type()

}  // namespace phi