/* Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#pragma once

#include <string>
#include <unordered_set>

#include "glog/logging.h"

#include "paddle/phi/core/compat/arg_map_context.h"
#include "paddle/phi/core/enforce.h"
#include "paddle/phi/core/infermeta_utils.h"
#include "paddle/phi/core/macros.h"
#include "paddle/phi/core/type_defs.h"

#include "paddle/utils/flat_hash_map.h"

namespace phi {

// Sentinel kernel name returned by OpUtilsMap::GetBaseKernelName for fluid
// ops listed in `deprecated_op_names` below — they no longer map to a kernel.
const static std::string deprecated_kernel_name = "deprecated";  // NOLINT

// Kernel-name suffixes that still denote a "standard" kernel variant of an
// op (e.g. `add_sr`, `add_raw`) rather than an unrelated kernel.
const std::unordered_set<std::string> standard_kernel_suffixs({
    "sr",  // SelectedRows kernel
    "raw"  // fallback kernel of original fluid op
});

/**
 * Some fluid ops are no longer used under the corresponding official API
 * system of 2.0. These names need to correspond to the official API names
 * after 2.0, and can no longer be occupied by the previously abandoned ops.
 * They are marked here uniformly.
 */
static const std::unordered_set<std::string> deprecated_op_names(
    {"diag",
     "flatten",
     "flatten_grad",
     "isinf",
     "isnan",
     "unsqueeze",
     "unsqueeze_grad",
     "squeeze",
     "squeeze_grad",
     "isfinite",
     "fill",
     "matmul",
     "matmul_grad",
     "matmul_grad_grad",
     "max",
     "max_grad",
     "min",
     "min_grad",
     "prod",
     "prod_grad",
     "any",
     "all",
     "reshape",
     "reshape_grad",
     "expand",
     "expand_as",
     "expand_grad",
     "expand_as_grad",
     "one_hot",
     "top_k",
     "top_k_grad",
     "linear_interp",
     "linear_interp_grad",
     "bilinear_interp",
     "bilinear_interp_grad",
     "trilinear_interp",
     "trilinear_interp_grad",
     "nearest_interp",
     "nearest_interp_grad",
     "bicubic_interp",
     "bicubic_interp_grad",
     "crop",
     "crop_grad",
     "generate_proposals"});
89 90 91 92 93 94 95 96 97 98 99
class DefaultKernelSignatureMap {
 public:
  static DefaultKernelSignatureMap& Instance();

  bool Has(const std::string& op_type) const { return map_.count(op_type) > 0; }

  const KernelSignature& Get(const std::string& op_type) const {
    auto it = map_.find(op_type);
    PADDLE_ENFORCE_NE(
        it,
        map_.end(),
100
        phi::errors::NotFound(
101 102 103 104
            "Operator `%s`'s kernel signature is not registered.", op_type));
    return it->second;
  }

105 106 107 108 109 110 111 112
  const KernelSignature* GetNullable(const std::string& op_type) const {
    auto it = map_.find(op_type);
    if (it != map_.end()) {
      return &it->second;
    }
    return nullptr;
  }

113 114 115 116
  void Insert(std::string op_type, KernelSignature signature) {
    PADDLE_ENFORCE_NE(
        Has(op_type),
        true,
117
        phi::errors::AlreadyExists(
118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134
            "Operator (%s)'s Kernel Siginature has been registered.", op_type));
    map_.insert({std::move(op_type), std::move(signature)});
  }

 private:
  DefaultKernelSignatureMap() = default;

  paddle::flat_hash_map<std::string, KernelSignature> map_;

  DISABLE_COPY_AND_ASSIGN(DefaultKernelSignatureMap);
};

class OpUtilsMap {
 public:
  static OpUtilsMap& Instance();

  bool Contains(const std::string& op_type) const {
Z
zyfncg 已提交
135
    return fluid_op_to_phi_kernel_.count(op_type) ||
136
           arg_mapping_fn_map_.count(op_type);
137 138
  }

Z
zyfncg 已提交
139 140 141 142 143
  void InsertBaseKernelName(const std::string& op_type,
                            const std::string& base_kernel_name) {
    fluid_op_to_phi_kernel_.insert({op_type, base_kernel_name});
  }
  void InsertFluidOplName(std::string op_type, std::string base_kernel_name) {
144
    PADDLE_ENFORCE_EQ(
Z
zyfncg 已提交
145
        phi_kernel_to_fluid_op_.count(base_kernel_name),
146
        0UL,
147
        phi::errors::AlreadyExists(
Z
zyfncg 已提交
148 149 150 151
            "Operator (%s)'s kernel name (%s) has been registered.",
            op_type,
            base_kernel_name));
    phi_kernel_to_fluid_op_.insert({base_kernel_name, op_type});
152 153
  }

154 155 156 157
  bool HasArgumentMappingFn(const std::string& op_type) const {
    return arg_mapping_fn_map_.count(op_type);
  }

158 159 160 161
  void InsertArgumentMappingFn(std::string op_type, ArgumentMappingFn fn) {
    PADDLE_ENFORCE_EQ(
        arg_mapping_fn_map_.count(op_type),
        0UL,
162
        phi::errors::AlreadyExists(
163 164 165 166 167
            "Operator (%s)'s argu,emt mapping function has been registered.",
            op_type));
    arg_mapping_fn_map_.insert({std::move(op_type), std::move(fn)});
  }

168
  const std::string& GetBaseKernelName(const std::string& op_type) const {
169
    if (deprecated_op_names.find(op_type) != deprecated_op_names.end()) {
170
      return deprecated_kernel_name;
171
    }
Z
zyfncg 已提交
172 173
    auto it = fluid_op_to_phi_kernel_.find(op_type);
    if (it == fluid_op_to_phi_kernel_.end()) {
174
      return op_type;
175 176 177 178 179
    } else {
      return it->second;
    }
  }

180 181
  const ArgumentMappingFn* GetArgumentMappingFn(
      const std::string& op_type) const {
182 183
    auto it = arg_mapping_fn_map_.find(op_type);
    if (it == arg_mapping_fn_map_.end()) {
184
      return nullptr;
185
    } else {
186
      return &it->second;
187 188 189
    }
  }

Z
zyfncg 已提交
190 191 192 193 194 195 196 197
  const paddle::flat_hash_map<std::string, std::string>&
  fluid_op_to_phi_kernel() const {
    return fluid_op_to_phi_kernel_;
  }

  const paddle::flat_hash_map<std::string, std::string>&
  phi_kernel_to_fluid_op() const {
    return phi_kernel_to_fluid_op_;
198 199
  }

200 201 202
 private:
  OpUtilsMap() = default;

Z
zyfncg 已提交
203 204 205 206
  paddle::flat_hash_map<std::string, std::string> fluid_op_to_phi_kernel_;

  paddle::flat_hash_map<std::string, std::string> phi_kernel_to_fluid_op_;

207 208 209 210 211
  paddle::flat_hash_map<std::string, ArgumentMappingFn> arg_mapping_fn_map_;

  DISABLE_COPY_AND_ASSIGN(OpUtilsMap);
};

212 213 214
struct BaseKernelNameRegistrar {
  BaseKernelNameRegistrar(const char* op_type, const char* base_kernel_name) {
    OpUtilsMap::Instance().InsertBaseKernelName(op_type, base_kernel_name);
Z
zyfncg 已提交
215
    OpUtilsMap::Instance().InsertFluidOplName(op_type, base_kernel_name);
216 217 218 219 220 221 222 223 224 225 226
  }
};

// Registrar object constructed by PD_REGISTER_ARG_MAPPING_FN: installs
// `arg_mapping_fn` for `op_type` into the global OpUtilsMap at static-init
// time.
struct ArgumentMappingFnRegistrar {
  ArgumentMappingFnRegistrar(const char* op_type,
                             ArgumentMappingFn arg_mapping_fn) {
    auto& op_utils = OpUtilsMap::Instance();
    op_utils.InsertArgumentMappingFn(op_type, std::move(arg_mapping_fn));
  }
};

// Registers the mapping from fluid op `op_type` to phi `base_kernel_name`
// via a static BaseKernelNameRegistrar, plus a Touch symbol that
// PD_DECLARE_BASE_KERNEL_NAME references to force linking. Must be used at
// global namespace scope (enforced by PD_STATIC_ASSERT_GLOBAL_NAMESPACE).
#define PD_REGISTER_BASE_KERNEL_NAME(op_type, base_kernel_name)               \
  PD_STATIC_ASSERT_GLOBAL_NAMESPACE(                                          \
      PD_REGISTER_base_kernel_name_ns_check_##base_kernel_name,               \
      "PD_REGISTER_BASE_KERNEL_NAME must be called in global namespace.");    \
  static const ::phi::BaseKernelNameRegistrar                                 \
      __registrar_base_kernel_name_for_##base_kernel_name(#op_type,           \
                                                          #base_kernel_name); \
  int TouchBaseKernelNameSymbol_##base_kernel_name() { return 0; }

// Declares, in another translation unit, a base kernel name registered via
// PD_REGISTER_BASE_KERNEL_NAME; referencing the Touch symbol forces the
// registrar's object file to be linked in. Global namespace scope only.
#define PD_DECLARE_BASE_KERNEL_NAME(op_type, base_kernel_name)                 \
  PD_STATIC_ASSERT_GLOBAL_NAMESPACE(                                           \
      PD_DECLARE_base_kernel_name_ns_check_##base_kernel_name,                 \
      "PD_DECLARE_BASE_KERNEL_NAME must be called in global namespace.");      \
  extern int TouchBaseKernelNameSymbol_##base_kernel_name();                   \
  UNUSED static int __declare_base_kernel_name_symbol_for_##base_kernel_name = \
      TouchBaseKernelNameSymbol_##base_kernel_name()
// Registers `arg_mapping_fn` as op_type's argument mapping function via a
// static ArgumentMappingFnRegistrar, plus a Touch symbol that
// PD_DECLARE_ARG_MAPPING_FN references to force linking. Global namespace
// scope only (enforced by PD_STATIC_ASSERT_GLOBAL_NAMESPACE).
#define PD_REGISTER_ARG_MAPPING_FN(op_type, arg_mapping_fn)              \
  PD_STATIC_ASSERT_GLOBAL_NAMESPACE(                                     \
      PD_REGISTER_arg_map_fn_ns_check_##op_type,                         \
      "PD_REGISTER_ARG_MAPPING_FN must be called in global namespace."); \
  static const ::phi::ArgumentMappingFnRegistrar                         \
      __registrar_arg_map_fn_for_##op_type(#op_type, arg_mapping_fn);    \
  int TouchArgumentMappingFnSymbol_##op_type() { return 0; }

// Declares, in another translation unit, an argument mapping function
// registered via PD_REGISTER_ARG_MAPPING_FN; referencing the Touch symbol
// forces the registrar's object file to be linked in. Global scope only.
#define PD_DECLARE_ARG_MAPPING_FN(op_type)                              \
  PD_STATIC_ASSERT_GLOBAL_NAMESPACE(                                    \
      PD_DECLARE_arg_map_fn_ns_check_##op_type,                         \
      "PD_DECLARE_ARG_MAPPING_FN must be called in global namespace."); \
  extern int TouchArgumentMappingFnSymbol_##op_type();                  \
  UNUSED static int __declare_arg_map_fn_symbol_for_##op_type =         \
      TouchArgumentMappingFnSymbol_##op_type()

}  // namespace phi