未验证 提交 8601859e 编写于 作者: J jjyaoao 提交者: GitHub

delete SupportNPU(), SupportMLU() (#52911)

* delete SupportNPU(), SupportMLU()

* delete npu branch
上级 468869e4
......@@ -385,21 +385,6 @@ void ApplyDeviceGuard(const OperatorBase* op_base,
}
VLOG(3) << "Switch into " << expected_kernel_key->place_
<< " by device_guard.";
} else if (op_device.find("npu") != std::string::npos &&
platform::is_npu_place(place)) {
// when the Op that does not have NPUKernel is assigned to NPU, the
// CPUKernel will be executed and a warning will be given at the same
// time.
if (op_base->SupportNPU()) {
expected_kernel_key->place_ = place;
} else {
expected_kernel_key->place_ = platform::CPUPlace();
LOG_FIRST_N(WARNING, 1)
<< "Op(" << op_base->Type()
<< ") has no NPU implementation. It will be assigned to CPUPlace.";
}
VLOG(3) << "Switch into " << expected_kernel_key->place_
<< " by device_guard.";
} else if (op_device.find("xpu") != std::string::npos &&
platform::is_xpu_place(place)) {
// when the Op that does not have XPUKernel is assigned to XPU, the
......
......@@ -1326,33 +1326,6 @@ bool OperatorWithKernel::SupportGPU() const {
}
}
bool OperatorWithKernel::SupportNPU() const {
auto phi_kernels = phi::KernelFactory::Instance().SelectKernelMap(
phi::TransToPhiKernelName(type_));
auto has_phi_kernel =
std::any_of(phi_kernels.begin(),
phi_kernels.end(),
[](phi::KernelKeyMap::const_reference kern_pair) {
return kern_pair.first.backend() == phi::Backend::NPU;
});
if (has_phi_kernel) {
return true;
} else {
auto kernel_iter = OperatorWithKernel::AllOpKernels().find(type_);
if (kernel_iter == OperatorWithKernel::AllOpKernels().end()) {
return false;
} else {
auto& op_kernels = kernel_iter->second;
return std::any_of(
op_kernels.begin(),
op_kernels.end(),
[](OpKernelMap::const_reference kern_pair) {
return platform::is_npu_place(kern_pair.first.place_);
});
}
}
}
bool OperatorWithKernel::SupportXPU() const {
#ifdef PADDLE_WITH_XPU
auto phi_kernels = phi::KernelFactory::Instance().SelectKernelMap(
......
......@@ -285,8 +285,6 @@ class OperatorBase {
// Convenience wrapper: debug string without an execution scope.
std::string DebugString() const { return DebugStringEx(nullptr); }
// Device-support queries. The base class conservatively reports "not
// supported" for every device; OperatorWithKernel overrides these to
// inspect the actual kernel registries.
virtual bool SupportGPU() const { return false; }
virtual bool SupportNPU() const { return false; }
virtual bool SupportMLU() const { return false; }
virtual bool SupportXPU() const { return false; }
// Registered operator type name (e.g. "conv2d").
const std::string& Type() const { return type_; }
......@@ -746,18 +744,6 @@ class OperatorWithKernel : public OperatorBase {
bool SupportGPU() const override;
bool SupportNPU() const override;
// Returns true when this operator has a fluid kernel registered on an MLU
// place. Only the fluid registry is consulted here.
// TODO(zhiqiu): support phi if needed?
bool SupportMLU() const override {
  // Use find() instead of at(): an op type with no registered kernels
  // should simply report "not supported" rather than throw
  // std::out_of_range. This also matches the behavior of SupportNPU().
  auto kernel_iter = OperatorWithKernel::AllOpKernels().find(type_);
  if (kernel_iter == OperatorWithKernel::AllOpKernels().end()) {
    return false;
  }
  const auto& op_kernels = kernel_iter->second;
  return std::any_of(op_kernels.begin(),
                     op_kernels.end(),
                     [](OpKernelMap::const_reference kern_pair) {
                       return platform::is_mlu_place(kern_pair.first.place_);
                     });
}
bool SupportXPU() const override;
bool SupportsMKLDNN(phi::DataType data_type) const;
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册