Unverified commit 06ecc6d2 authored by tianshuo78520a and committed by GitHub

Revert "[BUG FIX]Fix performance bugs that created by PR#49116 (#52124)" (#53109)

This reverts commit 543efcc5.
Parent 00ac8014
@@ -1565,16 +1565,41 @@ void OperatorWithKernel::CheckWhetherPreparePhiData(
    const VariableNameMap& outnames,
    const Scope& scope) const {
  if (run_phi_kernel_ && impl_ != nullptr) {
    // For feed, there are two situations in which we need to prepare phi data:
    // 1. Sometimes the output tensor in the cached PhiKernelContext is
    //    inconsistent with the variable in the scope, so we need to prepare
    //    phi data.
    // 2. Sometimes the input tensor in the cached PhiKernelContext is broken;
    //    using such an input leads to a segmentation fault, so we must
    //    prepare phi data.
    if (Type() == "feed") {
      const auto& phi_kernel_context = impl_->getKernelContext();
      size_t phi_tensor_index = 0;
      // Check each tensor in the KernelContext: if any tensor's type differs
      // from its variable's type, the PhiKernelContext needs to be
      // reconstructed. We use kernel_signature_'s outputs to retrieve the
      // tensors, because the tensors in phi_kernel_context are stored in the
      // order of kernel_signature_'s outputs.
      if (phi_kernel_context->OutputsSize() >= phi_tensor_index ||
          kernel_signature_ == nullptr) {
        need_prepare_phi_data_ = true;
        return;
      }
      const auto& phi_output_names = kernel_signature_->output_names;
      for (auto& phi_output_name : phi_output_names) {
        const auto& iter = outnames.find(phi_output_name);
        if (iter != outnames.end()) {
          for (auto& var_name : iter->second) {
            auto var_output = scope.FindVar(var_name);
            auto phi_output =
                phi_kernel_context->MutableOutputAt<phi::TensorBase>(
                    phi_tensor_index);
            if (phi_output == nullptr) {
              continue;
            }
            if (!(HasSameTensorType<phi::DenseTensor>(phi_output, var_output) ||
                  HasSameTensorType<phi::SparseCooTensor>(phi_output,
                                                          var_output) ||
                  HasSameTensorType<framework::Strings>(phi_output,
                                                        var_output))) {
              need_prepare_phi_data_ = true;
            }
            phi_tensor_index++;
          }
        }
      }
    }
  }
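Note: the hunk above calls HasSameTensorType<T>(), which is not defined in this hunk. As a rough illustration only (not code taken from this commit), a type-matching helper along these lines could back that check, assuming phi::TensorBase exposes a classof-style test and framework::Variable exposes IsType<T>():

// Hypothetical sketch, not part of this commit: returns true when the cached
// phi tensor and the scope variable hold the same tensor type.
template <typename TensorT>
bool HasSameTensorType(phi::TensorBase* phi_tensor, framework::Variable* var) {
  if (phi_tensor == nullptr && var == nullptr) {
    return true;  // both empty: nothing is stale
  }
  if (phi_tensor != nullptr && var != nullptr) {
    // Assumes TensorT::classof(...) and Variable::IsType<TensorT>() exist.
    return TensorT::classof(phi_tensor) && var->IsType<TensorT>();
  }
  return false;  // one side missing: the cached PhiKernelContext is stale
}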