Unverified commit 5996f623, authored by weishengying, committed by GitHub

Enable memory optimize pass even when MKLDNN is enabled (#53615)

Parent ee4eecef
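This change matters for users who turn on both MKLDNN and memory optimization through the inference config. A minimal usage sketch, assuming the standard Paddle Inference C++ API names and a placeholder model path:

    #include "paddle_inference_api.h"

    paddle_infer::Config config("/path/to/model_dir");
    config.EnableMKLDNN();
    // Before this commit, memory_optimize_pass was silently disabled whenever
    // MKLDNN was enabled; after it, the pass stays active.
    config.EnableMemoryOptim();
    auto predictor = paddle_infer::CreatePredictor(config);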
@@ -968,22 +968,8 @@ void AnalysisConfig::Update() {
#endif
}
// TODO(inference): When we enable memory_optimize and mkldnn, PaddleSeg model
// fail.
if (enable_memory_optim_) {
#ifdef PADDLE_WITH_MKLDNN
if (use_mkldnn_) {
enable_memory_optim_ = false;
LOG_FIRST_N(WARNING, 1)
<< "It is detected that mkldnn and memory_optimize_pass are enabled "
"at the same time, but they are not supported yet. Currently, "
"memory_optimize_pass is explicitly disabled";
} else {
pass_builder()->AppendAnalysisPass("memory_optimize_pass");
}
#else
pass_builder()->AppendAnalysisPass("memory_optimize_pass");
#endif
}
if (use_lite_) {
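The block above is the code removed by this commit: the MKLDNN special case that forced enable_memory_optim_ back to false. The replacement lines are collapsed in this view; based on the hunk size (22 lines shrinking to 8) and the commit title, the updated logic presumably reduces to appending the pass unconditionally. A sketch of the assumed result:

      if (enable_memory_optim_) {
        pass_builder()->AppendAnalysisPass("memory_optimize_pass");
      }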
@@ -18,7 +18,9 @@
#include "paddle/phi/backends/onednn/onednn_reuse.h"
#include "paddle/phi/core/expect.h"
#include "paddle/phi/core/macros.h"
#include "paddle/phi/core/tensor_utils.h"
#include "paddle/phi/kernels/cpu/conv_util.h"
namespace phi {
namespace onednn {
@@ -743,17 +745,9 @@ class ConvOneDNNHandlerT
std::shared_ptr<dnnl::memory> AcquireDstMemoryWithResidual(
phi::DenseTensor* output, const phi::DenseTensor* residual_param) {
std::shared_ptr<dnnl::memory> dst_memory_p;
if (residual_param->mem_desc() != this->fwd_pd_->dst_desc()) {
auto residual_memory_p = this->AcquireResidualMemory(residual_param);
dst_memory_p = this->template AcquireDstMemory<T_out>(output);
this->AcquireReorder(residual_memory_p, dst_memory_p);
} else {
// Changing ShareDataWith to TensorCopy results in performance drop
// on ResNet architectures
// (https://github.com/PaddlePaddle/Paddle/issues/22964)
output->ShareDataWith(*residual_param);
dst_memory_p = this->template AcquireDstMemory<T_out>(output);
}
auto residual_memory_p = this->AcquireResidualMemory(residual_param);
dst_memory_p = this->template AcquireDstMemory<T_out>(output);
this->AcquireReorder(residual_memory_p, dst_memory_p);
return dst_memory_p;
}
};
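The second hunk drops the ShareDataWith fast path, which made the output tensor alias the residual tensor's buffer, and instead always reorders the residual data into the destination memory. Aliasing an input buffer conflicts with memory_optimize_pass reusing allocations, which is presumably why the branch had to go. A standalone oneDNN sketch (hypothetical helper name) of the kind of copy AcquireReorder performs:

    #include "dnnl.hpp"

    // Copy/convert the residual tensor's data into the destination memory so
    // the output owns its buffer instead of aliasing the residual.
    void ReorderResidualIntoDst(dnnl::stream& strm,
                                dnnl::memory& residual_mem,
                                dnnl::memory& dst_mem) {
      dnnl::reorder(residual_mem, dst_mem).execute(strm, residual_mem, dst_mem);
      strm.wait();
    }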