diff --git a/paddle/fluid/framework/ir/memory_optimize_pass/inplace_addto_op_pass.cc b/paddle/fluid/framework/ir/memory_optimize_pass/inplace_addto_op_pass.cc
index 0ed2ec51b89cb7e3e1bf4e0065ad5abc0e8a16bf..680dad5cc6b202e7a91e1c5bc28263b192e1c11a 100644
--- a/paddle/fluid/framework/ir/memory_optimize_pass/inplace_addto_op_pass.cc
+++ b/paddle/fluid/framework/ir/memory_optimize_pass/inplace_addto_op_pass.cc
@@ -178,9 +178,11 @@ void InplaceAddToOpPass::Run(Graph *graph) const {
       auto *out_generated_op = dynamic_cast<details::ComputationOpHandle *>(
           out_var_ptr->GeneratedOp());
 
-      // NOTE(zhiqiu): currently, only conv2d_grad supports addto strategy
+      // FIXME(zengjinle): the "custom_fused_dense_grad" is only used for
+      // MLPerf temporarily. Replace it with the formal op type in the future.
       if (right_generated_op->Name() != "conv2d_grad" &&
-          right_generated_op->Name() != "resnet_unit_grad") {
+          right_generated_op->Name() != "resnet_unit_grad" &&
+          right_generated_op->Name() != "custom_fused_dense_grad") {
         continue;
       }