diff --git a/paddle/fluid/framework/ir/fc_gru_fuse_pass.cc b/paddle/fluid/framework/ir/fc_gru_fuse_pass.cc
index 7ec1227c1290aee256dafe5261ab022c66358702..e1958702501543fb8e93d627a11aede320dcd6e6 100644
--- a/paddle/fluid/framework/ir/fc_gru_fuse_pass.cc
+++ b/paddle/fluid/framework/ir/fc_gru_fuse_pass.cc
@@ -350,7 +350,7 @@ void FCGRUFusePass::ApplyImpl(ir::Graph* graph) const {
       graph, name_scope_, param_scope(), true /*with_fc_bias*/);
 
   AddStatis(fusion_count);
-  if (!Has("disable_logs") || !Get<bool>("disable_logs"))
+  if ((!Has("disable_logs") || !Get<bool>("disable_logs")) && fusion_count > 0)
     string::PrettyLogDetail("--- fused %d pairs of fc gru patterns",
                             fusion_count);
 }
diff --git a/paddle/fluid/framework/ir/layer_norm_fuse_pass.cc b/paddle/fluid/framework/ir/layer_norm_fuse_pass.cc
index afc117856755c622b3e2eb901d98f73ad1887841..5df2306935eadb8425a1881232dc92a017cfd235 100644
--- a/paddle/fluid/framework/ir/layer_norm_fuse_pass.cc
+++ b/paddle/fluid/framework/ir/layer_norm_fuse_pass.cc
@@ -422,7 +422,8 @@ void LayerNormFusePass::ApplyImpl(Graph* graph) const {
 
   gpd(graph, handler);
   AddStatis(found_layer_norm_count);
-  if (!Has("disable_logs") || !Get<bool>("disable_logs"))
+  if ((!Has("disable_logs") || !Get<bool>("disable_logs")) &&
+      found_layer_norm_count > 0)
     PrettyLogDetail("--- Fused %d subgraphs into layer_norm op.",
                     found_layer_norm_count);
 }
diff --git a/paddle/fluid/framework/ir/mkldnn/batch_norm_act_fuse_pass.cc b/paddle/fluid/framework/ir/mkldnn/batch_norm_act_fuse_pass.cc
index fa2bc4d374001c723dac52f22b62ab50bddd67bb..02a394cafdbddb50d4f80229ffa9b0b919b1572e 100644
--- a/paddle/fluid/framework/ir/mkldnn/batch_norm_act_fuse_pass.cc
+++ b/paddle/fluid/framework/ir/mkldnn/batch_norm_act_fuse_pass.cc
@@ -143,7 +143,8 @@ void FuseBatchNormActOneDNNPass::FuseBatchNormAct(
 
   gpd(graph, handler);
   AddStatis(found_bn_act_count);
-  if (!Has("disable_logs") || !Get<bool>("disable_logs"))
+  if ((!Has("disable_logs") || !Get<bool>("disable_logs")) &&
+      found_bn_act_count > 0)
     PrettyLogDetail("--- fused %d batch norm with relu activation",
                     found_bn_act_count);
 }
diff --git a/paddle/fluid/framework/ir/mkldnn/conv_elementwise_add_mkldnn_fuse_pass.cc b/paddle/fluid/framework/ir/mkldnn/conv_elementwise_add_mkldnn_fuse_pass.cc
index a9bc746680c1637bcfa6f30a2e5a265ab30c9c03..6a3a24648dbf147ae98ca5c51e65228b0ecdd8bf 100644
--- a/paddle/fluid/framework/ir/mkldnn/conv_elementwise_add_mkldnn_fuse_pass.cc
+++ b/paddle/fluid/framework/ir/mkldnn/conv_elementwise_add_mkldnn_fuse_pass.cc
@@ -140,7 +140,8 @@ GraphWithStats ResidualConnectionMKLDNNFusePass::FuseConv(
   };
 
   gpd(graph_with_stats.first, handler);
-  if (!Has("disable_logs") || !Get<bool>("disable_logs")) {
+  if ((!Has("disable_logs") || !Get<bool>("disable_logs")) &&
+      found_conv_count > 0) {
     std::stringstream msg_ss;
     std::string fusionMode = as_x ? "x" : "y";
     msg_ss << "--- Fused " << found_conv_count << " conv (as " << fusionMode
@@ -228,7 +229,8 @@ GraphWithStats ResidualConnectionMKLDNNFusePass::FuseProjectionConv(
   };
 
   gpd(graph_with_stats.first, handler);
-  if (!Has("disable_logs") || !Get<bool>("disable_logs")) {
+  if ((!Has("disable_logs") || !Get<bool>("disable_logs")) &&
+      found_projection_conv_count > 0) {
     std::stringstream msg_ss;
     msg_ss << "--- Fused " << found_projection_conv_count
            << " projection conv (as y) + elementwise_add patterns";
diff --git a/paddle/fluid/framework/ir/mkldnn/elt_act_mkldnn_fuse_pass.cc b/paddle/fluid/framework/ir/mkldnn/elt_act_mkldnn_fuse_pass.cc
index 3059be8a9a97285bb5bf43a1818ebce22b273d0e..ab481b2b9499e7b7e3d325873ac9463cdb83b017 100644
--- a/paddle/fluid/framework/ir/mkldnn/elt_act_mkldnn_fuse_pass.cc
+++ b/paddle/fluid/framework/ir/mkldnn/elt_act_mkldnn_fuse_pass.cc
@@ -99,7 +99,8 @@ void ElementwiseActivationOneDNNPass::FuseElementwiseAct(
 
   gpd(graph, handler);
   AddStatis(found_elementwise_activation_count);
-  if (!Has("disable_logs") || !Get<bool>("disable_logs"))
+  if ((!Has("disable_logs") || !Get<bool>("disable_logs")) &&
+      found_elementwise_activation_count > 0)
     PrettyLogDetail("--- fused %d %s with %s activation",
                     found_elementwise_activation_count,
                     elt_type,
diff --git a/paddle/fluid/framework/ir/mkldnn/fc_elementwise_add_mkldnn_fuse_pass.cc b/paddle/fluid/framework/ir/mkldnn/fc_elementwise_add_mkldnn_fuse_pass.cc
index 7e9a434be1c4bbc5358b537f242bb8ab1124c7ba..52e3e9b5d79de4b27e8bdedefa0848bad8d0be65 100644
--- a/paddle/fluid/framework/ir/mkldnn/fc_elementwise_add_mkldnn_fuse_pass.cc
+++ b/paddle/fluid/framework/ir/mkldnn/fc_elementwise_add_mkldnn_fuse_pass.cc
@@ -132,7 +132,8 @@ GraphWithStats FCResidualConnectionMKLDNNFusePass::FuseFC(
   };
 
   gpd(graph_with_stats.first, handler);
-  if (!Has("disable_logs") || !Get<bool>("disable_logs")) {
+  if ((!Has("disable_logs") || !Get<bool>("disable_logs")) &&
+      found_fc_count > 0) {
     std::stringstream msg_ss;
     std::string fusionMode = fc_as_x ? "x" : "y";
     msg_ss << "--- Fused " << found_fc_count << " fc (as " << fusionMode
diff --git a/paddle/fluid/framework/ir/mkldnn/matmul_activation_mkldnn_fuse_pass.cc b/paddle/fluid/framework/ir/mkldnn/matmul_activation_mkldnn_fuse_pass.cc
index 9ba89106c3471e60f60899f7d8c2e2fdaa4228a8..3609466cb4fac4caebbac7aaea20dbd8a04692b8 100644
--- a/paddle/fluid/framework/ir/mkldnn/matmul_activation_mkldnn_fuse_pass.cc
+++ b/paddle/fluid/framework/ir/mkldnn/matmul_activation_mkldnn_fuse_pass.cc
@@ -87,7 +87,8 @@ void MatmulActivationMkldnnFusePass::FuseMatmulAct(
 
   gpd(graph, handler);
   AddStatis(found_matmul_activation_count);
-  if (!Has("disable_logs") || !Get<bool>("disable_logs")) {
+  if ((!Has("disable_logs") || !Get<bool>("disable_logs")) &&
+      found_matmul_activation_count > 0) {
     PrettyLogDetail("--- fused %d %s with %s activation",
                     found_matmul_activation_count,
                     matmul_type,
diff --git a/paddle/fluid/framework/ir/mkldnn/matmul_elementwise_add_mkldnn_fuse_pass.cc b/paddle/fluid/framework/ir/mkldnn/matmul_elementwise_add_mkldnn_fuse_pass.cc
index 2e6e450cd4c72324e036c78586b10f55f5dfc83c..a2a67c3aafa9c7cb304c0aecfcf796951dc22a65 100644
--- a/paddle/fluid/framework/ir/mkldnn/matmul_elementwise_add_mkldnn_fuse_pass.cc
+++ b/paddle/fluid/framework/ir/mkldnn/matmul_elementwise_add_mkldnn_fuse_pass.cc
@@ -82,7 +82,8 @@ void MatmulElementwiseAddMKLDNNFusePass::FuseMatmulElementwiseAdd(
 
   gpd(graph, handler);
   AddStatis(found_matmul_elementwise_add_count);
-  if (!Has("disable_logs") || !Get<bool>("disable_logs")) {
+  if ((!Has("disable_logs") || !Get<bool>("disable_logs")) &&
+      found_matmul_elementwise_add_count > 0) {
     PrettyLogDetail("--- fused %d %s (as %s) with elementwise_add",
                     found_matmul_elementwise_add_count,
                     matmul_type,
diff --git a/paddle/fluid/framework/ir/mkldnn/scale_matmul_fuse_pass.cc b/paddle/fluid/framework/ir/mkldnn/scale_matmul_fuse_pass.cc
index a968af26bd20a040e1ab3bb0841b7e3694dafccb..e464d8d8db3b9e5fb5fcccf5da4e7116c1140474 100644
--- a/paddle/fluid/framework/ir/mkldnn/scale_matmul_fuse_pass.cc
+++ b/paddle/fluid/framework/ir/mkldnn/scale_matmul_fuse_pass.cc
@@ -135,7 +135,8 @@ void ScaleMatmulFusePass::ApplyImpl(ir::Graph* graph) const {
   };
   gpd(graph, handler);
   AddStatis(found_scale_matmul_fuse_count);
-  if (!Has("disable_logs") || !Get<bool>("disable_logs"))
+  if ((!Has("disable_logs") || !Get<bool>("disable_logs")) &&
+      found_scale_matmul_fuse_count > 0)
     PrettyLogDetail("--- fused %d scale with matmul",
                     found_scale_matmul_fuse_count);
 }
diff --git a/paddle/fluid/framework/ir/mkldnn/softplus_activation_mkldnn_fuse_pass.cc b/paddle/fluid/framework/ir/mkldnn/softplus_activation_mkldnn_fuse_pass.cc
index 77df45c8e07858d85e17870660e797d49f9e249e..90a886aece5176296604b8a03340082ff94576bd 100644
--- a/paddle/fluid/framework/ir/mkldnn/softplus_activation_mkldnn_fuse_pass.cc
+++ b/paddle/fluid/framework/ir/mkldnn/softplus_activation_mkldnn_fuse_pass.cc
@@ -94,7 +94,8 @@ void SoftplusActivationOneDNNPass::FuseSoftplusActivation(
 
   gpd(graph, handler);
   AddStatis(found_softplus_activation_count);
-  if (!Has("disable_logs") || !Get<bool>("disable_logs"))
+  if ((!Has("disable_logs") || !Get<bool>("disable_logs")) &&
+      found_softplus_activation_count > 0)
     PrettyLogDetail("--- fused %d softplus with %s activation",
                     found_softplus_activation_count,
                     act_type);