diff --git a/paddle/fluid/framework/ir/fc_gru_fuse_pass.cc b/paddle/fluid/framework/ir/fc_gru_fuse_pass.cc index 9a43edf40ef443b370b679522cc04fcaf722e032..52e88c6408b0e8007d84bd16c21bb5beec8e76b5 100644 --- a/paddle/fluid/framework/ir/fc_gru_fuse_pass.cc +++ b/paddle/fluid/framework/ir/fc_gru_fuse_pass.cc @@ -335,9 +335,9 @@ void FCGRUFusePass::ApplyImpl(ir::Graph* graph) const { graph, name_scope_, param_scope(), true /*with_fc_bias*/); AddStatis(fusion_count); - - string::PrettyLogDetail("--- fused %d pairs of fc gru patterns", - fusion_count); + if (!Has("disable_logs") || !Get<bool>("disable_logs")) + string::PrettyLogDetail("--- fused %d pairs of fc gru patterns", + fusion_count); } } // namespace ir diff --git a/paddle/fluid/framework/ir/fc_lstm_fuse_pass.cc b/paddle/fluid/framework/ir/fc_lstm_fuse_pass.cc index 2e6ce1a0f73818a7f104bbef13220b58b72bd72f..d72b626fc1ebcfb3118521409882472d7f0d9ecb 100644 --- a/paddle/fluid/framework/ir/fc_lstm_fuse_pass.cc +++ b/paddle/fluid/framework/ir/fc_lstm_fuse_pass.cc @@ -349,9 +349,9 @@ void FCLstmFusePass::ApplyImpl(ir::Graph* graph) const { BuildFusion(graph, name_scope_, param_scope(), true /*with_fc_bias*/); AddStatis(fusion_count); - - string::PrettyLogDetail("--- fused %d pairs of fc lstm patterns", - fusion_count); + if (!Has("disable_logs") || !Get<bool>("disable_logs")) + string::PrettyLogDetail("--- fused %d pairs of fc lstm patterns", + fusion_count); } } // namespace ir diff --git a/paddle/fluid/framework/ir/layer_norm_fuse_pass.cc b/paddle/fluid/framework/ir/layer_norm_fuse_pass.cc index 95d55834f823bf0adf1b32537fc3e64eb088de92..86191587e184958ecf8baaff124dcb3144c84680 100644 --- a/paddle/fluid/framework/ir/layer_norm_fuse_pass.cc +++ b/paddle/fluid/framework/ir/layer_norm_fuse_pass.cc @@ -351,8 +351,9 @@ void LayerNormFusePass::ApplyImpl(Graph* graph) const { gpd(graph, handler); AddStatis(found_layer_norm_count); - PrettyLogDetail("--- Fused %d subgraphs into layer_norm op.", - found_layer_norm_count); + if 
(!Has("disable_logs") || !Get<bool>("disable_logs")) + PrettyLogDetail("--- Fused %d subgraphs into layer_norm op.", + found_layer_norm_count); } } // namespace ir diff --git a/paddle/fluid/framework/ir/mkldnn/batch_norm_act_fuse_pass.cc b/paddle/fluid/framework/ir/mkldnn/batch_norm_act_fuse_pass.cc index 3fdb87f254403652a99983c29f9ba283a45eed2b..c5bb4bf0b2fc97d5f82f99f75423f3c3b0400686 100644 --- a/paddle/fluid/framework/ir/mkldnn/batch_norm_act_fuse_pass.cc +++ b/paddle/fluid/framework/ir/mkldnn/batch_norm_act_fuse_pass.cc @@ -150,8 +150,9 @@ void FuseBatchNormActOneDNNPass::FuseBatchNormAct( gpd(graph, handler); AddStatis(found_bn_act_count); - PrettyLogDetail("--- fused %d batch norm with relu activation", - found_bn_act_count); + if (!Has("disable_logs") || !Get<bool>("disable_logs")) + PrettyLogDetail("--- fused %d batch norm with relu activation", + found_bn_act_count); } } // namespace ir diff --git a/paddle/fluid/framework/ir/mkldnn/fc_act_mkldnn_fuse_pass.cc b/paddle/fluid/framework/ir/mkldnn/fc_act_mkldnn_fuse_pass.cc index 85d308c7eb30db29186f4a565cca704e8af4c4b0..093fd5ec538db1791441d1aa213644a72c89516e 100644 --- a/paddle/fluid/framework/ir/mkldnn/fc_act_mkldnn_fuse_pass.cc +++ b/paddle/fluid/framework/ir/mkldnn/fc_act_mkldnn_fuse_pass.cc @@ -68,9 +68,9 @@ void FuseFCActOneDNNPass::FuseFCAct(Graph *graph, bool approximate = BOOST_GET_CONST(bool, act_op->GetAttr("approximate")); std::string type = approximate ? 
"_tanh" : "_erf"; fc_op->SetAttr("activation_type", act_type + type); - } else + } else { fc_op->SetAttr("activation_type", act_type); - + } fc_op->SetAttr("use_mkldnn", true); fc_op->SetOutput("Out", {act_out->Name()}); @@ -82,8 +82,9 @@ void FuseFCActOneDNNPass::FuseFCAct(Graph *graph, gpd(graph, handler); AddStatis(found_fc_act_count); - PrettyLogDetail("--- fused %d fc with %s activation", found_fc_act_count, - act_type); + if (!Has("disable_logs") || !Get<bool>("disable_logs")) + PrettyLogDetail("--- fused %d fc with %s activation", found_fc_act_count, + act_type); } } // namespace ir diff --git a/paddle/fluid/framework/ir/mkldnn/matmul_transpose_reshape_fuse_pass.cc b/paddle/fluid/framework/ir/mkldnn/matmul_transpose_reshape_fuse_pass.cc index e5bdb08fe4ab4825aef1d3d3ccd7d3a7f352574e..a61099b4986747073bf4cde39ce497f365cea51f 100644 --- a/paddle/fluid/framework/ir/mkldnn/matmul_transpose_reshape_fuse_pass.cc +++ b/paddle/fluid/framework/ir/mkldnn/matmul_transpose_reshape_fuse_pass.cc @@ -149,10 +149,12 @@ void MatmulTransposeReshapeMKLDNNPass::ApplyImpl(ir::Graph *graph) const { gpd(graph, handler); AddStatis(found_matmul_transpose_reshape_count); - std::stringstream msg_ss; - msg_ss << "--- Fused " << found_matmul_transpose_reshape_count - << " MatmulTransposeReshape patterns"; - paddle::string::PrettyLogDetail(msg_ss.str().c_str()); + if (!Has("disable_logs") || !Get<bool>("disable_logs")) { + std::stringstream msg_ss; + msg_ss << "--- Fused " << found_matmul_transpose_reshape_count + << " MatmulTransposeReshape patterns"; + paddle::string::PrettyLogDetail(msg_ss.str().c_str()); + } } } // namespace ir } // namespace framework diff --git a/paddle/fluid/framework/ir/mkldnn/multi_gru_fuse_pass.cc b/paddle/fluid/framework/ir/mkldnn/multi_gru_fuse_pass.cc index 43c9849d5bbe3bcf53d02407cf245f6179668db9..76a0c883c8923341f538a501ed8d0f091c35ee7c 100644 --- a/paddle/fluid/framework/ir/mkldnn/multi_gru_fuse_pass.cc +++ b/paddle/fluid/framework/ir/mkldnn/multi_gru_fuse_pass.cc @@ 
-111,9 +111,9 @@ void MultiGRUFusePass::ApplyImpl(ir::Graph* graph) const { }; gpd(graph, handler); AddStatis(fused_count); - - PrettyLogDetail("--- fused %d pairs of concatenated multi_gru ops", - fused_count); + if (!Has("disable_logs") || !Get<bool>("disable_logs")) + PrettyLogDetail("--- fused %d pairs of concatenated multi_gru ops", + fused_count); } } // namespace ir diff --git a/paddle/fluid/framework/ir/mkldnn/multi_gru_seq_fuse_pass.cc b/paddle/fluid/framework/ir/mkldnn/multi_gru_seq_fuse_pass.cc index 17770d26d7de9d86d4c60e7fe1b705bfeb722504..7821501cc4b23ccdd50ac5bf9c187b2e19d488f9 100644 --- a/paddle/fluid/framework/ir/mkldnn/multi_gru_seq_fuse_pass.cc +++ b/paddle/fluid/framework/ir/mkldnn/multi_gru_seq_fuse_pass.cc @@ -126,9 +126,9 @@ void MultiGruSeqFusePass::ApplyImpl(ir::Graph* graph) const { }; gpd(graph, handler); AddStatis(fused_count); - - PrettyLogDetail("--- fused %d sequences of two multi_gru ops", - fused_count); + if (!Has("disable_logs") || !Get<bool>("disable_logs")) + PrettyLogDetail("--- fused %d sequences of two multi_gru ops", + fused_count); } } // namespace ir diff --git a/paddle/fluid/framework/ir/mkldnn/reshape_transpose_matmul_mkldnn_fuse_pass.cc b/paddle/fluid/framework/ir/mkldnn/reshape_transpose_matmul_mkldnn_fuse_pass.cc index 26692849d977b5bc0e3dabbd35b7f8fa53832978..e408440f26f1c2743789e59d7de82b0a41a2f8a5 100644 --- a/paddle/fluid/framework/ir/mkldnn/reshape_transpose_matmul_mkldnn_fuse_pass.cc +++ b/paddle/fluid/framework/ir/mkldnn/reshape_transpose_matmul_mkldnn_fuse_pass.cc @@ -148,13 +148,14 @@ void ReshapeTransposeMatmulMkldnnFusePass::Fuse( gpd(graph, handler); AddStatis(found_reshape_transpose_matmul_count); - - std::stringstream msg_ss; - msg_ss << "--- Fused " << found_reshape_transpose_matmul_count - << " ReshapeTransposeMatmulMkldnn patterns"; - if (with_reshape_xshape) msg_ss << " with reshape's xshape"; - if (with_transpose_xshape) msg_ss << " with transpose's xshape"; - string::PrettyLogDetail(msg_ss.str().c_str()); + 
if (!Has("disable_logs") || !Get<bool>("disable_logs")) { + std::stringstream msg_ss; + msg_ss << "--- Fused " << found_reshape_transpose_matmul_count + << " ReshapeTransposeMatmulMkldnn patterns"; + if (with_reshape_xshape) msg_ss << " with reshape's xshape"; + if (with_transpose_xshape) msg_ss << " with transpose's xshape"; + string::PrettyLogDetail(msg_ss.str().c_str()); + } } void ReshapeTransposeMatmulMkldnnFusePass::ApplyImpl(ir::Graph *graph) const { diff --git a/paddle/fluid/framework/ir/mkldnn/scale_matmul_fuse_pass.cc b/paddle/fluid/framework/ir/mkldnn/scale_matmul_fuse_pass.cc index 13f1fa50d080a33d837ebb63984cd4e5c3c1c350..0fc458723ffe43040aa376e2389c950bd26c4c98 100644 --- a/paddle/fluid/framework/ir/mkldnn/scale_matmul_fuse_pass.cc +++ b/paddle/fluid/framework/ir/mkldnn/scale_matmul_fuse_pass.cc @@ -129,8 +129,9 @@ void ScaleMatmulFusePass::ApplyImpl(ir::Graph* graph) const { }; gpd(graph, handler); AddStatis(found_scale_matmul_fuse_count); - PrettyLogDetail("--- fused %d scale with matmul", - found_scale_matmul_fuse_count); + if (!Has("disable_logs") || !Get<bool>("disable_logs")) + PrettyLogDetail("--- fused %d scale with matmul", + found_scale_matmul_fuse_count); } } // namespace ir diff --git a/paddle/fluid/inference/analysis/ir_pass_manager.cc b/paddle/fluid/inference/analysis/ir_pass_manager.cc index 4fdd963b6abff98f052175950e20b6999472569e..d2ea6450fc011ee7a812c7eab122cdada803419f 100644 --- a/paddle/fluid/inference/analysis/ir_pass_manager.cc +++ b/paddle/fluid/inference/analysis/ir_pass_manager.cc @@ -237,6 +237,8 @@ void IRPassManager::CreatePasses(Argument *argument, pass->Set("use_fc_padding", new bool(use_fc_padding)); } + pass->Set("disable_logs", new bool(disable_logs_)); + pre_pass = pass_name; passes_.emplace_back(std::move(pass));