Unverified · Commit 7f5128f4 · Authored by: Pei Yang · Committed by: GitHub

clean inference logs when config.DisableGlogInfo is triggered (#36356)

Parent: 8fd1b6ad
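For context: the user-facing switch these hunks honor is the inference config's DisableGlogInfo(). Once glog INFO output is disabled on the config, IRPassManager (last hunk below) sets a "disable_logs" attribute on every IR pass, and each fuse pass now checks that attribute before printing its PrettyLogDetail summary. A minimal usage sketch, assuming the packaged paddle_inference_api.h header and placeholder model paths:

#include "paddle_inference_api.h"

int main() {
  paddle_infer::Config config;
  // Placeholder model files; point these at a real inference model.
  config.SetModel("./model/inference.pdmodel", "./model/inference.pdiparams");
  // Suppress glog INFO output; with this change the flag also reaches the
  // IR passes, so the "--- fused ..." summaries shown below are not printed.
  config.DisableGlogInfo();
  auto predictor = paddle_infer::CreatePredictor(config);
  (void)predictor;  // run inference as usual; only the log output changes
  return 0;
}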
@@ -335,9 +335,9 @@ void FCGRUFusePass::ApplyImpl(ir::Graph* graph) const {
       graph, name_scope_, param_scope(), true /*with_fc_bias*/);
   AddStatis(fusion_count);
-  string::PrettyLogDetail("--- fused %d pairs of fc gru patterns",
-                          fusion_count);
+  if (!Has("disable_logs") || !Get<bool>("disable_logs"))
+    string::PrettyLogDetail("--- fused %d pairs of fc gru patterns",
+                            fusion_count);
 }
 }  // namespace ir
...
@@ -349,9 +349,9 @@ void FCLstmFusePass::ApplyImpl(ir::Graph* graph) const {
       BuildFusion(graph, name_scope_, param_scope(), true /*with_fc_bias*/);
   AddStatis(fusion_count);
-  string::PrettyLogDetail("--- fused %d pairs of fc lstm patterns",
-                          fusion_count);
+  if (!Has("disable_logs") || !Get<bool>("disable_logs"))
+    string::PrettyLogDetail("--- fused %d pairs of fc lstm patterns",
+                            fusion_count);
 }
 }  // namespace ir
...
@@ -351,8 +351,9 @@ void LayerNormFusePass::ApplyImpl(Graph* graph) const {
   gpd(graph, handler);
   AddStatis(found_layer_norm_count);
-  PrettyLogDetail("--- Fused %d subgraphs into layer_norm op.",
-                  found_layer_norm_count);
+  if (!Has("disable_logs") || !Get<bool>("disable_logs"))
+    PrettyLogDetail("--- Fused %d subgraphs into layer_norm op.",
+                    found_layer_norm_count);
 }
 }  // namespace ir
...
@@ -150,8 +150,9 @@ void FuseBatchNormActOneDNNPass::FuseBatchNormAct(
   gpd(graph, handler);
   AddStatis(found_bn_act_count);
-  PrettyLogDetail("--- fused %d batch norm with relu activation",
-                  found_bn_act_count);
+  if (!Has("disable_logs") || !Get<bool>("disable_logs"))
+    PrettyLogDetail("--- fused %d batch norm with relu activation",
+                    found_bn_act_count);
 }
 }  // namespace ir
...
@@ -68,9 +68,9 @@ void FuseFCActOneDNNPass::FuseFCAct(Graph *graph,
       bool approximate = BOOST_GET_CONST(bool, act_op->GetAttr("approximate"));
       std::string type = approximate ? "_tanh" : "_erf";
       fc_op->SetAttr("activation_type", act_type + type);
-    } else
+    } else {
       fc_op->SetAttr("activation_type", act_type);
+    }
     fc_op->SetAttr("use_mkldnn", true);
     fc_op->SetOutput("Out", {act_out->Name()});
@@ -82,8 +82,9 @@ void FuseFCActOneDNNPass::FuseFCAct(Graph *graph,
   gpd(graph, handler);
   AddStatis(found_fc_act_count);
-  PrettyLogDetail("--- fused %d fc with %s activation", found_fc_act_count,
-                  act_type);
+  if (!Has("disable_logs") || !Get<bool>("disable_logs"))
+    PrettyLogDetail("--- fused %d fc with %s activation", found_fc_act_count,
+                    act_type);
 }
 }  // namespace ir
...
@@ -149,10 +149,12 @@ void MatmulTransposeReshapeMKLDNNPass::ApplyImpl(ir::Graph *graph) const {
   gpd(graph, handler);
   AddStatis(found_matmul_transpose_reshape_count);
-  std::stringstream msg_ss;
-  msg_ss << "--- Fused " << found_matmul_transpose_reshape_count
-         << " MatmulTransposeReshape patterns";
-  paddle::string::PrettyLogDetail(msg_ss.str().c_str());
+  if (!Has("disable_logs") || !Get<bool>("disable_logs")) {
+    std::stringstream msg_ss;
+    msg_ss << "--- Fused " << found_matmul_transpose_reshape_count
+           << " MatmulTransposeReshape patterns";
+    paddle::string::PrettyLogDetail(msg_ss.str().c_str());
+  }
 }
 }  // namespace ir
 }  // namespace framework
...
@@ -111,9 +111,9 @@ void MultiGRUFusePass::ApplyImpl(ir::Graph* graph) const {
   };
   gpd(graph, handler);
   AddStatis(fused_count);
-  PrettyLogDetail("--- fused %d pairs of concatenated multi_gru ops",
-                  fused_count);
+  if (!Has("disable_logs") || !Get<bool>("disable_logs"))
+    PrettyLogDetail("--- fused %d pairs of concatenated multi_gru ops",
+                    fused_count);
 }
 }  // namespace ir
...
@@ -126,9 +126,9 @@ void MultiGruSeqFusePass::ApplyImpl(ir::Graph* graph) const {
   };
   gpd(graph, handler);
   AddStatis(fused_count);
-  PrettyLogDetail("--- fused %d sequences of two multi_gru ops",
-                  fused_count);
+  if (!Has("disable_logs") || !Get<bool>("disable_logs"))
+    PrettyLogDetail("--- fused %d sequences of two multi_gru ops",
+                    fused_count);
 }
 }  // namespace ir
...
@@ -148,13 +148,14 @@ void ReshapeTransposeMatmulMkldnnFusePass::Fuse(
   gpd(graph, handler);
   AddStatis(found_reshape_transpose_matmul_count);
-  std::stringstream msg_ss;
-  msg_ss << "--- Fused " << found_reshape_transpose_matmul_count
-         << " ReshapeTransposeMatmulMkldnn patterns";
-  if (with_reshape_xshape) msg_ss << " with reshape's xshape";
-  if (with_transpose_xshape) msg_ss << " with transpose's xshape";
-  string::PrettyLogDetail(msg_ss.str().c_str());
+  if (!Has("disable_logs") || !Get<bool>("disable_logs")) {
+    std::stringstream msg_ss;
+    msg_ss << "--- Fused " << found_reshape_transpose_matmul_count
+           << " ReshapeTransposeMatmulMkldnn patterns";
+    if (with_reshape_xshape) msg_ss << " with reshape's xshape";
+    if (with_transpose_xshape) msg_ss << " with transpose's xshape";
+    string::PrettyLogDetail(msg_ss.str().c_str());
+  }
 }
 void ReshapeTransposeMatmulMkldnnFusePass::ApplyImpl(ir::Graph *graph) const {
...
@@ -129,8 +129,9 @@ void ScaleMatmulFusePass::ApplyImpl(ir::Graph* graph) const {
   };
   gpd(graph, handler);
   AddStatis(found_scale_matmul_fuse_count);
-  PrettyLogDetail("--- fused %d scale with matmul",
-                  found_scale_matmul_fuse_count);
+  if (!Has("disable_logs") || !Get<bool>("disable_logs"))
+    PrettyLogDetail("--- fused %d scale with matmul",
+                    found_scale_matmul_fuse_count);
 }
 }  // namespace ir
...
@@ -237,6 +237,8 @@ void IRPassManager::CreatePasses(Argument *argument,
       pass->Set("use_fc_padding", new bool(use_fc_padding));
     }
+    pass->Set("disable_logs", new bool(disable_logs_));
     pre_pass = pass_name;
     passes_.emplace_back(std::move(pass));
...