未验证 提交 d6208aad 编写于 作者: S Sylwester Fraczek 提交者: GitHub

log only if > 0 (#47181)

上级 acf56fb6
...@@ -350,7 +350,7 @@ void FCGRUFusePass::ApplyImpl(ir::Graph* graph) const { ...@@ -350,7 +350,7 @@ void FCGRUFusePass::ApplyImpl(ir::Graph* graph) const {
graph, name_scope_, param_scope(), true /*with_fc_bias*/); graph, name_scope_, param_scope(), true /*with_fc_bias*/);
AddStatis(fusion_count); AddStatis(fusion_count);
if (!Has("disable_logs") || !Get<bool>("disable_logs")) if ((!Has("disable_logs") || !Get<bool>("disable_logs")) && fusion_count > 0)
string::PrettyLogDetail("--- fused %d pairs of fc gru patterns", string::PrettyLogDetail("--- fused %d pairs of fc gru patterns",
fusion_count); fusion_count);
} }
......
...@@ -422,7 +422,8 @@ void LayerNormFusePass::ApplyImpl(Graph* graph) const { ...@@ -422,7 +422,8 @@ void LayerNormFusePass::ApplyImpl(Graph* graph) const {
gpd(graph, handler); gpd(graph, handler);
AddStatis(found_layer_norm_count); AddStatis(found_layer_norm_count);
if (!Has("disable_logs") || !Get<bool>("disable_logs")) if ((!Has("disable_logs") || !Get<bool>("disable_logs")) &&
found_layer_norm_count > 0)
PrettyLogDetail("--- Fused %d subgraphs into layer_norm op.", PrettyLogDetail("--- Fused %d subgraphs into layer_norm op.",
found_layer_norm_count); found_layer_norm_count);
} }
......
...@@ -143,7 +143,8 @@ void FuseBatchNormActOneDNNPass::FuseBatchNormAct( ...@@ -143,7 +143,8 @@ void FuseBatchNormActOneDNNPass::FuseBatchNormAct(
gpd(graph, handler); gpd(graph, handler);
AddStatis(found_bn_act_count); AddStatis(found_bn_act_count);
if (!Has("disable_logs") || !Get<bool>("disable_logs")) if ((!Has("disable_logs") || !Get<bool>("disable_logs")) &&
found_bn_act_count > 0)
PrettyLogDetail("--- fused %d batch norm with relu activation", PrettyLogDetail("--- fused %d batch norm with relu activation",
found_bn_act_count); found_bn_act_count);
} }
......
...@@ -140,7 +140,8 @@ GraphWithStats ResidualConnectionMKLDNNFusePass::FuseConv( ...@@ -140,7 +140,8 @@ GraphWithStats ResidualConnectionMKLDNNFusePass::FuseConv(
}; };
gpd(graph_with_stats.first, handler); gpd(graph_with_stats.first, handler);
if (!Has("disable_logs") || !Get<bool>("disable_logs")) { if ((!Has("disable_logs") || !Get<bool>("disable_logs")) &&
found_conv_count > 0) {
std::stringstream msg_ss; std::stringstream msg_ss;
std::string fusionMode = as_x ? "x" : "y"; std::string fusionMode = as_x ? "x" : "y";
msg_ss << "--- Fused " << found_conv_count << " conv (as " << fusionMode msg_ss << "--- Fused " << found_conv_count << " conv (as " << fusionMode
...@@ -228,7 +229,8 @@ GraphWithStats ResidualConnectionMKLDNNFusePass::FuseProjectionConv( ...@@ -228,7 +229,8 @@ GraphWithStats ResidualConnectionMKLDNNFusePass::FuseProjectionConv(
}; };
gpd(graph_with_stats.first, handler); gpd(graph_with_stats.first, handler);
if (!Has("disable_logs") || !Get<bool>("disable_logs")) { if ((!Has("disable_logs") || !Get<bool>("disable_logs")) &&
found_projection_conv_count > 0) {
std::stringstream msg_ss; std::stringstream msg_ss;
msg_ss << "--- Fused " << found_projection_conv_count msg_ss << "--- Fused " << found_projection_conv_count
<< " projection conv (as y) + elementwise_add patterns"; << " projection conv (as y) + elementwise_add patterns";
......
...@@ -99,7 +99,8 @@ void ElementwiseActivationOneDNNPass::FuseElementwiseAct( ...@@ -99,7 +99,8 @@ void ElementwiseActivationOneDNNPass::FuseElementwiseAct(
gpd(graph, handler); gpd(graph, handler);
AddStatis(found_elementwise_activation_count); AddStatis(found_elementwise_activation_count);
if (!Has("disable_logs") || !Get<bool>("disable_logs")) if ((!Has("disable_logs") ||
!Get<bool>("disable_logs")) && found_elementwise_activation_count > 0)
PrettyLogDetail("--- fused %d %s with %s activation", PrettyLogDetail("--- fused %d %s with %s activation",
found_elementwise_activation_count, found_elementwise_activation_count,
elt_type, elt_type,
......
...@@ -132,7 +132,8 @@ GraphWithStats FCResidualConnectionMKLDNNFusePass::FuseFC( ...@@ -132,7 +132,8 @@ GraphWithStats FCResidualConnectionMKLDNNFusePass::FuseFC(
}; };
gpd(graph_with_stats.first, handler); gpd(graph_with_stats.first, handler);
if (!Has("disable_logs") || !Get<bool>("disable_logs")) { if ((!Has("disable_logs") || !Get<bool>("disable_logs")) &&
found_fc_count > 0) {
std::stringstream msg_ss; std::stringstream msg_ss;
std::string fusionMode = fc_as_x ? "x" : "y"; std::string fusionMode = fc_as_x ? "x" : "y";
msg_ss << "--- Fused " << found_fc_count << " fc (as " << fusionMode msg_ss << "--- Fused " << found_fc_count << " fc (as " << fusionMode
......
...@@ -87,7 +87,8 @@ void MatmulActivationMkldnnFusePass::FuseMatmulAct( ...@@ -87,7 +87,8 @@ void MatmulActivationMkldnnFusePass::FuseMatmulAct(
gpd(graph, handler); gpd(graph, handler);
AddStatis(found_matmul_activation_count); AddStatis(found_matmul_activation_count);
if (!Has("disable_logs") || !Get<bool>("disable_logs")) { if ((!Has("disable_logs") || !Get<bool>("disable_logs")) &&
found_matmul_activation_count > 0) {
PrettyLogDetail("--- fused %d %s with %s activation", PrettyLogDetail("--- fused %d %s with %s activation",
found_matmul_activation_count, found_matmul_activation_count,
matmul_type, matmul_type,
......
...@@ -82,7 +82,8 @@ void MatmulElementwiseAddMKLDNNFusePass::FuseMatmulElementwiseAdd( ...@@ -82,7 +82,8 @@ void MatmulElementwiseAddMKLDNNFusePass::FuseMatmulElementwiseAdd(
gpd(graph, handler); gpd(graph, handler);
AddStatis(found_matmul_elementwise_add_count); AddStatis(found_matmul_elementwise_add_count);
if (!Has("disable_logs") || !Get<bool>("disable_logs")) { if ((!Has("disable_logs") || !Get<bool>("disable_logs")) &&
found_matmul_elementwise_add_count > 0) {
PrettyLogDetail("--- fused %d %s (as %s) with elementwise_add", PrettyLogDetail("--- fused %d %s (as %s) with elementwise_add",
found_matmul_elementwise_add_count, found_matmul_elementwise_add_count,
matmul_type, matmul_type,
......
...@@ -135,7 +135,8 @@ void ScaleMatmulFusePass::ApplyImpl(ir::Graph* graph) const { ...@@ -135,7 +135,8 @@ void ScaleMatmulFusePass::ApplyImpl(ir::Graph* graph) const {
}; };
gpd(graph, handler); gpd(graph, handler);
AddStatis(found_scale_matmul_fuse_count); AddStatis(found_scale_matmul_fuse_count);
if (!Has("disable_logs") || !Get<bool>("disable_logs")) if ((!Has("disable_logs") || !Get<bool>("disable_logs")) &&
found_scale_matmul_fuse_count > 0)
PrettyLogDetail("--- fused %d scale with matmul", PrettyLogDetail("--- fused %d scale with matmul",
found_scale_matmul_fuse_count); found_scale_matmul_fuse_count);
} }
......
...@@ -94,7 +94,8 @@ void SoftplusActivationOneDNNPass::FuseSoftplusActivation( ...@@ -94,7 +94,8 @@ void SoftplusActivationOneDNNPass::FuseSoftplusActivation(
gpd(graph, handler); gpd(graph, handler);
AddStatis(found_softplus_activation_count); AddStatis(found_softplus_activation_count);
if (!Has("disable_logs") || !Get<bool>("disable_logs")) if ((!Has("disable_logs") || !Get<bool>("disable_logs")) &&
found_softplus_activation_count > 0)
PrettyLogDetail("--- fused %d softplus with %s activation", PrettyLogDetail("--- fused %d softplus with %s activation",
found_softplus_activation_count, found_softplus_activation_count,
act_type); act_type);
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册