Unverified commit 7f5128f4, authored by Pei Yang, committed via GitHub

clean inference logs when config.DisableGlogInfo is triggered (#36356)

Parent commit: 8fd1b6ad
......@@ -335,9 +335,9 @@ void FCGRUFusePass::ApplyImpl(ir::Graph* graph) const {
graph, name_scope_, param_scope(), true /*with_fc_bias*/);
AddStatis(fusion_count);
string::PrettyLogDetail("--- fused %d pairs of fc gru patterns",
fusion_count);
if (!Has("disable_logs") || !Get<bool>("disable_logs"))
string::PrettyLogDetail("--- fused %d pairs of fc gru patterns",
fusion_count);
}
} // namespace ir
......
......@@ -349,9 +349,9 @@ void FCLstmFusePass::ApplyImpl(ir::Graph* graph) const {
BuildFusion(graph, name_scope_, param_scope(), true /*with_fc_bias*/);
AddStatis(fusion_count);
string::PrettyLogDetail("--- fused %d pairs of fc lstm patterns",
fusion_count);
if (!Has("disable_logs") || !Get<bool>("disable_logs"))
string::PrettyLogDetail("--- fused %d pairs of fc lstm patterns",
fusion_count);
}
} // namespace ir
......
......@@ -351,8 +351,9 @@ void LayerNormFusePass::ApplyImpl(Graph* graph) const {
gpd(graph, handler);
AddStatis(found_layer_norm_count);
PrettyLogDetail("--- Fused %d subgraphs into layer_norm op.",
found_layer_norm_count);
if (!Has("disable_logs") || !Get<bool>("disable_logs"))
PrettyLogDetail("--- Fused %d subgraphs into layer_norm op.",
found_layer_norm_count);
}
} // namespace ir
......
......@@ -150,8 +150,9 @@ void FuseBatchNormActOneDNNPass::FuseBatchNormAct(
gpd(graph, handler);
AddStatis(found_bn_act_count);
PrettyLogDetail("--- fused %d batch norm with relu activation",
found_bn_act_count);
if (!Has("disable_logs") || !Get<bool>("disable_logs"))
PrettyLogDetail("--- fused %d batch norm with relu activation",
found_bn_act_count);
}
} // namespace ir
......
......@@ -68,9 +68,9 @@ void FuseFCActOneDNNPass::FuseFCAct(Graph *graph,
bool approximate = BOOST_GET_CONST(bool, act_op->GetAttr("approximate"));
std::string type = approximate ? "_tanh" : "_erf";
fc_op->SetAttr("activation_type", act_type + type);
} else
} else {
fc_op->SetAttr("activation_type", act_type);
}
fc_op->SetAttr("use_mkldnn", true);
fc_op->SetOutput("Out", {act_out->Name()});
......@@ -82,8 +82,9 @@ void FuseFCActOneDNNPass::FuseFCAct(Graph *graph,
gpd(graph, handler);
AddStatis(found_fc_act_count);
PrettyLogDetail("--- fused %d fc with %s activation", found_fc_act_count,
act_type);
if (!Has("disable_logs") || !Get<bool>("disable_logs"))
PrettyLogDetail("--- fused %d fc with %s activation", found_fc_act_count,
act_type);
}
} // namespace ir
......
......@@ -149,10 +149,12 @@ void MatmulTransposeReshapeMKLDNNPass::ApplyImpl(ir::Graph *graph) const {
gpd(graph, handler);
AddStatis(found_matmul_transpose_reshape_count);
std::stringstream msg_ss;
msg_ss << "--- Fused " << found_matmul_transpose_reshape_count
<< " MatmulTransposeReshape patterns";
paddle::string::PrettyLogDetail(msg_ss.str().c_str());
if (!Has("disable_logs") || !Get<bool>("disable_logs")) {
std::stringstream msg_ss;
msg_ss << "--- Fused " << found_matmul_transpose_reshape_count
<< " MatmulTransposeReshape patterns";
paddle::string::PrettyLogDetail(msg_ss.str().c_str());
}
}
} // namespace ir
} // namespace framework
......
......@@ -111,9 +111,9 @@ void MultiGRUFusePass::ApplyImpl(ir::Graph* graph) const {
};
gpd(graph, handler);
AddStatis(fused_count);
PrettyLogDetail("--- fused %d pairs of concatenated multi_gru ops",
fused_count);
if (!Has("disable_logs") || !Get<bool>("disable_logs"))
PrettyLogDetail("--- fused %d pairs of concatenated multi_gru ops",
fused_count);
}
} // namespace ir
......
......@@ -126,9 +126,9 @@ void MultiGruSeqFusePass::ApplyImpl(ir::Graph* graph) const {
};
gpd(graph, handler);
AddStatis(fused_count);
PrettyLogDetail("--- fused %d sequences of two multi_gru ops",
fused_count);
if (!Has("disable_logs") || !Get<bool>("disable_logs"))
PrettyLogDetail("--- fused %d sequences of two multi_gru ops",
fused_count);
}
} // namespace ir
......
......@@ -148,13 +148,14 @@ void ReshapeTransposeMatmulMkldnnFusePass::Fuse(
gpd(graph, handler);
AddStatis(found_reshape_transpose_matmul_count);
std::stringstream msg_ss;
msg_ss << "--- Fused " << found_reshape_transpose_matmul_count
<< " ReshapeTransposeMatmulMkldnn patterns";
if (with_reshape_xshape) msg_ss << " with reshape's xshape";
if (with_transpose_xshape) msg_ss << " with transpose's xshape";
string::PrettyLogDetail(msg_ss.str().c_str());
if (!Has("disable_logs") || !Get<bool>("disable_logs")) {
std::stringstream msg_ss;
msg_ss << "--- Fused " << found_reshape_transpose_matmul_count
<< " ReshapeTransposeMatmulMkldnn patterns";
if (with_reshape_xshape) msg_ss << " with reshape's xshape";
if (with_transpose_xshape) msg_ss << " with transpose's xshape";
string::PrettyLogDetail(msg_ss.str().c_str());
}
}
void ReshapeTransposeMatmulMkldnnFusePass::ApplyImpl(ir::Graph *graph) const {
......
......@@ -129,8 +129,9 @@ void ScaleMatmulFusePass::ApplyImpl(ir::Graph* graph) const {
};
gpd(graph, handler);
AddStatis(found_scale_matmul_fuse_count);
PrettyLogDetail("--- fused %d scale with matmul",
found_scale_matmul_fuse_count);
if (!Has("disable_logs") || !Get<bool>("disable_logs"))
PrettyLogDetail("--- fused %d scale with matmul",
found_scale_matmul_fuse_count);
}
} // namespace ir
......
......@@ -237,6 +237,8 @@ void IRPassManager::CreatePasses(Argument *argument,
pass->Set("use_fc_padding", new bool(use_fc_padding));
}
pass->Set("disable_logs", new bool(disable_logs_));
pre_pass = pass_name;
passes_.emplace_back(std::move(pass));
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
Please finish editing this message first!
To comment, please register.