Unverified commit 0cae0151, authored by cyberslack_lee, committed by GitHub

[clang-tidy] No.52 enable bugprone-argument-comment (#56217)

Parent: ba9bb6bc
---
+++
 Checks: '
--bugprone-argument-comment,
+bugprone-argument-comment,
 -bugprone-assert-side-effect,
 -bugprone-bad-signal-to-kill-thread,
 -bugprone-bool-pointer-implicit-conversion,
......
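The hunk above enables the check by dropping the leading `-` in front of bugprone-argument-comment inside the Checks string. The check reports `/*name=*/` argument comments that do not match the name of the parameter they annotate. A minimal sketch of what it catches, using a hypothetical function and parameter names that are not taken from this commit:

    // Hypothetical function used only for illustration.
    void Resize(int width, int height) { (void)width; (void)height; }

    void Demo() {
      Resize(/*width=*/800, /*height=*/600);  // OK: comments match the parameter names
      Resize(/*w=*/800, /*hight=*/600);       // flagged: comments do not match
    }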
@@ -266,7 +266,7 @@ CacheInfo GetExecutorInfoFromCache(const ProgramDesc &program_desc,
     auto &cached_value = cached_exe_info.GetMutable(program_id, is_grad);
     cached_value.executor_ = pe_and_graph.first;
     cached_value.graph_ = pe_and_graph.second;
-    return std::make_pair(pe_and_graph.first, /*is_new_created=*/true);
+    return std::make_pair(pe_and_graph.first, true);
   } else {
     VLOG(1) << "get exe_info from cache by: " << program_id
             << " is_grad: " << is_grad;
@@ -280,7 +280,7 @@ CacheInfo GetExecutorInfoFromCache(const ProgramDesc &program_desc,
     // need to recreate tmp variables in new scope
     parallel_executor->PrepareVariables(scope);
-    return std::make_pair(parallel_executor, /*is_new_created=*/false);
+    return std::make_pair(parallel_executor, false);
   }
 }
......
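In the two hunks above, the `/*is_new_created=*/` comments are removed rather than renamed: the arguments are passed to std::make_pair, whose parameter names are standard-library implementation details, so no comment text could satisfy the check there. A minimal sketch of the same pattern with hypothetical names:

    #include <utility>

    std::pair<int, bool> MakeResult(int value) {
      // Before this fix style: std::make_pair(value, /*is_new_created=*/true)
      // is flagged, because make_pair has no parameter named is_new_created.
      return std::make_pair(value, true);
    }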
@@ -58,8 +58,8 @@ void StreamAnalyzer::ConstructEvents(std::vector<Instruction>* instructions) {
   std::vector<std::vector<std::vector<size_t>>> run_type_info(
       instr_num,
       std::vector<std::vector<size_t>>(
-          /*number_of_run_type = */ 2));  // instr_id -> run_type ->
-                                          // next_instr_id
+          /*number_of_run_type = */ 2));  // NOLINT
+  // instr_id -> run_type -> next_instr_id
   AnalyseAllRunType(
       cross_step_merged_instructions_ptr, downstream_map, &run_type_info);
......
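Here the `/*number_of_run_type = */` comment is kept because it documents an otherwise opaque constructor argument; the report is instead silenced for that single line with `// NOLINT`, and the explanatory trailing comment moves to its own line. A sketch of the same suppression style with hypothetical names (a check-scoped `// NOLINT(bugprone-argument-comment)` would also work):

    #include <cstddef>
    #include <vector>

    std::vector<std::vector<int>> MakeTable(std::size_t rows) {
      // The comment cannot match std::vector's declared parameter name,
      // so the warning is suppressed on this line only.
      return std::vector<std::vector<int>>(rows,
                                           std::vector<int>(/*columns=*/4));  // NOLINT
    }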
@@ -754,7 +754,7 @@ ParallelExecutor::ParallelExecutor(const platform::Place &place,
                         /*device_count=*/1,
                         *graph);
-  CreateLocalScopes(scope, /*local_scope=*/{scope}, /*create_new=*/false);
+  CreateLocalScopes(scope, /*local_scopes=*/{scope}, /*create_new=*/false);
   // Apply BuildStrategy to compile graph.
   std::vector<ir::Graph *> graphs = {graph};
......
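This last hunk uses the third fix style: the comment is renamed (local_scope becomes local_scopes), presumably to match the parameter declared on CreateLocalScopes. A sketch of the same style with a hypothetical function whose parameter is assumed to be named names:

    #include <string>
    #include <vector>

    // Hypothetical function used only for illustration.
    void RegisterNames(const std::vector<std::string> &names, bool overwrite) {
      (void)names;
      (void)overwrite;
    }

    void Demo(const std::vector<std::string> &names) {
      // Before this fix style, /*name=*/names would be flagged because
      // the declared parameter is called names.
      RegisterNames(/*names=*/names, /*overwrite=*/false);
    }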