diff --git a/.clang-tidy b/.clang-tidy
index c0b30cf39be72a7b90d3ed4ebe5a571d4557d56b..5a4baae37c4216c13a9f9d08b61b942b4e42a531 100644
--- a/.clang-tidy
+++ b/.clang-tidy
@@ -32,7 +32,7 @@ bugprone-misplaced-widening-cast,
 -bugprone-string-literal-with-embedded-nul,
 -bugprone-suspicious-enum-usage,
 -bugprone-suspicious-memset-usage,
--bugprone-suspicious-missing-comma,
+bugprone-suspicious-missing-comma,
 -bugprone-suspicious-semicolon,
 -bugprone-suspicious-string-compare,
 -bugprone-terminating-continue,
@@ -42,7 +42,7 @@ bugprone-misplaced-widening-cast,
 -bugprone-undelegated-constructor,
 bugprone-unhandled-self-assignment,
 bugprone-unused-raii,
--bugprone-unused-return-value,
+bugprone-unused-return-value,
 bugprone-use-after-move,
 -bugprone-virtual-near-miss,
 -clang-analyzer-apiModeling.StdCLibraryFunctions,
diff --git a/paddle/fluid/distributed/fleet_executor/fleet_executor.cc b/paddle/fluid/distributed/fleet_executor/fleet_executor.cc
index f2a9f9cc6bfd85bd2970550068af51cb1d5241db..8daf0636ce890af5ca11d2ab31b7560b6e7b2471 100644
--- a/paddle/fluid/distributed/fleet_executor/fleet_executor.cc
+++ b/paddle/fluid/distributed/fleet_executor/fleet_executor.cc
@@ -190,7 +190,7 @@ void FleetExecutor::Init(
       framework::GetUnusedVars(program_desc.Block(0), ops, {});
 
   for (auto& unique_op : ops) {
-    unique_op.release();
+    [[maybe_unused]] auto released_op = unique_op.release();
   }
 
   // NOTE: For inference, the vars in inference_root_scope_vars
diff --git a/paddle/fluid/framework/ir/mkldnn/compute_propagate_scales_mkldnn_pass.cc b/paddle/fluid/framework/ir/mkldnn/compute_propagate_scales_mkldnn_pass.cc
index b5d2255a4b90863a90f00c992064e3efe23e8568..745e0ffa9cff8e4ac28cc98242c979ab4d36cc0d 100644
--- a/paddle/fluid/framework/ir/mkldnn/compute_propagate_scales_mkldnn_pass.cc
+++ b/paddle/fluid/framework/ir/mkldnn/compute_propagate_scales_mkldnn_pass.cc
@@ -492,7 +492,7 @@ void ComputePropagateScalesMkldnnPass::ApplyImpl(ir::Graph* graph) const {
   FusePassBase::Init(pattern_name, graph);
 
   const std::unordered_set<std::string> scale_immutable_ops = {
-      "fused_transpose"
+      "fused_transpose",
       "transpose2",
       "reshape2",
       "pool2d",
diff --git a/paddle/fluid/framework/operator.cc b/paddle/fluid/framework/operator.cc
index 0c03486fdd7500d92530f31c65d50f5e8a991ab0..c095ba849a4f2eb48cc0700292b870394d755581 100644
--- a/paddle/fluid/framework/operator.cc
+++ b/paddle/fluid/framework/operator.cc
@@ -2022,7 +2022,7 @@ void OperatorWithKernel::RunImpl(const Scope& scope,
         ExecutionContext(*this, exec_scope, *dev_ctx, *runtime_ctx));
   }
 
   if (fallback_to_cpu) {
-    phi_kernel_.release();
+    [[maybe_unused]] auto released_kernel = phi_kernel_.release();
   }
 }
diff --git a/paddle/fluid/pybind/eager_legacy_op_function_generator.cc b/paddle/fluid/pybind/eager_legacy_op_function_generator.cc
index b1504ba8f88ddc5ae93227cfc37a8cd75cd887f6..e7c9c62e01661d4df59cab5d584197949f4898fc 100644
--- a/paddle/fluid/pybind/eager_legacy_op_function_generator.cc
+++ b/paddle/fluid/pybind/eager_legacy_op_function_generator.cc
@@ -488,11 +488,11 @@ int main(int argc, char* argv[]) { // NOLINT
   paddle::operators::RegisterCustomDeviceCommonKernel("fake_device");
 #endif
 
+  const std::string str = "\"paddle/fluid/eager/api/generated/fluid_generated/";
   std::vector<std::string> headers{
       "<Python.h>",
       "\"paddle/fluid/platform/enforce.h\"",
-      "\"paddle/fluid/eager/api/generated/fluid_generated/"
-      "dygraph_forward_api.h\"",
+      str + "dygraph_forward_api.h\"",
       "\"paddle/fluid/pybind/eager_utils.h\"",
       "\"paddle/fluid/platform/profiler/event_tracing.h\"",
       "\"paddle/fluid/pybind/exception.h\"",