diff --git a/.clang-tidy b/.clang-tidy
index a8ecae8aa8da8c9ca695d9762c5cab259c56b165..23da1367f9f5376736c84cd475c1f880198319d0 100644
--- a/.clang-tidy
+++ b/.clang-tidy
@@ -63,7 +63,7 @@ bugprone-unused-raii,
 -clang-analyzer-core.builtin.BuiltinFunctions,
 -clang-analyzer-core.builtin.NoReturnFunctions,
 -clang-analyzer-core.uninitialized.ArraySubscript,
--clang-analyzer-core.uninitialized.Assign,
+clang-analyzer-core.uninitialized.Assign,
 -clang-analyzer-core.uninitialized.Branch,
 -clang-analyzer-core.uninitialized.CapturedBlockVariable,
 -clang-analyzer-core.uninitialized.UndefReturn,
diff --git a/paddle/phi/kernels/funcs/activation_functor.h b/paddle/phi/kernels/funcs/activation_functor.h
index ee59e2634fce970f56f11ca76ada532a2c91edb7..1d3bde7ab59d54d4ed856e516eb3c097351f523f 100644
--- a/paddle/phi/kernels/funcs/activation_functor.h
+++ b/paddle/phi/kernels/funcs/activation_functor.h
@@ -590,7 +590,7 @@ struct STanhGradFunctor : public BaseActivationFunctor<T> {
             typename dOut,
             typename dX>
   void operator()(Device d, X x, Out out UNUSED, dOut dout, dX dx) const {
-    auto a = static_cast<T>(scale_a);
+    auto a = static_cast<T>(scale_a);  // NOLINT
     auto b = static_cast<T>(scale_b);
     auto temp = (a * x).tanh() * (a * x).tanh();
     dx.device(d) = dout * a * b * (static_cast<T>(1) - temp);
@@ -1557,7 +1557,7 @@ struct ThresholdedReluFunctor : public BaseActivationFunctor<T> {
 
   template <typename Device, typename X, typename Out>
   void operator()(Device d, X x, Out out) const {
-    auto th = static_cast<T>(threshold);
+    auto th = static_cast<T>(threshold);  // NOLINT
     out.device(d) = (x > th).template cast<T>() * x;
   }
 };
@@ -1575,7 +1575,7 @@ struct ThresholdedReluGradFunctor : public BaseActivationFunctor<T> {
             typename dOut,
             typename dX>
   void operator()(Device d, X x, Out out UNUSED, dOut dout, dX dx) const {
-    auto th = static_cast<T>(threshold);
+    auto th = static_cast<T>(threshold);  // NOLINT
     dx.device(d) = dout * (x > th).template cast<T>();
   }
 
@@ -1692,7 +1692,7 @@ struct SoftShrinkFunctor : public BaseActivationFunctor<T> {
 
   template <typename Device, typename X, typename Out>
   void operator()(Device d, X x, Out out) const {
-    auto lambdaT = static_cast<T>(lambda);
+    auto lambdaT = static_cast<T>(lambda);  // NOLINT
     auto temp1 = (x > lambdaT).template cast<T>();
     auto temp2 = (x < -lambdaT).template cast<T>();
     out.device(d) = temp1 * (x - lambdaT) + temp2 * (x + lambdaT);
@@ -1711,7 +1711,7 @@ struct SoftShrinkGradFunctor : public BaseActivationFunctor<T> {
             typename dOut,
             typename dX>
   void operator()(Device d, X x, Out out UNUSED, dOut dout, dX dx) const {
-    auto lambdaT = static_cast<T>(lambda);
+    auto lambdaT = static_cast<T>(lambda);  // NOLINT
     auto temp1 = (x > lambdaT).template cast<T>();
     auto temp2 = (x < -lambdaT).template cast<T>();
     dx.device(d) = dout * (temp1 + temp2).template cast<T>();
diff --git a/paddle/phi/kernels/funcs/gpc.cc b/paddle/phi/kernels/funcs/gpc.cc
index 74683f3e0f2fce6600e6f22dc57c8655ed522786..cc2e8f21f9e274b2c336068cd557d7fae185afed 100644
--- a/paddle/phi/kernels/funcs/gpc.cc
+++ b/paddle/phi/kernels/funcs/gpc.cc
@@ -1026,7 +1026,7 @@ void gpc_polygon_clip(gpc_op op,
     /* Set yb and yt to the bottom and top of the scanbeam */
     yb = sbt[scanbeam++];
     if (scanbeam < sbt_entries) {
-      yt = sbt[scanbeam];
+      yt = sbt[scanbeam];  // NOLINT
       dy = yt - yb;
     }
     /* === SCANBEAM BOUNDARY PROCESSING ================================ */
@@ -1664,7 +1664,7 @@ void gpc_tristrip_clip(gpc_op op,
     /* Set yb and yt to the bottom and top of the scanbeam */
     yb = sbt[scanbeam++];
     if (scanbeam < sbt_entries) {
-      yt = sbt[scanbeam];
+      yt = sbt[scanbeam];  // NOLINT
       dy = yt - yb;
     }