diff --git a/lite/kernels/arm/deformable_conv_compute.cc b/lite/kernels/arm/deformable_conv_compute.cc
index 9bb48201b88ceafff5c71583c0e2a26ddea7b35f..dfdd27799bc1df7f403f40cb50b48aebbfb8d67a 100644
--- a/lite/kernels/arm/deformable_conv_compute.cc
+++ b/lite/kernels/arm/deformable_conv_compute.cc
@@ -235,7 +235,8 @@ typedef paddle::lite::kernels::arm::DeformableConvCompute<PRECISION(kFloat),
                                                           PRECISION(kFloat)>
     DeformableConvFp32;
 
-REGISTER_LITE_KERNEL(deformable_conv, kARM, kFloat, kNCHW, DeformableConvFp32, def)
+REGISTER_LITE_KERNEL(
+    deformable_conv, kARM, kFloat, kNCHW, DeformableConvFp32, def)
     .BindInput("Input", {LiteType::GetTensorTy(TARGET(kARM))})
     .BindInput("Bias", {LiteType::GetTensorTy(TARGET(kARM))})
     .BindInput("Filter", {LiteType::GetTensorTy(TARGET(kARM))})
diff --git a/lite/operators/deformable_conv_op.cc b/lite/operators/deformable_conv_op.cc
index 43e269a1808c6b96743da434d5b9a2a2ecd0653b..a834528f27c9d6c97e355a1a149482ad00ae79aa 100644
--- a/lite/operators/deformable_conv_op.cc
+++ b/lite/operators/deformable_conv_op.cc
@@ -84,4 +84,5 @@ bool DeformableConvOpLite::InferShapeImpl() const {
 }  // namespace lite
 }  // namespace paddle
 
-REGISTER_LITE_OP(deformable_conv, paddle::lite::operators::DeformableConvOpLite);
+REGISTER_LITE_OP(deformable_conv,
+                 paddle::lite::operators::DeformableConvOpLite);
diff --git a/lite/tests/kernels/group_norm_compute_test.cc b/lite/tests/kernels/group_norm_compute_test.cc
index 92cdc38c9a6ca88495c403735d894ff44eff7674..a1df003850731eb4d355d01f65100d2b9d200224 100644
--- a/lite/tests/kernels/group_norm_compute_test.cc
+++ b/lite/tests/kernels/group_norm_compute_test.cc
@@ -47,7 +47,7 @@ class GroupNormComputeTest : public arena::TestCase {
         dims_(dims),
         epsilon_(epsilon),
         groups_(groups),
-        channels_(channels){}
+        channels_(channels) {}
 
   void RunBaseline(Scope* scope) override {
     auto x = scope->FindTensor(x_);
@@ -108,8 +108,8 @@ class GroupNormComputeTest : public arena::TestCase {
         float* y_ch_ptr = y_ptr + c * in_size;
         for (int j = 0; j < in_size; j++) {
           y_ch_ptr[j] = scale_val * (x_ch_ptr[j] - saved_mean_data[i]) *
-                            saved_variance_data[i] + bias_val;
-          // LOG(INFO) << "j: " << j << ", " << y_ch_ptr[j];
+                            saved_variance_data[i] +
+                        bias_val;
         }
       }
     }
@@ -129,7 +129,6 @@ class GroupNormComputeTest : public arena::TestCase {
   }
 
   void PrepareData() override {
-    LOG(INFO) << "dims_: " << dims_[0] << ", " << dims_[1] << ", " << dims_[2] << ", " << dims_[3];
     std::vector<float> x(dims_.production());
     fill_data_rand(x.data(), -1.f, 1.f, dims_.production());
@@ -152,14 +151,14 @@ void TestGroupNorm(Place place,
     for (auto& c : {1}) {
       for (auto& h : {1, 16, 33, 56}) {
         for (auto& w : {1, 17, 55}) {
-          for (auto& groups: {1, 2, 4}) {
+          for (auto& groups : {1, 2, 4}) {
            if (c % groups != 0) {
              continue;
            }
            DDim dim_in({n, c, h, w});
            float epsilon = 1e-5f;
-           std::unique_ptr<arena::TestCase> tester(
-               new GroupNormComputeTest(place, "def", dim_in, epsilon, groups, c));
+           std::unique_ptr<arena::TestCase> tester(new GroupNormComputeTest(
+               place, "def", dim_in, epsilon, groups, c));
 #ifdef LITE_WITH_ARM
            if (place == TARGET(kARM)) {
              auto& ctx = tester->context()->As<ARMContext>();