diff --git a/src/operators/kernel/cl/cl-kernel-func/conv_func.cpp b/src/operators/kernel/cl/cl-kernel-func/conv_func.cpp
index 81f089c5e5d422da5ac7c5903b154551768c7d41..fff4693a39f348c7e76859624e735aac274d6e0b 100644
--- a/src/operators/kernel/cl/cl-kernel-func/conv_func.cpp
+++ b/src/operators/kernel/cl/cl-kernel-func/conv_func.cpp
@@ -208,4 +208,4 @@ void ConvAddBnRelu(framework::CLHelper &cl_helper,
 }
 
 }  // namespace operators
-}  // namespace paddle_mobile
\ No newline at end of file
+}  // namespace paddle_mobile
diff --git a/src/operators/kernel/cl/cl-kernel-func/conv_func.h b/src/operators/kernel/cl/cl-kernel-func/conv_func.h
index b6f4391a88070117453d14c225724fd7b6095eae..2cecf353fba9ee3d2668719d6365e62be75786e1 100644
--- a/src/operators/kernel/cl/cl-kernel-func/conv_func.h
+++ b/src/operators/kernel/cl/cl-kernel-func/conv_func.h
@@ -35,8 +35,7 @@ void WinogradConv3x3(framework::CLHelper &cl_helper,
                      const ConvParam<GPU_CL> &param);
 
 void ConvAddBnRelu(framework::CLHelper &cl_helper,
-                   const ConvParam<GPU_CL> &param,
-                   bool ifRelu = false,
+                   const ConvParam<GPU_CL> &param, bool ifRelu = false,
                    const CLImage *biase = nullptr,
                    const CLImage *new_scale = nullptr,
                    const CLImage *new_bias = nullptr);
diff --git a/src/operators/kernel/cl/conv_add_kernel.cpp b/src/operators/kernel/cl/conv_add_kernel.cpp
index eb738662487535448f10946f0d67dc03d8778c49..25bffeec49b7e105e553752c593b506a06ff93dd 100644
--- a/src/operators/kernel/cl/conv_add_kernel.cpp
+++ b/src/operators/kernel/cl/conv_add_kernel.cpp
@@ -74,7 +74,7 @@ bool ConvAddKernel<GPU_CL, float>::Init(FusionConvAddParam<GPU_CL> *param) {
 template <>
 void ConvAddKernel<GPU_CL, float>::Compute(
     const FusionConvAddParam<GPU_CL> &param) {
-  ConvAddBnRelu(this->cl_helper_, param, false, param.Bias());
+  ConvAddBnRelu(this->cl_helper_, param, false, param.Bias());
 }
 
 template class ConvAddKernel<GPU_CL, float>;
diff --git a/src/operators/kernel/cl/conv_bn_add_relu_kernel.cpp b/src/operators/kernel/cl/conv_bn_add_relu_kernel.cpp
index 88309988a00e002f7785e9a78e6c4b6a00778ea0..e6ac4f594625d42161795c1d94137c4da1ddb209 100644
--- a/src/operators/kernel/cl/conv_bn_add_relu_kernel.cpp
+++ b/src/operators/kernel/cl/conv_bn_add_relu_kernel.cpp
@@ -131,7 +131,8 @@ bool ConvBNAddReluKernel<GPU_CL, float>::Init(
 template <>
 void ConvBNAddReluKernel<GPU_CL, float>::Compute(
     const FusionConvBNAddReluParam<GPU_CL> &param) {
-  ConvAddBnRelu(this->cl_helper_, param, true, param.Bias(), param.NewScale(), param.NewBias());
+  ConvAddBnRelu(this->cl_helper_, param, true, param.Bias(), param.NewScale(),
+                param.NewBias());
 }
 
 template class ConvBNAddReluKernel<GPU_CL, float>;
diff --git a/src/operators/kernel/cl/conv_bn_relu_kernel.cpp b/src/operators/kernel/cl/conv_bn_relu_kernel.cpp
index d81e53e63a7aa2c05221b983b8a9e325f4569fe7..064c25f3562a0c9dc2e0c112cbcb13e79723e13b 100644
--- a/src/operators/kernel/cl/conv_bn_relu_kernel.cpp
+++ b/src/operators/kernel/cl/conv_bn_relu_kernel.cpp
@@ -127,7 +127,8 @@ bool ConvBNReluKernel<GPU_CL, float>::Init(
 template <>
 void ConvBNReluKernel<GPU_CL, float>::Compute(
     const FusionConvBNReluParam<GPU_CL> &param) {
-  ConvAddBnRelu(this->cl_helper_, param, true, nullptr, param.NewScale(), param.NewBias());
+  ConvAddBnRelu(this->cl_helper_, param, true, nullptr, param.NewScale(),
+                param.NewBias());
 }
 
 template class ConvBNReluKernel<GPU_CL, float>;