From 38ddd7164580445445a02d91f38d539900fd64f8 Mon Sep 17 00:00:00 2001
From: halsay <458208629@qq.com>
Date: Wed, 4 Jul 2018 02:13:06 -0700
Subject: [PATCH] bug fixes

---
 src/operators/batchnorm_op.cpp                |  1 -
 src/operators/fusion_conv_add.cpp             |  2 +-
 src/operators/fusion_conv_add.h               |  2 +-
 src/operators/fusion_fc_op.cpp                |  2 +-
 src/operators/fusion_fc_op.h                  |  4 ++--
 src/operators/kernel/mali/acl_operator.cc     |  0
 src/operators/kernel/mali/acl_operator.h      |  1 +
 src/operators/kernel/mali/acl_tensor.cc       |  0
 src/operators/kernel/mali/acl_tensor.h        |  0
 .../kernel/mali/batchnorm_kernel.cpp          |  2 +-
 src/operators/kernel/mali/concat_kernel.cpp   |  4 ++--
 src/operators/kernel/mali/conv_add_kernel.cpp |  5 ++---
 src/operators/kernel/mali/conv_kernel.cpp     |  2 +-
 .../kernel/mali/elementwise_add_kernel.cpp    |  0
 .../kernel/mali/fushion_fc_kernel.cpp         |  0
 src/operators/kernel/mali/lrn_kernel.cpp      | 19 ++++++++++++++-----
 src/operators/kernel/mali/mul_kernel.cpp      |  0
 src/operators/kernel/mali/pool_kernel.cpp     |  4 ++--
 src/operators/kernel/mali/relu_kernel.cpp     |  4 ++--
 src/operators/kernel/mali/reshape_kernel.cpp  |  0
 src/operators/kernel/mali/softmax_kernel.cpp  |  4 ++--
 21 files changed, 32 insertions(+), 24 deletions(-)
 mode change 100644 => 100755 src/operators/kernel/mali/acl_operator.cc
 mode change 100644 => 100755 src/operators/kernel/mali/acl_operator.h
 mode change 100644 => 100755 src/operators/kernel/mali/acl_tensor.cc
 mode change 100644 => 100755 src/operators/kernel/mali/acl_tensor.h
 mode change 100644 => 100755 src/operators/kernel/mali/batchnorm_kernel.cpp
 mode change 100644 => 100755 src/operators/kernel/mali/concat_kernel.cpp
 mode change 100644 => 100755 src/operators/kernel/mali/conv_kernel.cpp
 mode change 100644 => 100755 src/operators/kernel/mali/elementwise_add_kernel.cpp
 mode change 100644 => 100755 src/operators/kernel/mali/fushion_fc_kernel.cpp
 mode change 100644 => 100755 src/operators/kernel/mali/mul_kernel.cpp
 mode change 100644 => 100755 src/operators/kernel/mali/pool_kernel.cpp
 mode change 100644 => 100755 src/operators/kernel/mali/relu_kernel.cpp
 mode change 100644 => 100755 src/operators/kernel/mali/reshape_kernel.cpp
 mode change 100644 => 100755 src/operators/kernel/mali/softmax_kernel.cpp

diff --git a/src/operators/batchnorm_op.cpp b/src/operators/batchnorm_op.cpp
index d2fbd9fb6b..644a27c586 100644
--- a/src/operators/batchnorm_op.cpp
+++ b/src/operators/batchnorm_op.cpp
@@ -35,7 +35,6 @@ namespace ops = paddle_mobile::operators;
 REGISTER_OPERATOR_CPU(batch_norm, ops::BatchNormOp);
 #endif
 #ifdef PADDLE_MOBILE_MALI_GPU
-USE_OP_MALI_GPU(batch_norm);
 REGISTER_OPERATOR_MALI_GPU(batch_norm, ops::BatchNormOp);
 #endif
 #ifdef PADDLE_MOBILE_FPGA
diff --git a/src/operators/fusion_conv_add.cpp b/src/operators/fusion_conv_add.cpp
index be70370f9d..656d30c4e1 100644
--- a/src/operators/fusion_conv_add.cpp
+++ b/src/operators/fusion_conv_add.cpp
@@ -76,7 +76,7 @@ namespace ops = paddle_mobile::operators;
 REGISTER_OPERATOR_CPU(fusion_conv_add, ops::FusionConvAddOp);
 #endif
 #ifdef PADDLE_MOBILE_MALI_GPU
-REGISTER_OPERATOR_MALI_GPU(conv_add, ops::FusionConvAddOp);
+REGISTER_OPERATOR_MALI_GPU(fusion_conv_add, ops::FusionConvAddOp);
 #endif
 #ifdef PADDLE_MOBILE_FPGA
 #endif
diff --git a/src/operators/fusion_conv_add.h b/src/operators/fusion_conv_add.h
index 02c9d910b9..bc623efc8c 100644
--- a/src/operators/fusion_conv_add.h
+++ b/src/operators/fusion_conv_add.h
@@ -96,7 +96,7 @@ static framework::FusionOpRegistrar convadd_registrar(
 USE_OP_CPU(fusion_conv_add);
 #endif
 #ifdef PADDLE_MOBILE_MALI_GPU
-USE_OP_MALI_GPU(conv_add);
+USE_OP_MALI_GPU(fusion_conv_add);
 #endif
 #ifdef PADDLE_MOBILE_FPGA
 #endif
diff --git a/src/operators/fusion_fc_op.cpp b/src/operators/fusion_fc_op.cpp
index 1b2a46defc..0ca3c26c47 100644
--- a/src/operators/fusion_fc_op.cpp
+++ b/src/operators/fusion_fc_op.cpp
@@ -80,7 +80,7 @@ namespace ops = paddle_mobile::operators;
 REGISTER_OPERATOR_CPU(fusion_fc, ops::FusionFcOp);
 #endif
 #ifdef PADDLE_MOBILE_MALI_GPU
-REGISTER_OPERATOR_MALI_GPU(fc, ops::FusionFcOp);
+REGISTER_OPERATOR_MALI_GPU(fusion_fc, ops::FusionFcOp);
 #endif
 #ifdef PADDLE_MOBILE_FPGA
 #endif
diff --git a/src/operators/fusion_fc_op.h b/src/operators/fusion_fc_op.h
index c07d59e31e..b545bb8380 100644
--- a/src/operators/fusion_fc_op.h
+++ b/src/operators/fusion_fc_op.h
@@ -78,7 +78,7 @@ extern framework::FusionOpRegistrar fc_registrar(new FusionFcMatcher());
 
 #ifndef CONV_CPU_REGISTER
 #define CONV_CPU_REGISTER
-static framework::FusionOpRegistrar fc_registrar(new FusionFcMatcher());
+extern framework::FusionOpRegistrar fc_registrar(new FusionFcMatcher());
 #endif
 
 #endif
@@ -93,7 +93,7 @@ static framework::FusionOpRegistrar fc_registrar(new FusionFcMatcher());
 USE_OP_CPU(fusion_fc);
 #endif
 #ifdef PADDLE_MOBILE_MALI_GPU
-USE_OP_MALI_GPU(fc);
+USE_OP_MALI_GPU(fusion_fc);
 #endif
 #ifdef PADDLE_MOBILE_FPGA
 #endif
diff --git a/src/operators/kernel/mali/acl_operator.cc b/src/operators/kernel/mali/acl_operator.cc
old mode 100644
new mode 100755
diff --git a/src/operators/kernel/mali/acl_operator.h b/src/operators/kernel/mali/acl_operator.h
old mode 100644
new mode 100755
index c2e13283b1..bf8200d486
--- a/src/operators/kernel/mali/acl_operator.h
+++ b/src/operators/kernel/mali/acl_operator.h
@@ -225,6 +225,7 @@ class AclParameters {
   bool is_global_pool;
   bool is_channel_concat;
 
+  bool is_bypass;
   std::vector in_tensor;
 };
diff --git a/src/operators/kernel/mali/acl_tensor.cc b/src/operators/kernel/mali/acl_tensor.cc
old mode 100644
new mode 100755
diff --git a/src/operators/kernel/mali/acl_tensor.h b/src/operators/kernel/mali/acl_tensor.h
old mode 100644
new mode 100755
diff --git a/src/operators/kernel/mali/batchnorm_kernel.cpp b/src/operators/kernel/mali/batchnorm_kernel.cpp
old mode 100644
new mode 100755
index e749f4223e..ad648d615c
--- a/src/operators/kernel/mali/batchnorm_kernel.cpp
+++ b/src/operators/kernel/mali/batchnorm_kernel.cpp
@@ -136,7 +136,7 @@ bool BatchNormKernel<GPU_MALI, float>::Init(BatchNormParam* param) {
     acl_op = new AclBatchNormOp<GPU_MALI, float>();
     this->SetAclOp((void*)acl_op, (void*)this);
   }
-  if (acl_op->Bypass_acl(param)) {
+  if (acl_op->Bypass_acl(*param)) {
     std::cout << "init acl failed" << std::endl;
     return false;
   }
diff --git a/src/operators/kernel/mali/concat_kernel.cpp b/src/operators/kernel/mali/concat_kernel.cpp
old mode 100644
new mode 100755
index e0c78841c3..aaa586b6d9
--- a/src/operators/kernel/mali/concat_kernel.cpp
+++ b/src/operators/kernel/mali/concat_kernel.cpp
@@ -102,14 +102,14 @@ class AclConcatOp : public acl::ACLOperator {
 };
 
 template <>
-bool ConcatKernel<GPU_MALI, float>::Init(const ConcatParam& param) const {
+bool ConcatKernel<GPU_MALI, float>::Init(ConcatParam* param) {
   AclConcatOp<GPU_MALI, float>* acl_op =
       reinterpret_cast<AclConcatOp<GPU_MALI, float>*>(this->GetAclOp());
   if (acl_op == nullptr) {
     acl_op = new AclConcatOp<GPU_MALI, float>();
     this->SetAclOp((void*)acl_op, (void*)this);
   }
-  if (acl_op->Bypass_acl(param)) {
+  if (acl_op->Bypass_acl(*param)) {
     std::cout << "init acl failed" << std::endl;
     return false;
   }
diff --git a/src/operators/kernel/mali/conv_add_kernel.cpp b/src/operators/kernel/mali/conv_add_kernel.cpp
index 57b6d82ac6..318db016d1 100644
--- a/src/operators/kernel/mali/conv_add_kernel.cpp
+++ b/src/operators/kernel/mali/conv_add_kernel.cpp
@@ -196,15 +196,14 @@ class AclConvAddOp : public acl::ACLOperator {
 };
 
 template <>
-bool ConvAddKernel<GPU_MALI, float>::Init(
-    const FusionConvAddParam& param) const {
+bool ConvAddKernel<GPU_MALI, float>::Init(FusionConvAddParam* param) {
   AclConvAddOp<GPU_MALI, float>* acl_op =
       reinterpret_cast<AclConvAddOp<GPU_MALI, float>*>(this->GetAclOp());
   if (acl_op == nullptr) {
     acl_op = new AclConvAddOp<GPU_MALI, float>();
     this->SetAclOp((void*)acl_op, (void*)this);
   }
-  if (acl_op->Bypass_acl(param)) {
+  if (acl_op->Bypass_acl(*param)) {
     std::cout << "init acl failed" << std::endl;
     return false;
   }
diff --git a/src/operators/kernel/mali/conv_kernel.cpp b/src/operators/kernel/mali/conv_kernel.cpp
old mode 100644
new mode 100755
index 30bb763728..c548977eba
--- a/src/operators/kernel/mali/conv_kernel.cpp
+++ b/src/operators/kernel/mali/conv_kernel.cpp
@@ -203,7 +203,7 @@ bool ConvKernel<GPU_MALI, float>::Init(ConvParam* param) {
     acl_op = new AclConvOp<GPU_MALI, float>();
     this->SetAclOp((void*)acl_op, (void*)this);
   }
-  if (acl_op->Bypass_acl(param)) {
+  if (acl_op->Bypass_acl(*param)) {
     std::cout << "init acl failed" << std::endl;
     return false;
   }
diff --git a/src/operators/kernel/mali/elementwise_add_kernel.cpp b/src/operators/kernel/mali/elementwise_add_kernel.cpp
old mode 100644
new mode 100755
diff --git a/src/operators/kernel/mali/fushion_fc_kernel.cpp b/src/operators/kernel/mali/fushion_fc_kernel.cpp
old mode 100644
new mode 100755
diff --git a/src/operators/kernel/mali/lrn_kernel.cpp b/src/operators/kernel/mali/lrn_kernel.cpp
index c063ec8783..4fb5fca869 100644
--- a/src/operators/kernel/mali/lrn_kernel.cpp
+++ b/src/operators/kernel/mali/lrn_kernel.cpp
@@ -20,6 +20,7 @@ limitations under the License. */
 #ifdef PADDLE_MOBILE_MALI_GPU
 #include "acl_operator.h"
 #include "framework/operator.h"
+#include "operators/kernel/central-arm-func/lrn_arm_func.h"
 #include "operators/op_param.h"
 
 namespace paddle_mobile {
@@ -59,12 +60,15 @@ class AclLrnOp : public acl::ACLOperator {
     acl_configure(lrn, this, norm_info);
   }
 
+  void Set_bypass(bool bypass) { args.is_bypass = bypass; }
+
   void RunAcl(void* input, void* output) {
     acl::ACLOperator::acl_run(input, output);
   }
   bool Bypass_acl(const LrnParam& param) {
     bool bypass_acl = false;
     AclParametersByContext(param);
+    InitAclLayer(param);
     // for performance, more groups impact GPU performance
     if (this->force_bypass_acl_path_) {
       bypass_acl = true;
@@ -107,13 +111,18 @@ class AclLrnOp : public acl::ACLOperator {
 };
 
 template <>
-bool LrnKernel<GPU_MALI, float>::Init(const LrnParam& param) const {
+bool LrnKernel<GPU_MALI, float>::Init(LrnParam* param) {
   AclLrnOp<GPU_MALI, float>* acl_op =
       reinterpret_cast<AclLrnOp<GPU_MALI, float>*>(this->GetAclOp());
   if (acl_op == nullptr) {
     acl_op = new AclLrnOp<GPU_MALI, float>();
     this->SetAclOp((void*)acl_op, (void*)this);
   }
+  if (acl_op->Bypass_acl(*param)) {
+    acl_op->Set_bypass(true);
+    std::cout << "init acl failed" << std::endl;
+    return true;
+  }
   return true;
 }
 
@@ -125,14 +134,14 @@ void LrnKernel<GPU_MALI, float>::Compute(const LrnParam& param) const {
   if (acl_op == nullptr) {
     return;
   }
-  if (acl_op->Bypass_acl(param)) {
-    std::cout << "init acl failed" << std::endl;
+  acl::AclParameters& args = acl_op->getargs();
+  if (args.is_bypass) {
+    std::cout << "bypass op" << std::endl;
+    LrnCompute<float>(param);
     return;
   }
-  acl::AclParameters& args = acl_op->getargs();
   const float* input_data = (const float*)args.input_data;
   const float* output_data = (const float*)args.output_data;
-  acl_op->InitAclLayer(param);
   for (int n = 0; n < args.batch; ++n) {
     acl_op->RunAcl((void*)input_data, (void*)output_data);
     input_data += args.in_depth * args.in_cols * args.in_rows;
diff --git a/src/operators/kernel/mali/mul_kernel.cpp b/src/operators/kernel/mali/mul_kernel.cpp
old mode 100644
new mode 100755
diff --git a/src/operators/kernel/mali/pool_kernel.cpp b/src/operators/kernel/mali/pool_kernel.cpp
old mode 100644
new mode 100755
index 66f06e0e8f..1f49391341
--- a/src/operators/kernel/mali/pool_kernel.cpp
+++ b/src/operators/kernel/mali/pool_kernel.cpp
@@ -180,14 +180,14 @@ class AclPoolOp : public acl::ACLOperator {
 };
 
 template <>
-bool PoolKernel<GPU_MALI, float>::Init(const PoolParam& param) const {
+bool PoolKernel<GPU_MALI, float>::Init(PoolParam* param) {
   AclPoolOp<GPU_MALI, float>* acl_op =
       reinterpret_cast<AclPoolOp<GPU_MALI, float>*>(this->GetAclOp());
   if (acl_op == nullptr) {
     acl_op = new AclPoolOp<GPU_MALI, float>();
     this->SetAclOp((void*)acl_op, (void*)this);
   }
-  if (acl_op->Bypass_acl(param)) {
+  if (acl_op->Bypass_acl(*param)) {
     std::cout << "init acl failed" << std::endl;
     return false;
   }
diff --git a/src/operators/kernel/mali/relu_kernel.cpp b/src/operators/kernel/mali/relu_kernel.cpp
old mode 100644
new mode 100755
index 760076c9dc..1a8c0f8854
--- a/src/operators/kernel/mali/relu_kernel.cpp
+++ b/src/operators/kernel/mali/relu_kernel.cpp
@@ -100,14 +100,14 @@ class AclReluOp : public acl::ACLOperator {
 };
 
 template <>
-bool ReluKernel<GPU_MALI, float>::Init(const ReluParam& param) const {
+bool ReluKernel<GPU_MALI, float>::Init(ReluParam* param) {
   AclReluOp<GPU_MALI, float>* acl_op =
       reinterpret_cast<AclReluOp<GPU_MALI, float>*>(this->GetAclOp());
   if (acl_op == nullptr) {
     acl_op = new AclReluOp<GPU_MALI, float>();
     this->SetAclOp((void*)acl_op, (void*)this);
   }
-  if (acl_op->Bypass_acl(param)) {
+  if (acl_op->Bypass_acl(*param)) {
     std::cout << "init acl failed" << std::endl;
     return false;
   }
diff --git a/src/operators/kernel/mali/reshape_kernel.cpp b/src/operators/kernel/mali/reshape_kernel.cpp
old mode 100644
new mode 100755
diff --git a/src/operators/kernel/mali/softmax_kernel.cpp b/src/operators/kernel/mali/softmax_kernel.cpp
old mode 100644
new mode 100755
index 777e85a1dd..37d2f2b6b1
--- a/src/operators/kernel/mali/softmax_kernel.cpp
+++ b/src/operators/kernel/mali/softmax_kernel.cpp
@@ -97,14 +97,14 @@ class AclSoftmaxOp : public acl::ACLOperator {
 };
 
 template <>
-bool SoftmaxKernel<GPU_MALI, float>::Init(const SoftmaxParam& param) const {
+bool SoftmaxKernel<GPU_MALI, float>::Init(SoftmaxParam* param) {
   AclSoftmaxOp<GPU_MALI, float>* acl_op =
       reinterpret_cast<AclSoftmaxOp<GPU_MALI, float>*>(this->GetAclOp());
   if (acl_op == nullptr) {
     acl_op = new AclSoftmaxOp<GPU_MALI, float>();
     this->SetAclOp((void*)acl_op, (void*)this);
   }
-  if (acl_op->Bypass_acl(param)) {
+  if (acl_op->Bypass_acl(*param)) {
     std::cout << "init acl failed" << std::endl;
     return false;
   }
--
GitLab