diff --git a/mindspore/lite/nnacl/fp32/arithmetic_self.c b/mindspore/lite/nnacl/fp32/arithmetic_self.c
index d426806e11f2ccaacf71115bb9e10ab19cd850ee..8b500bdb11d6ec73f6340ecf3cb7c4cce03b9830 100644
--- a/mindspore/lite/nnacl/fp32/arithmetic_self.c
+++ b/mindspore/lite/nnacl/fp32/arithmetic_self.c
@@ -34,14 +34,6 @@ int ElementCos(float *input, float *output, int element_size) {
   return NNACL_OK;
 }
 
-// exp:
-int ElementExp(float *input, float *output, int element_size) {
-  for (int i = 0; i < element_size; i++) {
-    output[i] = expf(input[i]);
-  }
-  return NNACL_OK;
-}
-
 // log:
 int ElementLog(float *input, float *output, int element_size) {
   for (int i = 0; i < element_size; i++) {
diff --git a/mindspore/lite/nnacl/fp32/arithmetic_self.h b/mindspore/lite/nnacl/fp32/arithmetic_self.h
index 3b76a42f4a42376153f4c224c3bc00c7893dcbc9..38db8002ddbd12d81c90300634882d316fad1454 100644
--- a/mindspore/lite/nnacl/fp32/arithmetic_self.h
+++ b/mindspore/lite/nnacl/fp32/arithmetic_self.h
@@ -30,8 +30,6 @@ int ElementAbs(float *input, float *output, int element_size);
 
 int ElementCos(float *input, float *output, int element_size);
 
-int ElementExp(float *input, float *output, int element_size);
-
 int ElementLog(float *input, float *output, int element_size);
 
 int ElementSquare(float *input, float *output, int element_size);
diff --git a/mindspore/lite/nnacl/fp32/exp.c b/mindspore/lite/nnacl/fp32/exp.c
new file mode 100644
index 0000000000000000000000000000000000000000..a135ed4563e3869a797ba3ca03a30dddc3a68b06
--- /dev/null
+++ b/mindspore/lite/nnacl/fp32/exp.c
@@ -0,0 +1,37 @@
+/**
+ * Copyright 2020 Huawei Technologies Co., Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "nnacl/fp32/exp.h"
+#include <math.h>
+#include "nnacl/errorcode.h"
+
+int Exp(float *input_data, float *output_data, ExpParameter *parameter, int task_id) {
+  if (parameter->scale_ == 1) {
+    for (size_t i = task_id; i < parameter->element_num_; i += parameter->thread_num_) {
+      output_data[i] = expf(input_data[i]);
+    }
+  } else {
+    for (size_t i = task_id; i < parameter->element_num_; i += parameter->thread_num_) {
+      output_data[i] = expf(input_data[i] * parameter->in_scale_);
+    }
+  }
+  if (parameter->out_scale_ != 1) {
+    for (size_t i = task_id; i < parameter->element_num_; i += parameter->thread_num_) {
+      output_data[i] = output_data[i] * parameter->out_scale_;
+    }
+  }
+  return NNACL_OK;
+}
diff --git a/mindspore/lite/nnacl/fp32/exp.h b/mindspore/lite/nnacl/fp32/exp.h
new file mode 100644
index 0000000000000000000000000000000000000000..e9d8bd3d65519ecb1566ee2b89fdcb0fb541d7f9
--- /dev/null
+++ b/mindspore/lite/nnacl/fp32/exp.h
@@ -0,0 +1,41 @@
+/**
+ * Copyright 2020 Huawei Technologies Co., Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef MINDSPORE_LITE_NNACL_FP32_EXP_H_
+#define MINDSPORE_LITE_NNACL_FP32_EXP_H_
+
+#include "nnacl/op_base.h"
+
+typedef struct ExpParameter {
+  OpParameter op_parameter_;
+  int thread_num_;
+  float base_;
+  float scale_;
+  float shift_;
+  float in_scale_;
+  float out_scale_;
+  int element_num_;
+} ExpParameter;
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+int Exp(float *input_data, float *output_data, ExpParameter *parameter, int task_id);
+#ifdef __cplusplus
+}
+#endif
+
+#endif  // MINDSPORE_LITE_NNACL_FP32_EXP_H_
diff --git a/mindspore/lite/schema/ops.fbs b/mindspore/lite/schema/ops.fbs
index eccf905448bb2d2f534106e661dd98640a963b16..3ad035c0f7741e88fbd135bda8d2c9f40bf1fde3 100644
--- a/mindspore/lite/schema/ops.fbs
+++ b/mindspore/lite/schema/ops.fbs
@@ -478,9 +478,9 @@ table Neg {
 }
 
 table Exp {
-    base : float;
-    scale : float;
-    shift : float;
+    base : float = -1.0;
+    scale : float = 1.0;
+    shift : float = 0.0;
 }
 
 table Cos {
diff --git a/mindspore/lite/src/ops/exp.cc b/mindspore/lite/src/ops/exp.cc
index 1c5acbba01efe2ed7e14f8bbc99febe86d017e8f..4870da0cdab34127801e69018c935ed3f64f8f74 100644
--- a/mindspore/lite/src/ops/exp.cc
+++ b/mindspore/lite/src/ops/exp.cc
@@ -18,16 +18,35 @@
 
 namespace mindspore {
 namespace lite {
-#ifndef PRIMITIVE_WRITEABLE
+#ifdef PRIMITIVE_WRITEABLE
+void Exp::SetBase(float base) { this->primitive_->value.AsExp()->base = base; }
+void Exp::SetScale(float scale) { this->primitive_->value.AsExp()->scale = scale; }
+void Exp::SetShift(float shift) { this->primitive_->value.AsExp()->shift = shift; }
+
+float Exp::GetBase() const { return this->primitive_->value.AsExp()->base; }
+float Exp::GetScale() const { return this->primitive_->value.AsExp()->scale; }
+float Exp::GetShift() const { return this->primitive_->value.AsExp()->shift; }
+#else
+
 int Exp::UnPackToFlatBuilder(const schema::Primitive *primitive, flatbuffers::FlatBufferBuilder *fbb) {
   MS_ASSERT(nullptr != primitive);
   MS_ASSERT(nullptr != fbb);
-  auto val_offset = schema::CreateExp(*fbb);
+  auto attr = primitive->value_as_Exp();
+  if (attr == nullptr) {
+    MS_LOG(ERROR) << "value_as_Exp return nullptr";
+    return RET_ERROR;
+  }
+
+  auto val_offset = schema::CreateExp(*fbb, attr->base(), attr->scale(), attr->shift());
   auto prim_offset = schema::CreatePrimitive(*fbb, schema::PrimitiveType_Exp, val_offset.o);
   fbb->Finish(prim_offset);
   return RET_OK;
 }
+float Exp::GetBase() const { return this->primitive_->value_as_Exp()->base(); }
+float Exp::GetScale() const { return this->primitive_->value_as_Exp()->scale(); }
+float Exp::GetShift() const { return this->primitive_->value_as_Exp()->shift(); }
+
 #endif
 }  // namespace lite
 }  // namespace mindspore
diff --git a/mindspore/lite/src/ops/exp.h b/mindspore/lite/src/ops/exp.h
index c79648725e3f627991d5a52184c41d38a5563ac8..cfd24253054b72a835be3952b8a1b406e4394e84 100644
--- a/mindspore/lite/src/ops/exp.h
+++ b/mindspore/lite/src/ops/exp.h
@@ -21,21 +21,27 @@
 #include <vector>
 #include <set>
 #include "ir/dtype/type_id.h"
-#include "src/ops/arithmetic_self.h"
+#include "src/ops/primitive_c.h"
 
 namespace mindspore {
 namespace lite {
-class Exp : public ArithmeticSelf {
+class Exp : public PrimitiveC {
  public:
 #ifdef PRIMITIVE_WRITEABLE
-  MS_DECLARE_PARENT(Exp, ArithmeticSelf);
+  MS_DECLARE_PARENT(Exp, PrimitiveC);
   Exp() = default;
-  explicit Exp(schema::PrimitiveT *primitive) : ArithmeticSelf(primitive) {}
+  explicit Exp(schema::PrimitiveT *primitive) : PrimitiveC(primitive) {}
+  void SetBase(float base);
+  void SetShift(float shift);
+  void SetScale(float scale);
 #else
   Exp() = default;
 
   int UnPackToFlatBuilder(const schema::Primitive *primitive, flatbuffers::FlatBufferBuilder *fbb) override;
 #endif
+  float GetBase() const;
+  float GetShift() const;
+  float GetScale() const;
 };
 }  // namespace lite
 }  // namespace mindspore
diff --git a/mindspore/lite/src/ops/floor_div.cc b/mindspore/lite/src/ops/floor_div.cc
index 811e29e6351b93d6729b79fda6107cd83545a277..9eff6de98a05eddc40e3d8327b0b6e0739e70553 100644
--- a/mindspore/lite/src/ops/floor_div.cc
+++ b/mindspore/lite/src/ops/floor_div.cc
@@ -23,7 +23,7 @@ namespace lite {
 int FloorDiv::UnPackToFlatBuilder(const schema::Primitive *primitive, flatbuffers::FlatBufferBuilder *fbb) {
   MS_ASSERT(nullptr != primitive);
   MS_ASSERT(nullptr != fbb);
-  auto val_offset = schema::CreateFloor(*fbb);
+  auto val_offset = schema::CreateFloorDiv(*fbb);
   auto prim_offset = schema::CreatePrimitive(*fbb, schema::PrimitiveType_FloorDiv, val_offset.o);
   fbb->Finish(prim_offset);
   return RET_OK;
diff --git a/mindspore/lite/src/populate_parameter.cc b/mindspore/lite/src/populate_parameter.cc
index 06907285845e3c70a51a8ab4604093d3012a66a7..ede3f91bef7b59c06c72bf5b14b8c20d6cfd753c 100644
--- a/mindspore/lite/src/populate_parameter.cc
+++ b/mindspore/lite/src/populate_parameter.cc
@@ -173,6 +173,7 @@
 #include "nnacl/sparse_to_dense.h"
 #include "nnacl/l2_norm_parameter.h"
 #include "nnacl/detection_post_process_parameter.h"
+#include "nnacl/fp32/exp.h"
 
 namespace mindspore::kernel {
 
@@ -1519,8 +1520,7 @@ OpParameter *PopulateEluParameter(const mindspore::lite::PrimitiveC *primitive)
   return reinterpret_cast<OpParameter *>(elu_parameter);
 }
 
-OpParameter *PopulateL2NormParameter(
-    const mindspore::lite::PrimitiveC *primitive) {
+OpParameter *PopulateL2NormParameter(const mindspore::lite::PrimitiveC *primitive) {
   L2NormParameter *l2_norm_parameter = reinterpret_cast<L2NormParameter *>(malloc(sizeof(L2NormParameter)));
   if (l2_norm_parameter == nullptr) {
     MS_LOG(ERROR) << "malloc L2NormParameter failed.";
@@ -1568,6 +1568,26 @@ OpParameter *PopulateDetectionPostProcessParameter(const mindspore::lite::PrimitiveC *primitive)
   return reinterpret_cast<OpParameter *>(detection_post_process_parameter);
 }
 
+OpParameter *PopulateExpParameter(const mindspore::lite::PrimitiveC *primitive) {
+  ExpParameter *exp_parameter = reinterpret_cast<ExpParameter *>(malloc(sizeof(ExpParameter)));
+  if (exp_parameter == nullptr) {
+    MS_LOG(ERROR) << "malloc ExpParameter failed.";
+    return nullptr;
+  }
+  memset(exp_parameter, 0, sizeof(ExpParameter));
+  exp_parameter->op_parameter_.type_ = primitive->Type();
+  auto param = reinterpret_cast<mindspore::lite::Exp *>(const_cast<mindspore::lite::PrimitiveC *>(primitive));
+  exp_parameter->base_ = param->GetBase();
+  exp_parameter->scale_ = param->GetScale();
+  exp_parameter->shift_ = param->GetShift();
+  if (exp_parameter->base_ != -1 && exp_parameter->base_ <= 0) {
+    MS_LOG(ERROR) << "Exp base must be strictly positive, got " << exp_parameter->base_;
+    free(exp_parameter);
+    return nullptr;
+  }
+  return reinterpret_cast<OpParameter *>(exp_parameter);
+}
+
 PopulateParameterRegistry::PopulateParameterRegistry() {
   populate_parameter_funcs_[schema::PrimitiveType_SparseToDense] = PopulateSparseToDenseParameter;
   populate_parameter_funcs_[schema::PrimitiveType_SoftMax] = PopulateSoftmaxParameter;
@@ -1610,7 +1630,7 @@ PopulateParameterRegistry::PopulateParameterRegistry() {
   populate_parameter_funcs_[schema::PrimitiveType_Abs] = PopulateArithmeticSelf;
   populate_parameter_funcs_[schema::PrimitiveType_Cos] = PopulateArithmeticSelf;
   populate_parameter_funcs_[schema::PrimitiveType_Sin] = PopulateArithmeticSelf;
-  populate_parameter_funcs_[schema::PrimitiveType_Exp] = PopulateArithmeticSelf;
+  populate_parameter_funcs_[schema::PrimitiveType_Exp] = PopulateExpParameter;
   populate_parameter_funcs_[schema::PrimitiveType_Log] = PopulateArithmeticSelf;
   populate_parameter_funcs_[schema::PrimitiveType_Square] = PopulateArithmeticSelf;
   populate_parameter_funcs_[schema::PrimitiveType_Sqrt] = PopulateArithmeticSelf;
diff --git a/mindspore/lite/src/runtime/kernel/arm/fp32/arithmetic_self.cc b/mindspore/lite/src/runtime/kernel/arm/fp32/arithmetic_self.cc
index a524f9dd8792c31312c5dac587fa445826da390a..7a183f7d0b92ae72b1b58b4ea5d524a995b0df33 100644
--- a/mindspore/lite/src/runtime/kernel/arm/fp32/arithmetic_self.cc
+++ b/mindspore/lite/src/runtime/kernel/arm/fp32/arithmetic_self.cc
@@ -169,7 +169,6 @@ kernel::LiteKernel *CpuArithmeticSelfFp32KernelCreator(const std::vector<lite::tensor::Tensor *> &inputs,
diff --git a/mindspore/lite/src/runtime/kernel/arm/fp32/exp.cc b/mindspore/lite/src/runtime/kernel/arm/fp32/exp.cc
new file mode 100644
--- /dev/null
+++ b/mindspore/lite/src/runtime/kernel/arm/fp32/exp.cc
+/**
+ * Copyright 2020 Huawei Technologies Co., Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "src/runtime/kernel/arm/fp32/exp.h"
+#include <cmath>
+#include "include/errorcode.h"
+#include "src/kernel_registry.h"
+#include "src/runtime/runtime_api.h"
+
+using mindspore::lite::KernelRegistrar;
+using mindspore::lite::RET_ERROR;
+using mindspore::lite::RET_OK;
+using mindspore::schema::PrimitiveType_Exp;
+
+namespace mindspore::kernel {
+int ExpCPUKernel::Init() {
+  exp_parameter_ = reinterpret_cast<ExpParameter *>(op_parameter_);
+  exp_parameter_->thread_num_ = thread_count_;
+
+  if (!InferShapeDone()) {
+    return RET_OK;
+  }
+
+  return ReSize();
+}
+
+int ExpCPUKernel::ReSize() {
+  exp_parameter_->thread_num_ = thread_count_;
+  float log_ = (exp_parameter_->base_ == -1) ? 1 : logf(exp_parameter_->base_);
+  exp_parameter_->in_scale_ = exp_parameter_->scale_ * log_;
+  if (exp_parameter_->shift_ == 0) {
+    exp_parameter_->out_scale_ = 1;
+  } else {
+    if (log_ == 1) {
+      exp_parameter_->out_scale_ = expf(exp_parameter_->shift_);
+    } else {
+      exp_parameter_->out_scale_ = powf(exp_parameter_->base_, exp_parameter_->shift_);
+    }
+  }
+  return RET_OK;
+}
+
+int ExpCPUKernel::DoExcute(int task_id) {
+  Exp(input_addr_, output_addr_, exp_parameter_, task_id);
+  return RET_OK;
+}
+
+int ExpRun(void *cdata, int task_id) {
+  auto ExpData = reinterpret_cast<ExpCPUKernel *>(cdata);
+  auto ret = ExpData->DoExcute(task_id);
+  if (ret != RET_OK) {
+    MS_LOG(ERROR) << "ExpRun error task_id[" << task_id << "] error_code[" << ret << "]";
+    return RET_ERROR;
+  }
+  return RET_OK;
+}
+
+int ExpCPUKernel::Run() {
+  auto prepare_ret = Prepare();
+  if (prepare_ret != RET_OK) {
+    MS_LOG(ERROR) << "Prepare fail!ret: " << prepare_ret;
+    return prepare_ret;
+  }
+  input_addr_ = reinterpret_cast<float *>(in_tensors_.front()->Data());
+  output_addr_ = reinterpret_cast<float *>(out_tensors_.front()->Data());
+  exp_parameter_->element_num_ = in_tensors_.front()->ElementsNum();
+
+  auto ret = ParallelLaunch(THREAD_POOL_DEFAULT, ExpRun, this, exp_parameter_->thread_num_);
+  if (ret != RET_OK) {
+    MS_LOG(ERROR) << "Exp error: error_code[" << ret << "]";
+    return RET_ERROR;
+  }
+  return RET_OK;
+}
+
+kernel::LiteKernel *CpuExpFp32KernelCreator(const std::vector<lite::tensor::Tensor *> &inputs,
+                                            const std::vector<lite::tensor::Tensor *> &outputs, OpParameter *parameter,
+                                            const lite::Context *ctx, const KernelKey &desc,
+                                            const mindspore::lite::PrimitiveC *primitive) {
+  if (parameter == nullptr || ctx == nullptr) {
+    MS_LOG(ERROR) << "parameter or ctx is nullptr";
+    return nullptr;
+  }
+  MS_ASSERT(desc.type == PrimitiveType_Exp);
+  auto *kernel = new (std::nothrow) ExpCPUKernel(parameter, inputs, outputs, ctx, primitive);
+  if (kernel == nullptr) {
+    MS_LOG(ERROR) << "Create Kernel failed, name: " << parameter->name_;
+    return nullptr;
+  }
+
+  auto ret = kernel->Init();
+  if (ret != RET_OK) {
+    MS_LOG(ERROR) << "Init Kernel failed, name: " << parameter->name_
+                  << ", type: " << schema::EnumNamePrimitiveType(static_cast<schema::PrimitiveType>(parameter->type_));
+    delete kernel;
+    return nullptr;
+  }
+  return kernel;
+}
+
+REG_KERNEL(kCPU, kNumberTypeFloat32, PrimitiveType_Exp, CpuExpFp32KernelCreator)
+}  // namespace mindspore::kernel
diff --git a/mindspore/lite/src/runtime/kernel/arm/fp32/exp.h b/mindspore/lite/src/runtime/kernel/arm/fp32/exp.h
new file mode 100644
index 0000000000000000000000000000000000000000..b9d3206dc1d1878cb4fb0cce6718053ea4e9a7f9
--- /dev/null
+++ b/mindspore/lite/src/runtime/kernel/arm/fp32/exp.h
@@ -0,0 +1,49 @@
+/**
+ * Copyright 2020 Huawei Technologies Co., Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +#ifndef MINDSPORE_LITE_SRC_RUNTIME_KERNEL_ARM_FP32_EXP_H_ +#define MINDSPORE_LITE_SRC_RUNTIME_KERNEL_ARM_FP32_EXP_H_ + +#include +#include "src/lite_kernel.h" +#include "nnacl/fp32/exp.h" + +namespace mindspore::kernel { +class ExpCPUKernel : public LiteKernel { + public: + explicit ExpCPUKernel(OpParameter *parameter, const std::vector &inputs, + const std::vector &outputs, const lite::Context *ctx, + const mindspore::lite::PrimitiveC *primitive) + : LiteKernel(parameter, inputs, outputs, ctx, primitive), ctx_(ctx), thread_count_(ctx->thread_num_) {} + ~ExpCPUKernel() override{}; + + int Init() override; + int ReSize() override; + int Run() override; + int DoExcute(int task_id); + + protected: + const lite::Context *ctx_; + int thread_count_; + ExpParameter *exp_parameter_; + + private: + float *input_addr_; + float *output_addr_; +}; +} // namespace mindspore::kernel + +#endif // MINDSPORE_LITE_SRC_RUNTIME_KERNEL_ARM_FP32_EXP_H_