From 2ae4214fa2516776fa6ab6098ecc739badeebd07 Mon Sep 17 00:00:00 2001
From: kai00
Date: Mon, 24 Aug 2020 17:37:32 +0800
Subject: [PATCH] fusion mem check fixed

---
 .../fusion/batchnorm_convert_scale_pass.cc | 31 ++++++++++---------
 .../fusion/batchnorm_convert_scale_pass.h  |  2 +-
 2 files changed, 17 insertions(+), 16 deletions(-)

diff --git a/mindspore/lite/tools/converter/legacy_optimizer/fusion/batchnorm_convert_scale_pass.cc b/mindspore/lite/tools/converter/legacy_optimizer/fusion/batchnorm_convert_scale_pass.cc
index fbbd4adcb..cc7c0f5b1 100644
--- a/mindspore/lite/tools/converter/legacy_optimizer/fusion/batchnorm_convert_scale_pass.cc
+++ b/mindspore/lite/tools/converter/legacy_optimizer/fusion/batchnorm_convert_scale_pass.cc
@@ -90,23 +90,29 @@ STATUS BatchNormConvertScalePass::DoFusion(MetaGraphT *graph, const std::string
     return RET_OK;
   }
   auto bnPath = matchedPath.at(bnOpName);
-  status = GetTransParam(graph, bnPath);
-  if (status != RET_OK) {
-    MS_LOG(ERROR) << "GetTransParam failed: " << status;
-    return status;
-  }
-
   status = GenNewScaleTensor(graph, bnPath);
   if (status != RET_OK) {
     MS_LOG(ERROR) << "GenNewScaleTensor failed: " << status;
+    delete[] transScale;
+    delete[] transBias;
+    transScale = nullptr;
+    transBias = nullptr;
     return status;
   }
   status = ConvertBNToScale(graph, bnPath);
   if (status != RET_OK) {
     MS_LOG(ERROR) << "GenNewScaleTensor failed: " << status;
+    delete[] transScale;
+    delete[] transBias;
+    transScale = nullptr;
+    transBias = nullptr;
     return status;
   }
+  delete[] transScale;
+  delete[] transBias;
+  transScale = nullptr;
+  transBias = nullptr;
   return RET_OK;
 }
 
 STATUS BatchNormConvertScalePass::ConvertBNToScale(MetaGraphT *graph, const std::shared_ptr &bnPath) {
@@ -245,6 +251,10 @@ STATUS BatchNormConvertScalePass::GetTransParam(MetaGraphT *graph, const std::sh
   // cal transScale, tf : scale/sqrt(variance + eps); caffe : 1/sqrt(variance + eps)
   if (memcpy_s(transScale, bnChannel * sizeof(float), varianceData, bnChannel * sizeof(float)) != 0) {
     MS_LOG(ERROR) << "memcpy_s transScale error";
+    delete[] transScale;
+    delete[] transBias;
+    transScale = nullptr;
+    transBias = nullptr;
     return RET_ERROR;
   }
   // 1/sqrt(variance + eps)
@@ -370,14 +380,5 @@ STATUS BatchNormConvertScalePass::GetBnEpsilon(MetaGraphT *graph) {
   }
   return RET_OK;
 }
-
-BatchNormConvertScalePass::~BatchNormConvertScalePass() {
-  if (this->transScale != nullptr) {
-    delete (this->transScale);
-  }
-  if (this->transBias != nullptr) {
-    delete (this->transBias);
-  }
-}
 }  // namespace lite
 }  // namespace mindspore
diff --git a/mindspore/lite/tools/converter/legacy_optimizer/fusion/batchnorm_convert_scale_pass.h b/mindspore/lite/tools/converter/legacy_optimizer/fusion/batchnorm_convert_scale_pass.h
index 06a683370..7163914b0 100644
--- a/mindspore/lite/tools/converter/legacy_optimizer/fusion/batchnorm_convert_scale_pass.h
+++ b/mindspore/lite/tools/converter/legacy_optimizer/fusion/batchnorm_convert_scale_pass.h
@@ -36,7 +36,7 @@ class BatchNormConvertScalePass : public FusionPass {
  public:
   BatchNormConvertScalePass() = default;
 
-  ~BatchNormConvertScalePass() override;
+  ~BatchNormConvertScalePass() = default;
 
   STATUS DefinePattern() override;
 
--
GitLab
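
Note on the cleanup pattern: the patch releases transScale and transBias with explicit delete[] calls on every early-return path of DoFusion and GetTransParam, and drops the destructor that previously freed them. Below is a minimal standalone sketch of an alternative, holding the per-channel buffers in std::unique_ptr<float[]> so every return path releases them automatically. It is illustrative only, not MindSpore Lite code: the STATUS values, TransParam struct, and this GetTransParam signature are simplified stand-ins for the real pass.

// Sketch only (assumed, simplified names): RAII ownership of the transform
// buffers instead of repeating delete[] on each error path.
#include <algorithm>
#include <cmath>
#include <cstddef>
#include <iostream>
#include <memory>

enum STATUS { RET_OK = 0, RET_ERROR = 1 };  // stand-in for the real status codes

struct TransParam {
  std::unique_ptr<float[]> scale;  // transScale equivalent, freed when TransParam is destroyed
  std::unique_ptr<float[]> bias;   // transBias equivalent, freed when TransParam is destroyed
};

// Computes scale = 1 / sqrt(variance + eps) per channel (the caffe-style branch
// mentioned in the patch comment); bias is left at zero for brevity.
STATUS GetTransParam(const float *varianceData, std::size_t bnChannel, float eps, TransParam *out) {
  if (varianceData == nullptr || bnChannel == 0 || out == nullptr) {
    std::cerr << "invalid variance tensor" << std::endl;
    return RET_ERROR;  // early return: nothing allocated yet, nothing to free
  }
  out->scale = std::make_unique<float[]>(bnChannel);
  out->bias = std::make_unique<float[]>(bnChannel);
  std::copy_n(varianceData, bnChannel, out->scale.get());
  for (std::size_t i = 0; i < bnChannel; ++i) {
    if (out->scale[i] + eps <= 0.0f) {
      std::cerr << "non-positive variance at channel " << i << std::endl;
      return RET_ERROR;  // early return without manual cleanup: *out still owns the buffers
    }
    out->scale[i] = 1.0f / std::sqrt(out->scale[i] + eps);
    out->bias[i] = 0.0f;
  }
  return RET_OK;
}

int main() {
  const float variance[] = {0.5f, 1.0f, 2.0f};
  TransParam param;
  if (GetTransParam(variance, 3, 1e-5f, &param) != RET_OK) {
    return 1;
  }
  std::cout << "scale[0] = " << param.scale[0] << std::endl;
  return 0;  // buffers released by TransParam's unique_ptr members
}

The trade-off is the same as in the patch: whatever object owns the buffers must outlive GenNewScaleTensor and ConvertBNToScale; RAII only makes the release impossible to miss when a new error path is added.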