From 79ed1c76cd9e7698b013e6127bb181c8c5e388e0 Mon Sep 17 00:00:00 2001
From: tensor-tang
Date: Tue, 30 Apr 2019 16:40:24 +0800
Subject: [PATCH] fix bn fuse vardesc and add model saver (#17143)

* fix bn fuse vardesc and add model saver
test=develop

* unify save model in test helper
test=develop

* fix mkdir on windows
test=develop

* remove magic number use bn bias var desc
test=develop
---
 .../fluid/framework/ir/conv_bn_fuse_pass.cc   | 11 +++--
 .../fluid/inference/tests/api/CMakeLists.txt  |  3 ++
 .../tests/api/analyzer_dam_tester.cc          | 16 ++----
 .../tests/api/analyzer_resnet50_tester.cc     | 16 ++----
 .../tests/api/analyzer_save_model_tester.cc   | 49 +++++++++++++++++++
 .../fluid/inference/tests/api/tester_helper.h |  7 +++
 6 files changed, 74 insertions(+), 28 deletions(-)
 create mode 100644 paddle/fluid/inference/tests/api/analyzer_save_model_tester.cc

diff --git a/paddle/fluid/framework/ir/conv_bn_fuse_pass.cc b/paddle/fluid/framework/ir/conv_bn_fuse_pass.cc
index 876a9996456..4fe3fb4f3dc 100644
--- a/paddle/fluid/framework/ir/conv_bn_fuse_pass.cc
+++ b/paddle/fluid/framework/ir/conv_bn_fuse_pass.cc
@@ -136,18 +136,21 @@ void ConvBNFusePass::ApplyImpl(ir::Graph* graph) const {
       return;
     }
 
+    // Get batch norm bias
+    auto* bn_bias_tensor =
+        scope->FindVar(bn_bias->Name())->GetMutable<LoDTensor>();
+
     // Create eltwise_y (conv bias) variable
     VarDesc eltwise_y_in_desc(
         patterns::PDNodeName(name_scope_, "eltwise_y_in"));
+    eltwise_y_in_desc.SetShape(framework::vectorize(bn_bias_tensor->dims()));
+    eltwise_y_in_desc.SetDataType(bn_bias_tensor->type());
+    eltwise_y_in_desc.SetLoDLevel(bn_bias->Var()->GetLoDLevel());
     eltwise_y_in_desc.SetPersistable(true);
     auto* eltwise_y_in_node = g->CreateVarNode(&eltwise_y_in_desc);
     auto* eltwise_y_in_tensor =
         scope->Var(eltwise_y_in_node->Name())->GetMutable<LoDTensor>();
 
-    // Get batch norm bias
-    auto* bn_bias_tensor =
-        scope->FindVar(bn_bias->Name())->GetMutable<LoDTensor>();
-
     // Initialize eltwise_y
     eltwise_y_in_tensor->Resize(bn_bias_tensor->dims());
     std::fill_n(eltwise_y_in_tensor->mutable_data<float>(platform::CPUPlace()),
diff --git a/paddle/fluid/inference/tests/api/CMakeLists.txt b/paddle/fluid/inference/tests/api/CMakeLists.txt
index a4641140e06..c0854d4d0a7 100644
--- a/paddle/fluid/inference/tests/api/CMakeLists.txt
+++ b/paddle/fluid/inference/tests/api/CMakeLists.txt
@@ -86,6 +86,9 @@ inference_analysis_test(test_analyzer_small_dam SRCS analyzer_dam_tester.cc
         EXTRA_DEPS ${INFERENCE_EXTRA_DEPS}
         ARGS --infer_model=${DAM_SMALL_INSTALL_DIR}/model --infer_data=${DAM_SMALL_INSTALL_DIR}/data.txt --max_turn_num=1 SERIAL)
 
+# save model
+inference_analysis_api_test(test_analyzer_save_model ${DAM_SMALL_INSTALL_DIR} analyzer_save_model_tester.cc SERIAL)
+
 # chinese_ner
 set(CHINESE_NER_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/chinese_ner")
 download_model_and_data(${CHINESE_NER_INSTALL_DIR} "chinese_ner_model.tar.gz" "chinese_ner-data.txt.tar.gz")
diff --git a/paddle/fluid/inference/tests/api/analyzer_dam_tester.cc b/paddle/fluid/inference/tests/api/analyzer_dam_tester.cc
index c9da5b3ea55..a3eac7b200c 100644
--- a/paddle/fluid/inference/tests/api/analyzer_dam_tester.cc
+++ b/paddle/fluid/inference/tests/api/analyzer_dam_tester.cc
@@ -171,9 +171,7 @@ void SetConfig(AnalysisConfig *cfg) {
 }
 
 void SetOptimConfig(AnalysisConfig *cfg) {
-  std::string optimModelPath =
-      FLAGS_infer_model.substr(0, FLAGS_infer_model.find_last_of("/")) +
-      "/saved_optim_model";
+  std::string optimModelPath = FLAGS_infer_model + "/saved_optim_model";
   cfg->SetModel(optimModelPath + "/model", optimModelPath + "/params");
   cfg->SwitchIrOptim(true);
   cfg->SwitchSpecifyInputNames();
@@ -327,16 +325,10 @@ TEST(Analyzer_dam, compare_determine) {
 // Save optim model
 TEST(Analyzer_dam, save_optim_model) {
   AnalysisConfig cfg;
-  SetConfig(&cfg);
-  std::string optimModelPath =
-      FLAGS_infer_model.substr(0, FLAGS_infer_model.find_last_of("/")) +
-      "/saved_optim_model";
+  std::string optimModelPath = FLAGS_infer_model + "/saved_optim_model";
   mkdir(optimModelPath.c_str(), 0777);
-  auto predictor = CreateTestPredictor(
-      reinterpret_cast<const PaddlePredictor::Config *>(&cfg),
-      FLAGS_use_analysis);
-  (static_cast<AnalysisPredictor *>(predictor.get()))
-      ->SaveOptimModel(optimModelPath);
+  SetConfig(&cfg);
+  SaveOptimModel(&cfg, optimModelPath);
 }
 
 void CompareOptimAndOrig(const PaddlePredictor::Config *orig_config,
diff --git a/paddle/fluid/inference/tests/api/analyzer_resnet50_tester.cc b/paddle/fluid/inference/tests/api/analyzer_resnet50_tester.cc
index 588c80aa607..e883ad5bfcf 100644
--- a/paddle/fluid/inference/tests/api/analyzer_resnet50_tester.cc
+++ b/paddle/fluid/inference/tests/api/analyzer_resnet50_tester.cc
@@ -33,9 +33,7 @@ void SetInput(std::vector<std::vector<PaddleTensor>> *inputs) {
 }
 
 void SetOptimConfig(AnalysisConfig *cfg) {
-  std::string optimModelPath =
-      FLAGS_infer_model.substr(0, FLAGS_infer_model.find_last_of("/")) +
-      "/saved_optim_model";
+  std::string optimModelPath = FLAGS_infer_model + "/saved_optim_model";
   cfg->SetModel(optimModelPath + "/model", optimModelPath + "/params");
   cfg->DisableGpu();
   cfg->SwitchIrOptim();
@@ -107,16 +105,10 @@ TEST(Analyzer_resnet50, compare_determine) {
 // Save optim model
 TEST(Analyzer_resnet50, save_optim_model) {
   AnalysisConfig cfg;
-  SetConfig(&cfg);
-  std::string optimModelPath =
-      FLAGS_infer_model.substr(0, FLAGS_infer_model.find_last_of("/")) +
-      "/saved_optim_model";
+  std::string optimModelPath = FLAGS_infer_model + "/saved_optim_model";
   mkdir(optimModelPath.c_str(), 0777);
-  auto predictor = CreateTestPredictor(
-      reinterpret_cast<const PaddlePredictor::Config *>(&cfg),
-      FLAGS_use_analysis);
-  (static_cast<AnalysisPredictor *>(predictor.get()))
-      ->SaveOptimModel(optimModelPath);
+  SetConfig(&cfg);
+  SaveOptimModel(&cfg, optimModelPath);
 }
 
 void CompareOptimAndOrig(const PaddlePredictor::Config *orig_config,
diff --git a/paddle/fluid/inference/tests/api/analyzer_save_model_tester.cc b/paddle/fluid/inference/tests/api/analyzer_save_model_tester.cc
new file mode 100644
index 00000000000..578b420ea92
--- /dev/null
+++ b/paddle/fluid/inference/tests/api/analyzer_save_model_tester.cc
@@ -0,0 +1,49 @@
+// Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "paddle/fluid/inference/tests/api/tester_helper.h"
+
+namespace paddle {
+namespace inference {
+
+void SetConfig(AnalysisConfig *cfg) {
+  cfg->SwitchSpecifyInputNames();
+  cfg->SwitchIrOptim(true);
+  cfg->SwitchIrDebug();
+}
+
+int GetNumOps(const AnalysisConfig &cfg) {
+  int num_ops;
+  auto predictor = CreatePaddlePredictor(cfg);
+  GetFuseStatis(static_cast<AnalysisPredictor *>(predictor.get()), &num_ops);
+  return num_ops;
+}
+
+TEST(Analyzer, save_model) {
+  AnalysisConfig cfg;
+  SetConfig(&cfg);
+  cfg.SetModel(FLAGS_infer_model + "/__model__", FLAGS_infer_model + "/param");
+  std::string optimModelPath = FLAGS_infer_model + "/saved_optim_model";
+  mkdir(optimModelPath.c_str(), 0777);
+  SaveOptimModel(&cfg, optimModelPath);
+
+  cfg.pass_builder()->ClearPasses();
+  int origin_num_ops = GetNumOps(cfg);
+  cfg.SetModel(optimModelPath + "/model", optimModelPath + "/params");
+  int fused_num_ops = GetNumOps(cfg);
+  CHECK_LE(fused_num_ops, origin_num_ops);
+}
+
+}  // namespace inference
+}  // namespace paddle
diff --git a/paddle/fluid/inference/tests/api/tester_helper.h b/paddle/fluid/inference/tests/api/tester_helper.h
index d13469a8482..10fc7556994 100644
--- a/paddle/fluid/inference/tests/api/tester_helper.h
+++ b/paddle/fluid/inference/tests/api/tester_helper.h
@@ -552,6 +552,13 @@ void CompareAnalysisAndZeroCopy(
   CompareResult(analysis_outputs, zerocopy_outputs);
 }
 
+void SaveOptimModel(AnalysisConfig *cfg, const std::string &dstPath) {
+  auto predictor = CreateTestPredictor(
+      reinterpret_cast<const PaddlePredictor::Config *>(cfg),
+      FLAGS_use_analysis);
+  (static_cast<AnalysisPredictor *>(predictor.get()))->SaveOptimModel(dstPath);
+}
+
 template <typename T>
 std::string LoDTensorSummary(const framework::LoDTensor &tensor) {
   std::stringstream ss;
--
GitLab
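
Background note on the conv_bn_fuse_pass hunk (standard batch-norm folding,
not stated in the patch itself): the pass rewrites

    y = gamma * (conv(x, W) - mean) / sqrt(var + eps) + beta

as a convolution followed by an elementwise_add,

    y = conv(x, W') + b',  with  W' = W * gamma / sqrt(var + eps)
                                 b' = beta - gamma * mean / sqrt(var + eps)

The fused bias b' has one element per output channel, i.e. exactly the shape
and dtype of the batch norm bias. That is why eltwise_y_in_desc now copies
its shape, data type, and LoD level from bn_bias rather than relying on a
magic number, per the "remove magic number use bn bias var desc" note above.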
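
For reference, a minimal sketch of the save-then-reload flow that the new
SaveOptimModel() helper in tester_helper.h enables. This is illustration
only, not part of the patch: the model paths and the SaveAndReload wrapper
are hypothetical placeholders.

    #include <sys/stat.h>  // POSIX mkdir; the commit message notes a
                           // separate fix for mkdir on Windows
    #include <string>
    #include "paddle/fluid/inference/tests/api/tester_helper.h"

    namespace paddle {
    namespace inference {

    void SaveAndReload() {
      // Configure the source model and enable the IR optimization passes.
      AnalysisConfig cfg;
      cfg.SwitchIrOptim(true);
      cfg.SetModel("/path/to/__model__", "/path/to/param");  // placeholders

      // The destination directory must exist before saving.
      std::string dst = "/path/to/saved_optim_model";  // placeholder
      mkdir(dst.c_str(), 0777);

      // Builds a test predictor over the config and asks the underlying
      // AnalysisPredictor to write the optimized program to dst.
      SaveOptimModel(&cfg, dst);

      // The saved artifacts then load like any combined model, which is
      // what the updated SetOptimConfig() functions in the testers do.
      AnalysisConfig optim_cfg;
      optim_cfg.SetModel(dst + "/model", dst + "/params");
    }

    }  // namespace inference
    }  // namespace paddle

The new analyzer_save_model_tester.cc exercises exactly this round trip and
then checks, via GetFuseStatis(), that the reloaded optimized program has no
more ops than the original, confirming that fusions were baked in at save
time.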