diff --git a/paddle/fluid/framework/ir/conv_bn_fuse_pass.cc b/paddle/fluid/framework/ir/conv_bn_fuse_pass.cc
index 876a9996456c256f9b5f511ecd792f915b74b0df..4fe3fb4f3dc5e1258f34cefe4c1f642b37e05936 100644
--- a/paddle/fluid/framework/ir/conv_bn_fuse_pass.cc
+++ b/paddle/fluid/framework/ir/conv_bn_fuse_pass.cc
@@ -136,18 +136,21 @@ void ConvBNFusePass::ApplyImpl(ir::Graph* graph) const {
     return;
   }
 
+  // Get batch norm bias
+  auto* bn_bias_tensor =
+      scope->FindVar(bn_bias->Name())->GetMutable<LoDTensor>();
+
   // Create eltwise_y (conv bias) variable
   VarDesc eltwise_y_in_desc(
       patterns::PDNodeName(name_scope_, "eltwise_y_in"));
+  eltwise_y_in_desc.SetShape(framework::vectorize(bn_bias_tensor->dims()));
+  eltwise_y_in_desc.SetDataType(bn_bias_tensor->type());
+  eltwise_y_in_desc.SetLoDLevel(bn_bias->Var()->GetLoDLevel());
   eltwise_y_in_desc.SetPersistable(true);
   auto* eltwise_y_in_node = g->CreateVarNode(&eltwise_y_in_desc);
   auto* eltwise_y_in_tensor =
       scope->Var(eltwise_y_in_node->Name())->GetMutable<LoDTensor>();
 
-  // Get batch norm bias
-  auto* bn_bias_tensor =
-      scope->FindVar(bn_bias->Name())->GetMutable<LoDTensor>();
-
   // Initialize eltwise_y
   eltwise_y_in_tensor->Resize(bn_bias_tensor->dims());
   std::fill_n(eltwise_y_in_tensor->mutable_data<float>(platform::CPUPlace()),
diff --git a/paddle/fluid/inference/tests/api/CMakeLists.txt b/paddle/fluid/inference/tests/api/CMakeLists.txt
index a4641140e06b9c6efb13cdb58eb2a0ee810c71c6..c0854d4d0a7f855dcd6625863909d47ac17d2942 100644
--- a/paddle/fluid/inference/tests/api/CMakeLists.txt
+++ b/paddle/fluid/inference/tests/api/CMakeLists.txt
@@ -86,6 +86,9 @@ inference_analysis_test(test_analyzer_small_dam SRCS analyzer_dam_tester.cc
                         EXTRA_DEPS ${INFERENCE_EXTRA_DEPS}
                         ARGS --infer_model=${DAM_SMALL_INSTALL_DIR}/model --infer_data=${DAM_SMALL_INSTALL_DIR}/data.txt --max_turn_num=1 SERIAL)
 
+# save model
+inference_analysis_api_test(test_analyzer_save_model ${DAM_SMALL_INSTALL_DIR} analyzer_save_model_tester.cc SERIAL)
+
 # chinese_ner
 set(CHINESE_NER_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/chinese_ner")
 download_model_and_data(${CHINESE_NER_INSTALL_DIR} "chinese_ner_model.tar.gz" "chinese_ner-data.txt.tar.gz")
diff --git a/paddle/fluid/inference/tests/api/analyzer_dam_tester.cc b/paddle/fluid/inference/tests/api/analyzer_dam_tester.cc
index c9da5b3ea5581e415f11c8f85e1d6aea757531ab..a3eac7b200c37b4500183eb3888582d1dc695bb7 100644
--- a/paddle/fluid/inference/tests/api/analyzer_dam_tester.cc
+++ b/paddle/fluid/inference/tests/api/analyzer_dam_tester.cc
@@ -171,9 +171,7 @@ void SetConfig(AnalysisConfig *cfg) {
 }
 
 void SetOptimConfig(AnalysisConfig *cfg) {
-  std::string optimModelPath =
-      FLAGS_infer_model.substr(0, FLAGS_infer_model.find_last_of("/")) +
-      "/saved_optim_model";
+  std::string optimModelPath = FLAGS_infer_model + "/saved_optim_model";
   cfg->SetModel(optimModelPath + "/model", optimModelPath + "/params");
   cfg->SwitchIrOptim(true);
   cfg->SwitchSpecifyInputNames();
@@ -327,16 +325,10 @@ TEST(Analyzer_dam, compare_determine) {
 // Save optim model
 TEST(Analyzer_dam, save_optim_model) {
   AnalysisConfig cfg;
-  SetConfig(&cfg);
-  std::string optimModelPath =
-      FLAGS_infer_model.substr(0, FLAGS_infer_model.find_last_of("/")) +
-      "/saved_optim_model";
+  std::string optimModelPath = FLAGS_infer_model + "/saved_optim_model";
   mkdir(optimModelPath.c_str(), 0777);
-  auto predictor = CreateTestPredictor(
-      reinterpret_cast<const PaddlePredictor::Config *>(&cfg),
-      FLAGS_use_analysis);
-  (static_cast<AnalysisPredictor *>(predictor.get()))
-      ->SaveOptimModel(optimModelPath);
+  SetConfig(&cfg);
+  SaveOptimModel(&cfg, optimModelPath);
 }
 
 void CompareOptimAndOrig(const PaddlePredictor::Config *orig_config,
diff --git a/paddle/fluid/inference/tests/api/analyzer_resnet50_tester.cc b/paddle/fluid/inference/tests/api/analyzer_resnet50_tester.cc
index 588c80aa607c8d79365bbdfbb42a3d3c7667dbb2..e883ad5bfcf678a75eb24e1d402b09b55786fbbc 100644
--- a/paddle/fluid/inference/tests/api/analyzer_resnet50_tester.cc
+++ b/paddle/fluid/inference/tests/api/analyzer_resnet50_tester.cc
@@ -33,9 +33,7 @@ void SetInput(std::vector<std::vector<PaddleTensor>> *inputs) {
 }
 
 void SetOptimConfig(AnalysisConfig *cfg) {
-  std::string optimModelPath =
-      FLAGS_infer_model.substr(0, FLAGS_infer_model.find_last_of("/")) +
-      "/saved_optim_model";
+  std::string optimModelPath = FLAGS_infer_model + "/saved_optim_model";
   cfg->SetModel(optimModelPath + "/model", optimModelPath + "/params");
   cfg->DisableGpu();
   cfg->SwitchIrOptim();
@@ -107,16 +105,10 @@ TEST(Analyzer_resnet50, compare_determine) {
 // Save optim model
 TEST(Analyzer_resnet50, save_optim_model) {
   AnalysisConfig cfg;
-  SetConfig(&cfg);
-  std::string optimModelPath =
-      FLAGS_infer_model.substr(0, FLAGS_infer_model.find_last_of("/")) +
-      "/saved_optim_model";
+  std::string optimModelPath = FLAGS_infer_model + "/saved_optim_model";
   mkdir(optimModelPath.c_str(), 0777);
-  auto predictor = CreateTestPredictor(
-      reinterpret_cast<const PaddlePredictor::Config *>(&cfg),
-      FLAGS_use_analysis);
-  (static_cast<AnalysisPredictor *>(predictor.get()))
-      ->SaveOptimModel(optimModelPath);
+  SetConfig(&cfg);
+  SaveOptimModel(&cfg, optimModelPath);
 }
 
 void CompareOptimAndOrig(const PaddlePredictor::Config *orig_config,
diff --git a/paddle/fluid/inference/tests/api/analyzer_save_model_tester.cc b/paddle/fluid/inference/tests/api/analyzer_save_model_tester.cc
new file mode 100644
index 0000000000000000000000000000000000000000..578b420ea924754999640925a6b5f3fe524d7668
--- /dev/null
+++ b/paddle/fluid/inference/tests/api/analyzer_save_model_tester.cc
@@ -0,0 +1,49 @@
+// Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "paddle/fluid/inference/tests/api/tester_helper.h"
+
+namespace paddle {
+namespace inference {
+
+void SetConfig(AnalysisConfig *cfg) {
+  cfg->SwitchSpecifyInputNames();
+  cfg->SwitchIrOptim(true);
+  cfg->SwitchIrDebug();
+}
+
+int GetNumOps(const AnalysisConfig &cfg) {
+  int num_ops;
+  auto predictor = CreatePaddlePredictor(cfg);
+  GetFuseStatis(static_cast<AnalysisPredictor *>(predictor.get()), &num_ops);
+  return num_ops;
+}
+
+TEST(Analyzer, save_model) {
+  AnalysisConfig cfg;
+  SetConfig(&cfg);
+  cfg.SetModel(FLAGS_infer_model + "/__model__", FLAGS_infer_model + "/param");
+  std::string optimModelPath = FLAGS_infer_model + "/saved_optim_model";
+  mkdir(optimModelPath.c_str(), 0777);
+  SaveOptimModel(&cfg, optimModelPath);
+
+  cfg.pass_builder()->ClearPasses();
+  int origin_num_ops = GetNumOps(cfg);
+  cfg.SetModel(optimModelPath + "/model", optimModelPath + "/params");
+  int fused_num_ops = GetNumOps(cfg);
+  CHECK_LE(fused_num_ops, origin_num_ops);
+}
+
+}  // namespace inference
+}  // namespace paddle
diff --git a/paddle/fluid/inference/tests/api/tester_helper.h b/paddle/fluid/inference/tests/api/tester_helper.h
index d13469a8482304d04b99c96e70bac5c8b90e4043..10fc7556994b93776ed15184ba17820cebae07a0 100644
--- a/paddle/fluid/inference/tests/api/tester_helper.h
+++ b/paddle/fluid/inference/tests/api/tester_helper.h
@@ -552,6 +552,13 @@ void CompareAnalysisAndZeroCopy(
   CompareResult(analysis_outputs, zerocopy_outputs);
 }
 
+void SaveOptimModel(AnalysisConfig *cfg, const std::string &dstPath) {
+  auto predictor = CreateTestPredictor(
+      reinterpret_cast<const PaddlePredictor::Config *>(cfg),
+      FLAGS_use_analysis);
+  (static_cast<AnalysisPredictor *>(predictor.get()))->SaveOptimModel(dstPath);
+}
+
 template <typename T>
 std::string LoDTensorSummary(const framework::LoDTensor &tensor) {
   std::stringstream ss;
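
Note (not part of the patch): SaveOptimModel() writes the post-IR-pass program in the combined format, one "model" file for the program and one "params" file for the weights, which is why the testers reload it via cfg->SetModel(optimModelPath + "/model", optimModelPath + "/params"). A minimal standalone consumer could look like the sketch below; the public header name and the "saved_optim_model" directory are assumptions, and IR optimization is switched off on reload since the saved graph has already been through the passes.

#include <memory>
#include "paddle_inference_api.h"  // public inference API header (name assumed)

int main() {
  paddle::AnalysisConfig config;
  // Combined-format model produced by SaveOptimModel(): a program file
  // ("model") and a weights file ("params") in the same directory.
  config.SetModel("saved_optim_model/model", "saved_optim_model/params");
  config.DisableGpu();
  // The saved program is already optimized; rerunning the IR passes on
  // load is unnecessary.
  config.SwitchIrOptim(false);
  std::unique_ptr<paddle::PaddlePredictor> predictor =
      paddle::CreatePaddlePredictor(config);
  return predictor != nullptr ? 0 : 1;
}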