Commit b5f617fa authored by Sylwester Fraczek

make mobilenet test reuse resnet50 test

Parent 1987d45e
paddle/fluid/inference/tests/api/CMakeLists.txt
@@ -83,13 +83,8 @@ inference_analysis_api_test_with_fake_data(test_analyzer_resnet50
     "${INFERENCE_DEMO_INSTALL_DIR}/resnet50" analyzer_resnet50_tester.cc "resnet50_model.tar.gz")
 
 # mobilenet
-set(MOBILENET_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/mobilenet")
-if (NOT EXISTS ${MOBILENET_INSTALL_DIR})
-  inference_download_and_uncompress(${MOBILENET_INSTALL_DIR} "http://paddle-inference-dist.bj.bcebos.com/tensorrt_test" "mobilenet.tar.gz")
-  file(RENAME ${MOBILENET_INSTALL_DIR}/mobilenet/__model__ ${MOBILENET_INSTALL_DIR}/mobilenet/model)
-endif()
-inference_analysis_test(test_analyzer_mobilenet SRCS analyzer_mobilenet_tester.cc
-  EXTRA_DEPS ${INFERENCE_EXTRA_DEPS} ARGS --infer_model=${MOBILENET_INSTALL_DIR}/mobilenet)
+inference_analysis_api_test_with_fake_data(test_analyzer_mobilenet
+    "${INFERENCE_DEMO_INSTALL_DIR}/mobilenet" analyzer_resnet50_tester.cc "mobilenet_model.tar.gz")
 
 # anakin
 if (WITH_ANAKIN AND WITH_MKL) # only needed in CI
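The new inference_analysis_api_test_with_fake_data call registers the mobilenet test by pointing the existing analyzer_resnet50_tester.cc source at a different model archive, replacing the hand-rolled download-and-rename block removed above. Here is a minimal sketch of what such a helper could look like, composed only from calls already visible in this diff; the INFERENCE_URL variable and the exact argument handling are assumptions, not the helper's real definition:

# Hypothetical sketch, for illustration only; not the actual helper definition.
function(inference_analysis_api_test_with_fake_data target install_dir filename model_name)
  if (NOT EXISTS ${install_dir})
    # INFERENCE_URL is an assumed variable holding the model download base URL.
    inference_download_and_uncompress(${install_dir} "${INFERENCE_URL}" ${model_name})
  endif()
  inference_analysis_test(${target} SRCS ${filename}
    EXTRA_DEPS ${INFERENCE_EXTRA_DEPS}
    ARGS --infer_model=${install_dir}/model)
endfunction()

With the CMake side parameterized, the dedicated mobilenet tester becomes redundant, and the commit deletes it:

paddle/fluid/inference/tests/api/analyzer_mobilenet_tester.cc (deleted)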
/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include <fstream>
#include <iostream>
#include "paddle/fluid/inference/tests/api/tester_helper.h"

namespace paddle {
namespace inference {
namespace analysis {

void SetConfig(AnalysisConfig *cfg) {
  cfg->model_dir = FLAGS_infer_model;
  cfg->use_gpu = false;
  cfg->device = 0;
  cfg->enable_ir_optim = true;
  cfg->specify_input_name = true;
}

void SetInput(std::vector<std::vector<PaddleTensor>> *inputs) {
  SetFakeImageInput(inputs, FLAGS_infer_model);
}

// Easy for profiling independently.
void profile(bool use_mkldnn = false) {
  AnalysisConfig cfg;
  SetConfig(&cfg);
  cfg._use_mkldnn = use_mkldnn;
  std::vector<PaddleTensor> outputs;

  std::vector<std::vector<PaddleTensor>> input_slots_all;
  SetInput(&input_slots_all);
  TestPrediction(cfg, input_slots_all, &outputs, FLAGS_num_threads);
}

TEST(Analyzer_mobilenet, profile) { profile(); }

#ifdef PADDLE_WITH_MKLDNN
TEST(Analyzer_mobilenet, profile_mkldnn) { profile(true /* use_mkldnn */); }
#endif

// Check the fuse statistics of the depthwise_conv pass.
TEST(Analyzer_mobilenet, depthwise_conv_statis) {
  AnalysisConfig cfg;
  SetConfig(&cfg);
  cfg._use_mkldnn = true;

  int num_ops;
  auto predictor = CreatePaddlePredictor<AnalysisConfig>(cfg);
  auto fuse_statis = GetFuseStatis(
      static_cast<AnalysisPredictor *>(predictor.get()), &num_ops);
  LOG(INFO) << "num_ops: " << num_ops;
}

// Compare results of NativeConfig and AnalysisConfig.
void compare(bool use_mkldnn = false) {
  AnalysisConfig cfg;
  SetConfig(&cfg);
  cfg._use_mkldnn = use_mkldnn;

  std::vector<std::vector<PaddleTensor>> input_slots_all;
  SetInput(&input_slots_all);
  CompareNativeAndAnalysis(cfg, input_slots_all);
}

TEST(Analyzer_mobilenet, compare) { compare(); }

#ifdef PADDLE_WITH_MKLDNN
TEST(Analyzer_mobilenet, compare_mkldnn) { compare(true /* use_mkldnn */); }
#endif

}  // namespace analysis
}  // namespace inference
}  // namespace paddle
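Net effect: the mobilenet-specific tester is removed, but coverage is preserved because analyzer_resnet50_tester.cc builds its inputs with SetFakeImageInput, which takes the model path and so can presumably derive fake input shapes for whichever model --infer_model points at; the same source therefore exercises both resnet50 and mobilenet, and the mobilenet run is still registered as test_analyzer_mobilenet.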