From 5504425eb32d1e2263e5bcf45fa2a3dc5ced0b3c Mon Sep 17 00:00:00 2001
From: luotao1
Date: Tue, 29 Jan 2019 12:09:46 +0800
Subject: [PATCH] fix compiler error, use len20 dataset for bert

test=develop
---
 .../fluid/inference/tests/api/CMakeLists.txt  |  8 +++---
 .../tests/api/analyzer_bert_tester.cc         | 28 ++++++++-----------
 .../tests/api/analyzer_rnn1_tester.cc         |  1 -
 3 files changed, 15 insertions(+), 22 deletions(-)

diff --git a/paddle/fluid/inference/tests/api/CMakeLists.txt b/paddle/fluid/inference/tests/api/CMakeLists.txt
index b0f7dcc0d..aa3da397f 100644
--- a/paddle/fluid/inference/tests/api/CMakeLists.txt
+++ b/paddle/fluid/inference/tests/api/CMakeLists.txt
@@ -128,10 +128,10 @@ inference_analysis_api_test_with_fake_data(test_analyzer_resnet50
 inference_analysis_api_test_with_fake_data(test_analyzer_mobilenet_depthwise_conv
    "${INFERENCE_DEMO_INSTALL_DIR}/mobilenet_depthwise_conv" analyzer_resnet50_tester.cc "mobilenet_model.tar.gz" SERIAL)
 
-# bert
-set(BERT_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/bert")
-download_model_and_data(${BERT_INSTALL_DIR} "bert_model.tar.gz" "bert_data.txt.tar.gz")
-inference_analysis_api_test(test_analyzer_bert ${BERT_INSTALL_DIR} analyzer_bert_tester.cc)
+# bert, max_len=20
+set(BERT_INSTALL_DIR "${INFERENCE_DEMO_INSTALL_DIR}/bert20")
+download_model_and_data(${BERT_INSTALL_DIR} "bert_model.tar.gz" "bert_data_len20.txt.tar.gz")
+inference_analysis_api_test(test_analyzer_bert ${BERT_INSTALL_DIR} analyzer_bert_tester.cc SERIAL)
 
 # anakin
 if (WITH_ANAKIN AND WITH_MKL) # only needed in CI
diff --git a/paddle/fluid/inference/tests/api/analyzer_bert_tester.cc b/paddle/fluid/inference/tests/api/analyzer_bert_tester.cc
index 24cbd39ea..f646fd6d9 100644
--- a/paddle/fluid/inference/tests/api/analyzer_bert_tester.cc
+++ b/paddle/fluid/inference/tests/api/analyzer_bert_tester.cc
@@ -18,7 +18,6 @@ namespace paddle {
 namespace inference {
 
 using paddle::PaddleTensor;
-using paddle::contrib::AnalysisConfig;
 
 template <typename T>
 void GetValueFromStream(std::stringstream *ss, T *t) {
@@ -158,12 +157,10 @@ bool LoadInputData(std::vector<std::vector<PaddleTensor>> *inputs) {
   return true;
 }
 
-void SetConfig(contrib::AnalysisConfig *config) {
-  config->SetModel(FLAGS_infer_model);
-}
+void SetConfig(AnalysisConfig *config) { config->SetModel(FLAGS_infer_model); }
 
 void profile(bool use_mkldnn = false) {
-  contrib::AnalysisConfig config;
+  AnalysisConfig config;
   SetConfig(&config);
 
   if (use_mkldnn) {
@@ -213,17 +210,14 @@ TEST(Analyzer_bert, compare_mkldnn) { compare(true /* use_mkldnn */); }
 #endif
 
 // Compare Deterministic result
-// TODO(luotao): Since each unit-test on CI only have 10 minutes, cancel this to
-// decrease the CI time.
-// TEST(Analyzer_bert, compare_determine) {
-//   AnalysisConfig cfg;
-//   SetConfig(&cfg);
-//
-//   std::vector<std::vector<PaddleTensor>> inputs;
-//   LoadInputData(&inputs);
-//   CompareDeterministic(reinterpret_cast<const PaddlePredictor::Config *>(&cfg),
-//                        inputs);
-// }
-
+TEST(Analyzer_bert, compare_determine) {
+  AnalysisConfig cfg;
+  SetConfig(&cfg);
+
+  std::vector<std::vector<PaddleTensor>> inputs;
+  LoadInputData(&inputs);
+  CompareDeterministic(reinterpret_cast<const PaddlePredictor::Config *>(&cfg),
+                       inputs);
+}
 }  // namespace inference
 }  // namespace paddle
diff --git a/paddle/fluid/inference/tests/api/analyzer_rnn1_tester.cc b/paddle/fluid/inference/tests/api/analyzer_rnn1_tester.cc
index 5ab857705..c27c39f40 100644
--- a/paddle/fluid/inference/tests/api/analyzer_rnn1_tester.cc
+++ b/paddle/fluid/inference/tests/api/analyzer_rnn1_tester.cc
@@ -20,7 +20,6 @@ namespace paddle {
 namespace inference {
 
 using namespace framework;  // NOLINT
-using namespace contrib;  // NOLINT
 
 struct DataRecord {
   std::vector<std::vector<std::vector<float>>> link_step_data_all;
--
GitLab