From 353b5f06a768aad47564b2d37c1aac408fe35ce3 Mon Sep 17 00:00:00 2001 From: luotao1 Date: Wed, 23 Jan 2019 16:22:17 +0800 Subject: [PATCH] refine analyzer_bert_test to pass the ci test=develop --- .../tests/api/analyzer_bert_tester.cc | 69 +++++++++++++------ 1 file changed, 47 insertions(+), 22 deletions(-) diff --git a/paddle/fluid/inference/tests/api/analyzer_bert_tester.cc b/paddle/fluid/inference/tests/api/analyzer_bert_tester.cc index 709d51388d9..aced71b7747 100644 --- a/paddle/fluid/inference/tests/api/analyzer_bert_tester.cc +++ b/paddle/fluid/inference/tests/api/analyzer_bert_tester.cc @@ -12,17 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -#include -#include -#include -#include -#include -#include -#include -#include -#include "paddle/fluid/inference/api/paddle_inference_api.h" - -DEFINE_int32(repeat, 1, "repeat"); +#include "paddle/fluid/inference/tests/api/tester_helper.h" namespace paddle { namespace inference { @@ -166,16 +156,17 @@ bool LoadInputData(std::vector> *inputs) { std::ifstream fin(FLAGS_infer_data); std::string line; + int sample = 0; - int lineno = 0; + // The unit-test dataset only have 10 samples, each sample have 5 feeds. while (std::getline(fin, line)) { std::vector feed_data; - if (!ParseLine(line, &feed_data)) { - LOG(ERROR) << "Parse line[" << lineno << "] error!"; - } else { - inputs->push_back(std::move(feed_data)); - } + ParseLine(line, &feed_data); + inputs->push_back(std::move(feed_data)); + sample++; + if (!FLAGS_test_all_data && sample == FLAGS_batch_size) break; } + LOG(INFO) << "number of samples: " << sample; return true; } @@ -199,19 +190,53 @@ void profile(bool use_mkldnn = false) { inputs, &outputs, FLAGS_num_threads); } +TEST(Analyzer_bert, profile) { profile(); } +#ifdef PADDLE_WITH_MKLDNN +TEST(Analyzer_bert, profile_mkldnn) { profile(true); } +#endif + +// Check the fuse status +TEST(Analyzer_bert, fuse_statis) { + AnalysisConfig cfg; + SetConfig(&cfg); + int num_ops; + auto predictor = CreatePaddlePredictor(cfg); + auto fuse_statis = GetFuseStatis( + static_cast(predictor.get()), &num_ops); + LOG(INFO) << "num_ops: " << num_ops; +} + +// Compare result of NativeConfig and AnalysisConfig void compare(bool use_mkldnn = false) { - AnalysisConfig config; - SetConfig(&config); + AnalysisConfig cfg; + SetConfig(&cfg); + if (use_mkldnn) { + cfg.EnableMKLDNN(); + } std::vector> inputs; LoadInputData(&inputs); CompareNativeAndAnalysis( - reinterpret_cast(&config), inputs); + reinterpret_cast(&cfg), inputs); } -TEST(Analyzer_bert, profile) { profile(); } +TEST(Analyzer_bert, compare) { compare(); } #ifdef PADDLE_WITH_MKLDNN -TEST(Analyzer_bert, profile_mkldnn) { profile(true); } +TEST(Analyzer_bert, compare_mkldnn) { compare(true /* use_mkldnn */); } #endif + +// Compare Deterministic result +// TODO(luotao): Since each unit-test on CI only have 10 minutes, cancel this to +// decrease the CI time. +// TEST(Analyzer_bert, compare_determine) { +// AnalysisConfig cfg; +// SetConfig(&cfg); +// +// std::vector> inputs; +// LoadInputData(&inputs); +// CompareDeterministic(reinterpret_cast(&cfg), +// inputs); +// } } // namespace inference } // namespace paddle -- GitLab