diff --git a/paddle/fluid/inference/tests/api/analyzer_mm_dnn_tester.cc b/paddle/fluid/inference/tests/api/analyzer_mm_dnn_tester.cc
index 8aaab6d6649e1d4b6db7695df0e9dd219c89422c..2a61a30c726cceb4c0dcedee6e0422f8a9885e05 100644
--- a/paddle/fluid/inference/tests/api/analyzer_mm_dnn_tester.cc
+++ b/paddle/fluid/inference/tests/api/analyzer_mm_dnn_tester.cc
@@ -116,11 +116,15 @@ void SetInput(std::vector<std::vector<PaddleTensor>> *inputs) {
 }
 
 // Easy for profiling independently.
-TEST(Analyzer_MM_DNN, profile) {
+void profile(bool use_mkldnn = false) {
   contrib::AnalysisConfig cfg;
   SetConfig(&cfg);
   std::vector<PaddleTensor> outputs;
 
+  if (use_mkldnn) {
+    cfg.EnableMKLDNN();
+  }
+
   std::vector<std::vector<PaddleTensor>> input_slots_all;
   SetInput(&input_slots_all);
   TestPrediction(reinterpret_cast<const PaddlePredictor::Config *>(&cfg),
@@ -141,6 +145,11 @@ TEST(Analyzer_MM_DNN, profile) {
   }
 }
 
+TEST(Analyzer_MM_DNN, profile) { profile(); }
+#ifdef PADDLE_WITH_MKLDNN
+TEST(Analyzer_MM_DNN, profile_mkldnn) { profile(true /* use_mkldnn */); }
+#endif
+
 // Check the fuse status
 TEST(Analyzer_MM_DNN, fuse_statis) {
   contrib::AnalysisConfig cfg;
@@ -153,16 +162,25 @@ TEST(Analyzer_MM_DNN, fuse_statis) {
 }
 
 // Compare result of NativeConfig and AnalysisConfig
-TEST(Analyzer_MM_DNN, compare) {
+void compare(bool use_mkldnn = false) {
   contrib::AnalysisConfig cfg;
   SetConfig(&cfg);
 
+  if (use_mkldnn) {
+    cfg.EnableMKLDNN();
+  }
+
   std::vector<std::vector<PaddleTensor>> input_slots_all;
   SetInput(&input_slots_all);
   CompareNativeAndAnalysis(
       reinterpret_cast<const PaddlePredictor::Config *>(&cfg), input_slots_all);
 }
 
+TEST(Analyzer_MM_DNN, compare) { compare(); }
+#ifdef PADDLE_WITH_MKLDNN
+TEST(Analyzer_MM_DNN, compare_mkldnn) { compare(true /* use_mkldnn */); }
+#endif
+
 // Compare Deterministic result
 TEST(Analyzer_MM_DNN, compare_determine) {
   AnalysisConfig cfg;