From 61ef56a13f609cafe88c32f7aa589124f9211816 Mon Sep 17 00:00:00 2001
From: heliqi
Date: Tue, 21 Dec 2021 11:22:01 +0800
Subject: [PATCH] PassAutoScan: run the baseline with the same config as the
 tested predictor (#38252)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* add timeout

* add timeout

* PassAutoScan baseline uses the same config

* try running the baseline

* fix dropout Mask output attr error

* fix dropout Mask output attr error
---
 .../unittests/ir/inference/auto_scan_test.py  | 48 +++++++++----------
 ...test_fc_elementwise_layernorm_fuse_pass.py |  3 +-
 ...t_simplify_with_basic_ops_pass_autoscan.py |  6 ++-
 3 files changed, 28 insertions(+), 29 deletions(-)

diff --git a/python/paddle/fluid/tests/unittests/ir/inference/auto_scan_test.py b/python/paddle/fluid/tests/unittests/ir/inference/auto_scan_test.py
index fa09ef19977..08c634d58ca 100755
--- a/python/paddle/fluid/tests/unittests/ir/inference/auto_scan_test.py
+++ b/python/paddle/fluid/tests/unittests/ir/inference/auto_scan_test.py
@@ -330,8 +330,7 @@ class PassAutoScanTest(AutoScanTest):
                        reproduce=None,
                        min_success_num=25,
                        max_duration=180,
-                       passes=None,
-                       use_gpu_run_baseline=False):
+                       passes=None):
         if os.getenv('HYPOTHESIS_TEST_PROFILE', 'ci') == "dev":
             max_examples *= 10
             min_success_num *= 10
@@ -356,10 +355,7 @@ class PassAutoScanTest(AutoScanTest):
             return self.sample_program_config(draw)
 
         def run_test(prog_config):
-            return self.run_test(
-                quant=quant,
-                prog_configs=[prog_config],
-                use_gpu_run_baseline=use_gpu_run_baseline)
+            return self.run_test(quant=quant, prog_configs=[prog_config])
 
         generator = st.composite(program_generator)
         loop_func = given(generator())(run_test)
@@ -396,10 +392,7 @@ class PassAutoScanTest(AutoScanTest):
                 format(max_duration))
             assert False
 
-    def run_test(self,
-                 quant=False,
-                 prog_configs=None,
-                 use_gpu_run_baseline=False):
+    def run_test(self, quant=False, prog_configs=None):
         status = True
 
         for prog_config in prog_configs:
@@ -418,22 +411,13 @@ class PassAutoScanTest(AutoScanTest):
                     'data': tensor_config.data,
                     'lod': tensor_config.lod
                 }
-            results: List[Dict[str, np.ndarray]] = []
-
-            # baseline: cpu no ir_optim run
-            base_config = self.create_inference_config(
-                ir_optim=False, use_gpu=use_gpu_run_baseline)
             logging.info('RUN program_config: ' + str(prog_config))
-            results.append(
-                self.run_test_config(model, params, prog_config, base_config,
-                                     feed_data))
-            self.success_log('RUN_CPU_BASELINE done')
-
             self.num_predictor_kinds = 0
             for pred_config, op_list, (
                     atol, rtol) in self.sample_predictor_configs(prog_config):
                 self.num_predictor_kinds += 1
+
                 # skip info
                 ignore_flag = False
                 for ignore_info in self.ignore_cases:
@@ -454,12 +438,26 @@ class PassAutoScanTest(AutoScanTest):
                     if not os.path.exists(self.cache_dir):
                         os.mkdir(self.cache_dir)
 
+                # baseline: no ir_optim run
+                base_config = self.create_inference_config(
+                    ir_optim=False,
+                    use_gpu=pred_config.use_gpu(),
+                    use_mkldnn=pred_config.mkldnn_enabled(), )
                 try:
-                    results.append(
-                        self.run_test_config(model, params, prog_config,
-                                             pred_config, feed_data))
-                    self.assert_tensors_near(atol, rtol, results[-1],
-                                             results[0])
+                    # baseline
+                    base_result = self.run_test_config(
+                        model, params, prog_config, base_config, feed_data)
+                    self.success_log('RUN_BASELINE ' +
+                                     self.inference_config_str(
+                                         base_config) + ' done')
+
+                    if os.path.exists(self.cache_dir):
+                        shutil.rmtree(self.cache_dir)
+
+                    pred_result = self.run_test_config(
+                        model, params, prog_config, pred_config, feed_data)
+                    self.assert_tensors_near(atol, rtol, pred_result,
+                                             base_result)
                     if not ignore_flag:
                         self.assert_op_list(op_list)
 
diff --git a/python/paddle/fluid/tests/unittests/ir/inference/test_fc_elementwise_layernorm_fuse_pass.py b/python/paddle/fluid/tests/unittests/ir/inference/test_fc_elementwise_layernorm_fuse_pass.py
index 2ccb9de5d54..26f91092d2a 100644
--- a/python/paddle/fluid/tests/unittests/ir/inference/test_fc_elementwise_layernorm_fuse_pass.py
+++ b/python/paddle/fluid/tests/unittests/ir/inference/test_fc_elementwise_layernorm_fuse_pass.py
@@ -127,8 +127,7 @@ class TestFCElementwiseLayerNormFusePass(PassAutoScanTest):
         self.run_and_statis(
             quant=False,
             max_examples=300,
-            passes=["fc_elementwise_layernorm_fuse_pass"],
-            use_gpu_run_baseline=True)
+            passes=["fc_elementwise_layernorm_fuse_pass"])
 
 
 if __name__ == "__main__":
diff --git a/python/paddle/fluid/tests/unittests/ir/inference/test_simplify_with_basic_ops_pass_autoscan.py b/python/paddle/fluid/tests/unittests/ir/inference/test_simplify_with_basic_ops_pass_autoscan.py
index 03e9feb418a..cb55dc64445 100644
--- a/python/paddle/fluid/tests/unittests/ir/inference/test_simplify_with_basic_ops_pass_autoscan.py
+++ b/python/paddle/fluid/tests/unittests/ir/inference/test_simplify_with_basic_ops_pass_autoscan.py
@@ -46,7 +46,8 @@ class TestSimplifyWithBasicOpsPassUpscale(PassAutoScanTest):
         dropout_op = OpConfig(
             "dropout",
             inputs={"X": ["input_data"]},
-            outputs={"Out": ["dropout_output"]},
+            outputs={"Out": ["dropout_output"],
+                     "Mask": ["mask"]},
             fix_seed=fix_seed,
             dropout_implementation=dropout_implementation,
             dropout_prob=dropout_prob,
@@ -107,7 +108,8 @@ class TestSimplifyWithBasicOpsPassDowngrade(PassAutoScanTest):
         dropout_op = OpConfig(
             "dropout",
             inputs={"X": ["input_data"]},
-            outputs={"Out": ["dropout_output"]},
+            outputs={"Out": ["dropout_output"],
+                     "Mask": ["mask"]},
             fix_seed=fix_seed,
             dropout_implementation=dropout_implementation,
             dropout_prob=dropout_prob,
-- 
GitLab
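
Editor's note (not part of the patch above): the key change in auto_scan_test.py is that the baseline run no longer uses a fixed CPU, no-IR-optim config. It now mirrors the device settings (use_gpu / use_mkldnn) of each predictor config under test and only switches IR optimization off, so the comparison isolates the effect of the pass itself. The sketch below illustrates that pattern with hypothetical stand-ins (InferenceConfig, make_baseline_config, check_outputs_close); it does not use the real Paddle inference API and is only meant to show the idea.

# Illustrative sketch only -- NOT part of the patch and NOT the real Paddle
# inference API. It mimics the idea introduced in auto_scan_test.py: the
# baseline run copies the device settings of the predictor config under test
# and only disables IR optimization, so the two runs differ solely in whether
# the pass pipeline is applied.
from dataclasses import dataclass, replace
from typing import Dict

import numpy as np


@dataclass
class InferenceConfig:            # hypothetical stand-in for a predictor config
    use_gpu: bool = False
    use_mkldnn: bool = False
    ir_optim: bool = True         # whether IR passes (the thing under test) run


def make_baseline_config(pred_config: InferenceConfig) -> InferenceConfig:
    """Same device placement as the tested config, but with IR passes off."""
    return replace(pred_config, ir_optim=False)


def check_outputs_close(baseline: Dict[str, np.ndarray],
                        optimized: Dict[str, np.ndarray],
                        atol: float, rtol: float) -> None:
    """Rough analogue of assert_tensors_near: every output must match."""
    assert baseline.keys() == optimized.keys()
    for name in baseline:
        np.testing.assert_allclose(
            optimized[name], baseline[name], atol=atol, rtol=rtol,
            err_msg="output '%s' diverged from the baseline" % name)


if __name__ == "__main__":
    pred_config = InferenceConfig(use_gpu=True, ir_optim=True)
    base_config = make_baseline_config(pred_config)
    assert base_config.use_gpu == pred_config.use_gpu and not base_config.ir_optim

    # Fake results standing in for the outputs of the two inference runs.
    out = {"y": np.ones((2, 3), dtype=np.float32)}
    check_outputs_close(out, {"y": out["y"] + 1e-6}, atol=1e-5, rtol=1e-5)
    print("baseline and optimized outputs match within tolerance")

Running the sketch simply confirms that the "optimized" outputs stay within atol/rtol of the baseline, which is the same acceptance criterion assert_tensors_near enforces in the real test.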