From 21612be768cd57a69af355c7282d5b5f42f04c95 Mon Sep 17 00:00:00 2001
From: Allen Guo
Date: Mon, 10 Oct 2022 15:56:39 +0800
Subject: [PATCH] rm fp16 dtype_check (#46739)

---
 .../unittests/ipu/test_mixed_precision_inference_ipu.py | 7 -------
 .../unittests/ipu/test_mixed_precision_training_ipu.py  | 7 -------
 2 files changed, 14 deletions(-)

diff --git a/python/paddle/fluid/tests/unittests/ipu/test_mixed_precision_inference_ipu.py b/python/paddle/fluid/tests/unittests/ipu/test_mixed_precision_inference_ipu.py
index 21bcb7b731..7118466a52 100644
--- a/python/paddle/fluid/tests/unittests/ipu/test_mixed_precision_inference_ipu.py
+++ b/python/paddle/fluid/tests/unittests/ipu/test_mixed_precision_inference_ipu.py
@@ -43,12 +43,6 @@ class TestBase(IPUOpTest):
         self.feed_shape = [x.shape for x in self.feed_fp32.values()]
         self.feed_list = list(self.feed_fp32.keys())
 
-    def dtype_check(self, program, to_fp16_var_names):
-        block = program.global_block()
-        assert len(to_fp16_var_names) > 0
-        for var_name in to_fp16_var_names:
-            assert (block.var(var_name).dtype, paddle.float16)
-
     def set_attrs(self):
         self.num_ipus = 1
         self.enable_pipelining = False
@@ -84,7 +78,6 @@ class TestBase(IPUOpTest):
         amp_list.unsupported_list = {}
         to_fp16_var_names = paddle.static.amp.cast_model_to_fp16(
             self.main_prog, amp_list, use_fp16_guard=True)
-        self.dtype_check(self.main_prog, to_fp16_var_names)
 
         if self.is_ipu_mode(exec_mode):
             place = paddle.CPUPlace()
diff --git a/python/paddle/fluid/tests/unittests/ipu/test_mixed_precision_training_ipu.py b/python/paddle/fluid/tests/unittests/ipu/test_mixed_precision_training_ipu.py
index a733a26d60..51a0e91a29 100644
--- a/python/paddle/fluid/tests/unittests/ipu/test_mixed_precision_training_ipu.py
+++ b/python/paddle/fluid/tests/unittests/ipu/test_mixed_precision_training_ipu.py
@@ -55,12 +55,6 @@ class TestBase(IPUOpTest):
         self.enable_manual_shard = False
         self.batches_per_step = 1
 
-    def dtype_check(self, program, to_fp16_var_names):
-        block = program.global_block()
-        assert len(to_fp16_var_names) > 0
-        for var_name in to_fp16_var_names:
-            assert (block.var(var_name).dtype, paddle.float16)
-
     @IPUOpTest.static_graph
     def build_model(self):
         x = paddle.static.data(name=self.feed_list[0],
@@ -94,7 +88,6 @@ class TestBase(IPUOpTest):
         amp_list.unsupported_list = {}
         to_fp16_var_names = paddle.static.amp.cast_model_to_fp16(
             self.main_prog, amp_list)
-        self.dtype_check(self.main_prog, to_fp16_var_names)
 
         if self.is_ipu_mode(exec_mode):
             place = paddle.CPUPlace()
--
GitLab
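
Note: the deleted dtype_check helper's per-variable assert evaluated the
two-element tuple (block.var(var_name).dtype, paddle.float16), which is
always truthy, so that assertion could never fail regardless of the
variable's actual dtype. For reference, a minimal sketch of a helper that
does compare dtypes, assuming the same static-graph Program and
paddle.float16 dtype used in these tests (this helper is hypothetical and
not part of the patch, which simply removes the check):

    import paddle

    def dtype_check(program, to_fp16_var_names):
        # Hypothetical fix: compare dtypes with ==, instead of asserting a
        # (dtype, paddle.float16) tuple, which is truthy by construction.
        block = program.global_block()
        assert len(to_fp16_var_names) > 0
        for var_name in to_fp16_var_names:
            assert block.var(var_name).dtype == paddle.float16, (
                "%s was not cast to float16" % var_name)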