diff --git a/python/paddle/fluid/tests/unittests/ipu/test_flip_op_ipu.py b/python/paddle/fluid/tests/unittests/ipu/test_flip_op_ipu.py
index 179f30cb1e8cf74359feae7212f8b0bbef2c7051..f9cca5102084227779dfd59d0e0762d187776d32 100644
--- a/python/paddle/fluid/tests/unittests/ipu/test_flip_op_ipu.py
+++ b/python/paddle/fluid/tests/unittests/ipu/test_flip_op_ipu.py
@@ -80,8 +80,8 @@ class TestCase1(TestBase):
 class TestCase2(TestBase):
     def set_feed(self):
         data = np.random.randint(0, 2, size=[4, 3, 2, 2])
-        self.feed_fp32 = {'x': data.astype(np.bool)}
-        self.feed_fp16 = {'x': data.astype(np.bool)}
+        self.feed_fp32 = {'x': data.astype(np.bool_)}
+        self.feed_fp16 = {'x': data.astype(np.bool_)}


 if __name__ == "__main__":
diff --git a/python/paddle/fluid/tests/unittests/ir/inference/test_trt_convert_compare_and_logical.py b/python/paddle/fluid/tests/unittests/ir/inference/test_trt_convert_compare_and_logical.py
index 21b51beded08e08288dd55dad3e38f862126af96..e59b9a0cd416cdc966cea806cec7c5947fc37aee 100755
--- a/python/paddle/fluid/tests/unittests/ir/inference/test_trt_convert_compare_and_logical.py
+++ b/python/paddle/fluid/tests/unittests/ir/inference/test_trt_convert_compare_and_logical.py
@@ -46,14 +46,14 @@ class TrtConvertLogicalTest(TrtLayerAutoScanTest):
                 "op_inputs": {"X": ["input_data1"]},
                 "op_outputs": {"Out": ["cast_output_data1"]},
                 "op_attrs": dics[1],
-                "outputs_dtype": {"cast_output_data1": np.bool},
+                "outputs_dtype": {"cast_output_data1": np.bool_},
             },
             {
                 "op_type": "cast",
                 "op_inputs": {"X": ["input_data2"]},
                 "op_outputs": {"Out": ["cast_output_data3"]},
                 "op_attrs": dics[1],
-                "outputs_dtype": {"cast_output_data3": np.bool},
+                "outputs_dtype": {"cast_output_data3": np.bool_},
             },
             {
                 "op_type": op_type,
diff --git a/python/paddle/fluid/tests/unittests/ir/inference/test_trt_convert_fill_any_like.py b/python/paddle/fluid/tests/unittests/ir/inference/test_trt_convert_fill_any_like.py
index 64a63ef1ba376d1a6a3bb7b70eaa325203ac8099..40b0b829d8f9ce9ae9835e5f7ffa19867b5f814e 100644
--- a/python/paddle/fluid/tests/unittests/ir/inference/test_trt_convert_fill_any_like.py
+++ b/python/paddle/fluid/tests/unittests/ir/inference/test_trt_convert_fill_any_like.py
@@ -36,7 +36,7 @@ class TrtConvertExpandV2Test(TrtLayerAutoScanTest):
             if self.dims == 4:
                 self.input_shape = [1, 1, 4, 6]
                 if self.dtype == 0:
-                    return np.random.random([1, 1, 4, 6]).astype(np.bool)
+                    return np.random.random([1, 1, 4, 6]).astype(np.bool_)
                 elif self.dtype == 2 or self.dtype == -1:
                     return np.random.random([1, 1, 4, 6]).astype(np.int32)
                 elif self.dtype == 3:
diff --git a/python/paddle/fluid/tests/unittests/ir/inference/test_trt_convert_one_hot.py b/python/paddle/fluid/tests/unittests/ir/inference/test_trt_convert_one_hot.py
index 60e654bb95e5e7c377cc7e87eb568f3f5a281625..53574a3fd27dcdaff481ed0203a42706a3b17b48 100644
--- a/python/paddle/fluid/tests/unittests/ir/inference/test_trt_convert_one_hot.py
+++ b/python/paddle/fluid/tests/unittests/ir/inference/test_trt_convert_one_hot.py
@@ -59,7 +59,7 @@ class TrtConvertOneHotTest(TrtLayerAutoScanTest):
                 },
                 "op_outputs": {"Out": ["output_data"]},
                 "op_attrs": dics[0],
-                "outputs_dtype": {"output_data": np.int},
+                "outputs_dtype": {"output_data": np.int64},
             },
         ]
         ops = self.generate_op_config(ops_config)
diff --git a/python/paddle/fluid/tests/unittests/ir/inference/test_trt_convert_unary.py b/python/paddle/fluid/tests/unittests/ir/inference/test_trt_convert_unary.py
index 0fd95eaa29fc9a673d68e0717a10f6f17ed03ae7..673ab597659fe0134c2ed04c167fcdb3d566add3 100644
--- a/python/paddle/fluid/tests/unittests/ir/inference/test_trt_convert_unary.py
+++ b/python/paddle/fluid/tests/unittests/ir/inference/test_trt_convert_unary.py
@@ -193,7 +193,7 @@ class TrtConvertLogicalNotTest(TrtLayerAutoScanTest):
                 "op_inputs": {"X": ["input_data"]},
                 "op_outputs": {"Out": ["cast_output_data1"]},
                 "op_attrs": dics[1],
-                "outputs_dtype": {"cast_output_data1": np.bool},
+                "outputs_dtype": {"cast_output_data1": np.bool_},
             },
             {
                 "op_type": op_type,
@@ -202,7 +202,7 @@ class TrtConvertLogicalNotTest(TrtLayerAutoScanTest):
                 },
                 "op_outputs": {"Out": ["cast_output_data0"]},
                 "op_attrs": dics[0],
-                "outputs_dtype": {"cast_output_data0": np.bool},
+                "outputs_dtype": {"cast_output_data0": np.bool_},
             },
             {
                 "op_type": "cast",
diff --git a/python/paddle/fluid/tests/unittests/ir/inference/test_trt_convert_where.py b/python/paddle/fluid/tests/unittests/ir/inference/test_trt_convert_where.py
index 10a9fef067b44ae4b79216d6e849ed4d4053a789..269a8edae19b305eb16351fdd8f477a8b59a7931 100644
--- a/python/paddle/fluid/tests/unittests/ir/inference/test_trt_convert_where.py
+++ b/python/paddle/fluid/tests/unittests/ir/inference/test_trt_convert_where.py
@@ -73,7 +73,7 @@ class TrtConvertActivationTest(TrtLayerAutoScanTest):
                 "op_inputs": {"X": ["condition_data"]},
                 "op_outputs": {"Out": ["condition_data_bool"]},
                 "op_attrs": {"in_dtype": 5, "out_dtype": 0},
-                "outputs_dtype": {"condition_data_bool": np.bool},
+                "outputs_dtype": {"condition_data_bool": np.bool_},
             },
             {
                 "op_type": "where",
@@ -84,7 +84,7 @@ class TrtConvertActivationTest(TrtLayerAutoScanTest):
                 },
                 "op_outputs": {"Out": ["output_data"]},
                 "op_attrs": dics[0],
-                "outputs_dtype": {"condition_data_bool": np.bool},
+                "outputs_dtype": {"condition_data_bool": np.bool_},
             },
         ]
         ops = self.generate_op_config(ops_config)
diff --git a/python/paddle/fluid/tests/unittests/npu/test_assign_value_op_npu.py b/python/paddle/fluid/tests/unittests/npu/test_assign_value_op_npu.py
index 1df24e54a16b32a962c969f00f1f31006a4a4381..e5acef812ba801f91355587833f7c33cb9901006 100644
--- a/python/paddle/fluid/tests/unittests/npu/test_assign_value_op_npu.py
+++ b/python/paddle/fluid/tests/unittests/npu/test_assign_value_op_npu.py
@@ -70,7 +70,7 @@ class TestAssignValueNPUOp3(TestAssignValueNPUOp):
 class TestAssignValueNPUOp4(TestAssignValueNPUOp):
     def init_data(self):
         self.value = np.random.choice(a=[False, True], size=(2, 5)).astype(
-            np.bool
+            np.bool_
         )
         self.attrs["bool_values"] = [int(v) for v in self.value.flat]

@@ -116,7 +116,7 @@ class TestAssignApi4(TestAssignApi):
     def setUp(self):
         self.init_dtype()
         self.value = np.random.choice(a=[False, True], size=(2, 5)).astype(
-            np.bool
+            np.bool_
         )
         self.place = (
             fluid.NPUPlace(0)
diff --git a/python/paddle/fluid/tests/unittests/test_assign_value_op.py b/python/paddle/fluid/tests/unittests/test_assign_value_op.py
index c0a5554d39b97884117c508d93823f7250cb1934..e0b6bd8bd422f848c8d323685a85557bc6e46d21 100644
--- a/python/paddle/fluid/tests/unittests/test_assign_value_op.py
+++ b/python/paddle/fluid/tests/unittests/test_assign_value_op.py
@@ -60,7 +60,7 @@ class TestAssignValueOp3(TestAssignValueOp):
 class TestAssignValueOp4(TestAssignValueOp):
     def init_data(self):
         self.value = np.random.choice(a=[False, True], size=(2, 5)).astype(
-            np.bool
+            np.bool_
         )
         self.attrs["bool_values"] = [int(v) for v in self.value.flat]

@@ -106,7 +106,7 @@ class TestAssignApi4(TestAssignApi):
     def setUp(self):
         self.init_dtype()
         self.value = np.random.choice(a=[False, True], size=(2, 5)).astype(
-            np.bool
+            np.bool_
        )
         self.place = (
             fluid.CUDAPlace(0)
diff --git a/python/paddle/fluid/tests/unittests/test_fused_multi_transformer_int8_op.py b/python/paddle/fluid/tests/unittests/test_fused_multi_transformer_int8_op.py
index ab32d845151cf23256046ad33fd0162b8156ab10..991da5f41eb2e2040698a4a7d91c2060b6ae29e9 100644
--- a/python/paddle/fluid/tests/unittests/test_fused_multi_transformer_int8_op.py
+++ b/python/paddle/fluid/tests/unittests/test_fused_multi_transformer_int8_op.py
@@ -175,7 +175,7 @@ class TestFusedMultiTransformerInt8Op(unittest.TestCase):

         self.x_type = np.float32
         self.attn_mask_type = np.float64
-        # self.attn_mask_type = np.bool
+        # self.attn_mask_type = np.bool_

         self.pre_layer_norm = True
         self.has_attn_mask = True
diff --git a/python/paddle/fluid/tests/unittests/test_fused_multi_transformer_op.py b/python/paddle/fluid/tests/unittests/test_fused_multi_transformer_op.py
index f1b048102ebf702a957be60f7063ad4128260312..e3da925a01e4294d3556f09713a1fcea2da2cba9 100644
--- a/python/paddle/fluid/tests/unittests/test_fused_multi_transformer_op.py
+++ b/python/paddle/fluid/tests/unittests/test_fused_multi_transformer_op.py
@@ -109,7 +109,7 @@ class TestFusedMultiTransformerOp(OpTest):

         self.x_type = np.float32
         self.attn_mask_type = np.float64
-        # self.attn_mask_type = np.bool
+        # self.attn_mask_type = np.bool_

         self.pre_layer_norm = True
         self.has_attn_mask = True
diff --git a/python/paddle/fluid/tests/unittests/test_update_loss_scaling_op.py b/python/paddle/fluid/tests/unittests/test_update_loss_scaling_op.py
index 00b943df85dc9fd6be34650b4b94e9f3b6a4796f..9cef4d721674e585ca87200883df96abd065558f 100644
--- a/python/paddle/fluid/tests/unittests/test_update_loss_scaling_op.py
+++ b/python/paddle/fluid/tests/unittests/test_update_loss_scaling_op.py
@@ -85,7 +85,7 @@ class TestUpdateLossScalingOp(OpTest):
         self.num_good_steps = np.array([999], dtype=np.int32)
         self.num_bad_steps = np.array([1], dtype=np.int32)
         self.zero_steps = np.array([0], dtype=np.int32)
-        self.stop_update = np.array([False], dtype=np.bool)
+        self.stop_update = np.array([False], dtype=np.bool_)
         self.attrs = {
             'incr_every_n_steps': 1000,
             'decr_every_n_nan_or_inf': 2,
diff --git a/python/paddle/fluid/tests/unittests/xpu/test_assign_value_op_xpu.py b/python/paddle/fluid/tests/unittests/xpu/test_assign_value_op_xpu.py
index 560815cb56bee2eb8481dc3da6196a8e9e57b075..9030cc1e4f68c5c84425409bc8d282b1c228b801 100644
--- a/python/paddle/fluid/tests/unittests/xpu/test_assign_value_op_xpu.py
+++ b/python/paddle/fluid/tests/unittests/xpu/test_assign_value_op_xpu.py
@@ -75,7 +75,7 @@ class XPUTestAssignValueOp(XPUOpTestWrapper):
     class TestAssignValueOp4(TestAssignValueOp):
         def init_data(self):
             self.value = np.random.choice(a=[False, True], size=(2, 5)).astype(
-                np.bool
+                np.bool_
             )
             self.attrs["bool_values"] = [int(v) for v in self.value.flat]

@@ -117,7 +117,7 @@ class TestAssignApi4(TestAssignApi):
     def setUp(self):
         self.init_dtype()
         self.value = np.random.choice(a=[False, True], size=(2, 5)).astype(
-            np.bool
+            np.bool_
         )
         self.place = fluid.XPUPlace(0)
diff --git a/python/paddle/fluid/tests/unittests/xpu/test_bitwise_op_xpu.py b/python/paddle/fluid/tests/unittests/xpu/test_bitwise_op_xpu.py
index 71aa969afc14745493b4150d2ccc0233baa446de..7bd11bed1a8b761cd1520a344518fb5703e99d65 100644
--- a/python/paddle/fluid/tests/unittests/xpu/test_bitwise_op_xpu.py
+++ b/python/paddle/fluid/tests/unittests/xpu/test_bitwise_op_xpu.py
@@ -300,7 +300,7 @@ class XPUTestBitwiseNot(XPUOpTestWrapper):
             self.outputs = {'Out': out}

         def init_case(self):
-            self.dtype = np.bool
+            self.dtype = np.bool_
             self.x_shape = [2, 3, 4, 5]
diff --git a/python/paddle/fluid/tests/unittests/xpu/test_logical_op_xpu.py b/python/paddle/fluid/tests/unittests/xpu/test_logical_op_xpu.py
index 62120c0d1be8d6155ec1c282f90fb1331b8c70ce..8783fb3bb411704f13fd49d719adbdacaa2722a4 100755
--- a/python/paddle/fluid/tests/unittests/xpu/test_logical_op_xpu.py
+++ b/python/paddle/fluid/tests/unittests/xpu/test_logical_op_xpu.py
@@ -47,7 +47,7 @@ class XPUTestLogicalAnd(XPUOpTestWrapper):
             self.op_type = 'logical_and'

             # special range for bool dtype
-            if self.dtype == np.dtype(np.bool):
+            if self.dtype == np.dtype(np.bool_):
                 self.low = 0
                 self.high = 2

@@ -108,7 +108,7 @@ class XPUTestLogicalOr(XPUOpTestWrapper):
             self.op_type = 'logical_or'

             # special range for bool dtype
-            if self.dtype == np.dtype(np.bool):
+            if self.dtype == np.dtype(np.bool_):
                 self.low = 0
                 self.high = 2

@@ -169,7 +169,7 @@ class XPUTestLogicalXor(XPUOpTestWrapper):
            self.op_type = 'logical_xor'

             # special range for bool dtype
-            if self.dtype == np.dtype(np.bool):
+            if self.dtype == np.dtype(np.bool_):
                 self.low = 0
                 self.high = 2

@@ -230,7 +230,7 @@ class XPUTestLogicalNot(XPUOpTestWrapper):
             self.op_type = 'logical_not'

             # special range for bool dtype
-            if self.dtype == np.dtype(np.bool):
+            if self.dtype == np.dtype(np.bool_):
                 self.low = 0
                 self.high = 2
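Note (not part of the patch): every hunk above applies the same mechanical substitution. np.bool and np.int were aliases of the Python builtins bool and int; NumPy deprecated them in 1.20 and removed them in 1.24, so the old spellings now raise AttributeError. The patch replaces them with the concrete NumPy scalar types np.bool_ and np.int64. Below is a minimal standalone sketch of the before/after behaviour, assuming only NumPy is installed; the variable names are illustrative and not taken from the tests.

import numpy as np

data = np.random.randint(0, 2, size=[4, 3, 2, 2])

# Old spellings, removed in NumPy 1.24 -- these now raise AttributeError:
#     mask = data.astype(np.bool)
#     labels = data.astype(np.int)
mask = data.astype(np.bool_)    # replacement used for the bool cases above
labels = data.astype(np.int64)  # replacement used for the one_hot output dtype

assert mask.dtype == np.dtype(bool)
assert labels.dtype == np.dtype('int64')

The builtins bool and int would also work as astype arguments, but np.int maps to the platform C long, which is not 64-bit everywhere; pinning np.int64 keeps the expected width explicit, which is presumably why it was chosen for the one_hot output dtype rather than plain int.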