Unverified commit f12f2a9d, authored by Ryan, committed by GitHub

bool => bool_ (#49961)

Parent: cb525d4e
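Background for the rename: NumPy 1.20 deprecated the aliases `np.bool`, `np.int`, and friends, which were plain re-exports of the Python builtins, and NumPy 1.24 removed them, so any remaining use now raises an `AttributeError`. This patch switches the tests to the NumPy scalar types (`np.bool_`, explicit-width integers), which behave identically as dtype arguments. A minimal sketch of the failure and the fix (shape taken from the first hunk below):

```python
import numpy as np

data = np.random.randint(0, 2, size=[4, 3, 2, 2])

# Deprecated in NumPy 1.20, removed in 1.24; on current NumPy it raises
# AttributeError: module 'numpy' has no attribute 'bool'.
# x = data.astype(np.bool)

# Portable spellings: the NumPy scalar type or the Python builtin.
x = data.astype(np.bool_)
y = data.astype(bool)
assert x.dtype == y.dtype == np.dtype('bool')
```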
@@ -80,8 +80,8 @@ class TestCase1(TestBase):
 class TestCase2(TestBase):
     def set_feed(self):
         data = np.random.randint(0, 2, size=[4, 3, 2, 2])
-        self.feed_fp32 = {'x': data.astype(np.bool)}
-        self.feed_fp16 = {'x': data.astype(np.bool)}
+        self.feed_fp32 = {'x': data.astype(np.bool_)}
+        self.feed_fp16 = {'x': data.astype(np.bool_)}


 if __name__ == "__main__":
@@ -46,14 +46,14 @@ class TrtConvertLogicalTest(TrtLayerAutoScanTest):
                 "op_inputs": {"X": ["input_data1"]},
                 "op_outputs": {"Out": ["cast_output_data1"]},
                 "op_attrs": dics[1],
-                "outputs_dtype": {"cast_output_data1": np.bool},
+                "outputs_dtype": {"cast_output_data1": np.bool_},
             },
             {
                 "op_type": "cast",
                 "op_inputs": {"X": ["input_data2"]},
                 "op_outputs": {"Out": ["cast_output_data3"]},
                 "op_attrs": dics[1],
-                "outputs_dtype": {"cast_output_data3": np.bool},
+                "outputs_dtype": {"cast_output_data3": np.bool_},
             },
             {
                 "op_type": op_type,
@@ -36,7 +36,7 @@ class TrtConvertExpandV2Test(TrtLayerAutoScanTest):
         if self.dims == 4:
             self.input_shape = [1, 1, 4, 6]
             if self.dtype == 0:
-                return np.random.random([1, 1, 4, 6]).astype(np.bool)
+                return np.random.random([1, 1, 4, 6]).astype(np.bool_)
             elif self.dtype == 2 or self.dtype == -1:
                 return np.random.random([1, 1, 4, 6]).astype(np.int32)
             elif self.dtype == 3:
@@ -59,7 +59,7 @@ class TrtConvertOneHotTest(TrtLayerAutoScanTest):
                 },
                 "op_outputs": {"Out": ["output_data"]},
                 "op_attrs": dics[0],
-                "outputs_dtype": {"output_data": np.int},
+                "outputs_dtype": {"output_data": np.int64},
             },
         ]
         ops = self.generate_op_config(ops_config)
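The one-hot hunk above is the only change that is not a literal `bool => bool_` swap: `np.int` was an alias for the builtin `int`, whose dtype is platform dependent (a C `long` under NumPy 1.x: 64-bit on Linux/macOS, 32-bit on 64-bit Windows), so the expected output dtype is pinned to an explicit `np.int64` instead. A small illustration of why the explicit width is the safer replacement (a sketch, not part of the patch):

```python
import numpy as np

# np.dtype(int) follows the platform's C long under NumPy 1.x:
# int64 on 64-bit Linux/macOS, int32 on 64-bit Windows.
print(np.dtype(int))

# An explicit width keeps the expected dtype stable everywhere,
# which is what the one-hot test asserts against.
print(np.dtype(np.int64))
```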
@@ -193,7 +193,7 @@ class TrtConvertLogicalNotTest(TrtLayerAutoScanTest):
                 "op_inputs": {"X": ["input_data"]},
                 "op_outputs": {"Out": ["cast_output_data1"]},
                 "op_attrs": dics[1],
-                "outputs_dtype": {"cast_output_data1": np.bool},
+                "outputs_dtype": {"cast_output_data1": np.bool_},
             },
             {
                 "op_type": op_type,
@@ -202,7 +202,7 @@ class TrtConvertLogicalNotTest(TrtLayerAutoScanTest):
                 },
                 "op_outputs": {"Out": ["cast_output_data0"]},
                 "op_attrs": dics[0],
-                "outputs_dtype": {"cast_output_data0": np.bool},
+                "outputs_dtype": {"cast_output_data0": np.bool_},
             },
             {
                 "op_type": "cast",
@@ -73,7 +73,7 @@ class TrtConvertActivationTest(TrtLayerAutoScanTest):
                 "op_inputs": {"X": ["condition_data"]},
                 "op_outputs": {"Out": ["condition_data_bool"]},
                 "op_attrs": {"in_dtype": 5, "out_dtype": 0},
-                "outputs_dtype": {"condition_data_bool": np.bool},
+                "outputs_dtype": {"condition_data_bool": np.bool_},
             },
             {
                 "op_type": "where",
@@ -84,7 +84,7 @@ class TrtConvertActivationTest(TrtLayerAutoScanTest):
                 },
                 "op_outputs": {"Out": ["output_data"]},
                 "op_attrs": dics[0],
-                "outputs_dtype": {"condition_data_bool": np.bool},
+                "outputs_dtype": {"condition_data_bool": np.bool_},
             },
         ]
         ops = self.generate_op_config(ops_config)
@@ -70,7 +70,7 @@ class TestAssignValueNPUOp3(TestAssignValueNPUOp):
 class TestAssignValueNPUOp4(TestAssignValueNPUOp):
     def init_data(self):
         self.value = np.random.choice(a=[False, True], size=(2, 5)).astype(
-            np.bool
+            np.bool_
         )
         self.attrs["bool_values"] = [int(v) for v in self.value.flat]
@@ -116,7 +116,7 @@ class TestAssignApi4(TestAssignApi):
     def setUp(self):
         self.init_dtype()
         self.value = np.random.choice(a=[False, True], size=(2, 5)).astype(
-            np.bool
+            np.bool_
         )
         self.place = (
             fluid.NPUPlace(0)
@@ -60,7 +60,7 @@ class TestAssignValueOp3(TestAssignValueOp):
 class TestAssignValueOp4(TestAssignValueOp):
     def init_data(self):
         self.value = np.random.choice(a=[False, True], size=(2, 5)).astype(
-            np.bool
+            np.bool_
         )
         self.attrs["bool_values"] = [int(v) for v in self.value.flat]
@@ -106,7 +106,7 @@ class TestAssignApi4(TestAssignApi):
     def setUp(self):
         self.init_dtype()
         self.value = np.random.choice(a=[False, True], size=(2, 5)).astype(
-            np.bool
+            np.bool_
         )
         self.place = (
             fluid.CUDAPlace(0)
@@ -175,7 +175,7 @@ class TestFusedMultiTransformerInt8Op(unittest.TestCase):
         self.x_type = np.float32
         self.attn_mask_type = np.float64
-        # self.attn_mask_type = np.bool
+        # self.attn_mask_type = np.bool_
         self.pre_layer_norm = True
         self.has_attn_mask = True
@@ -109,7 +109,7 @@ class TestFusedMultiTransformerOp(OpTest):
         self.x_type = np.float32
         self.attn_mask_type = np.float64
-        # self.attn_mask_type = np.bool
+        # self.attn_mask_type = np.bool_
         self.pre_layer_norm = True
         self.has_attn_mask = True
@@ -85,7 +85,7 @@ class TestUpdateLossScalingOp(OpTest):
         self.num_good_steps = np.array([999], dtype=np.int32)
         self.num_bad_steps = np.array([1], dtype=np.int32)
         self.zero_steps = np.array([0], dtype=np.int32)
-        self.stop_update = np.array([False], dtype=np.bool)
+        self.stop_update = np.array([False], dtype=np.bool_)
         self.attrs = {
             'incr_every_n_steps': 1000,
             'decr_every_n_nan_or_inf': 2,
@@ -75,7 +75,7 @@ class XPUTestAssignValueOp(XPUOpTestWrapper):
     class TestAssignValueOp4(TestAssignValueOp):
         def init_data(self):
             self.value = np.random.choice(a=[False, True], size=(2, 5)).astype(
-                np.bool
+                np.bool_
             )
             self.attrs["bool_values"] = [int(v) for v in self.value.flat]
@@ -117,7 +117,7 @@ class TestAssignApi4(TestAssignApi):
     def setUp(self):
         self.init_dtype()
         self.value = np.random.choice(a=[False, True], size=(2, 5)).astype(
-            np.bool
+            np.bool_
         )
         self.place = fluid.XPUPlace(0)
@@ -300,7 +300,7 @@ class XPUTestBitwiseNot(XPUOpTestWrapper):
             self.outputs = {'Out': out}

         def init_case(self):
-            self.dtype = np.bool
+            self.dtype = np.bool_
             self.x_shape = [2, 3, 4, 5]
@@ -47,7 +47,7 @@ class XPUTestLogicalAnd(XPUOpTestWrapper):
             self.op_type = 'logical_and'
             # special range for bool dtype
-            if self.dtype == np.dtype(np.bool):
+            if self.dtype == np.dtype(np.bool_):
                 self.low = 0
                 self.high = 2
@@ -108,7 +108,7 @@ class XPUTestLogicalOr(XPUOpTestWrapper):
             self.op_type = 'logical_or'
             # special range for bool dtype
-            if self.dtype == np.dtype(np.bool):
+            if self.dtype == np.dtype(np.bool_):
                 self.low = 0
                 self.high = 2
@@ -169,7 +169,7 @@ class XPUTestLogicalXor(XPUOpTestWrapper):
             self.op_type = 'logical_xor'
             # special range for bool dtype
-            if self.dtype == np.dtype(np.bool):
+            if self.dtype == np.dtype(np.bool_):
                 self.low = 0
                 self.high = 2
@@ -230,7 +230,7 @@ class XPUTestLogicalNot(XPUOpTestWrapper):
            self.op_type = 'logical_not'
             # special range for bool dtype
-            if self.dtype == np.dtype(np.bool):
+            if self.dtype == np.dtype(np.bool_):
                 self.low = 0
                 self.high = 2
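The XPU logical-op hunks compare `self.dtype` against a constructed dtype rather than a raw type object. That pattern is unaffected by the rename, because `np.bool_`, the builtin `bool`, and the string `'bool'` all construct the same canonical dtype. A quick check (a sketch, not part of the patch):

```python
import numpy as np

# All three spellings yield the same canonical bool dtype, so
# `self.dtype == np.dtype(np.bool_)` behaves exactly as before.
assert np.dtype(np.bool_) == np.dtype(bool) == np.dtype('bool')

# dtype equality also accepts the scalar type directly.
assert np.dtype('bool') == np.bool_
```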