Commit d4185f33 authored by mindspore-ci-bot, committed by Gitee

!2848 change crop_size from tensor to tuple

Merge pull request !2848 from xutianchun/crop_size_tuple
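This pull request switches the crop_size input of CropAndResize from a constant int32 Tensor to a plain Python tuple of two positive ints. As a quick orientation before the diff, a minimal before/after sketch of a call site (illustrative only; it reuses the CropAndResizeNet wrapper from the docstring example below):

import numpy as np
from mindspore import Tensor

# Before this change: crop_size had to be a constant int32 Tensor.
crop_size = Tensor(np.array([24, 24]).astype(np.int32))
net = CropAndResizeNet(crop_size=crop_size)

# After this change: crop_size is an ordinary tuple of two positive ints.
crop_size = (24, 24)
net = CropAndResizeNet(crop_size=crop_size)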
@@ -45,10 +45,9 @@ class CropAndResize(PrimitiveWithInfer):
extrapolate the input image values. Types allowed: float32.
- **box_index** (Tensor) - A 1-D tensor of shape [num_boxes] with int32 values in [0, batch).
The value of box_index[i] specifies the image that the i-th box refers to. Types allowed: int32.
- **crop_size** (Tensor) - Only constant value is allowed. Types allowed: int32.
A 1-D tensor of 2 elements, size = [crop_height, crop_width].
All cropped image patches are resized to this size. The aspect ratio of the image content is not preserved.
Both crop_height and crop_width need to be positive.
- **crop_size** (Tuple[int]) - A tuple of two int32 elements: (crop_height, crop_width).
Only constant value is allowed. All cropped image patches are resized to this size.
The aspect ratio of the image content is not preserved. Both crop_height and crop_width need to be positive.
Outputs:
A 4-D tensor of shape [num_boxes, crop_height, crop_width, depth] with type: float32.
@@ -70,8 +69,8 @@ class CropAndResize(PrimitiveWithInfer):
>>> image = np.random.normal(size=[BATCH_SIZE, IMAGE_HEIGHT, IMAGE_WIDTH, CHANNELS]).astype(np.float32)
>>> boxes = np.random.uniform(size=[NUM_BOXES, 4]).astype(np.float32)
>>> box_index = np.random.uniform(size=[NUM_BOXES], low=0, high=BATCH_SIZE).astype(np.int32)
>>> crop_size = np.array([24, 24]).astype(np.int32)
>>> crop_and_resize = CropAndResizeNet(crop_size=Tensor(crop_size))
>>> crop_size = (24, 24)
>>> crop_and_resize = CropAndResizeNet(crop_size=crop_size)
>>> output = crop_and_resize(Tensor(image), Tensor(boxes), Tensor(box_index))
>>> print(output.asnumpy())
"""
@@ -91,11 +90,10 @@ class CropAndResize(PrimitiveWithInfer):
x_shape = list(x['shape'])
boxes_shape = list(boxes['shape'])
box_index_shape = list(box_index['shape'])
crop_size_shape = list(crop_size['shape'])
# get value
if crop_size['value'] is None:
raise ValueError(f"For {self.name}, crop_size must be const.")
crop_size_value = crop_size['value'].asnumpy()
raise ValueError(f"For {self.name}, crop_size must be constant.")
crop_size_value = crop_size['value']
# get dtype
x_dtype = x['dtype']
boxes_dtype = boxes['dtype']
@@ -107,15 +105,20 @@ class CropAndResize(PrimitiveWithInfer):
mstype.float32, mstype.float64, mstype.uint8, mstype.uint16], self.name)
validator.check_tensor_type_same({"boxes": boxes_dtype}, [mstype.float32], self.name)
validator.check_tensor_type_same({"box_index": box_index_dtype}, [mstype.int32], self.name)
validator.check_tensor_type_same({"crop_size": crop_size_dtype}, [mstype.int32], self.name)
validator.check_value_type("crop_size", crop_size_value, [tuple], self.name)
# check input shape rank
validator.check("x rank", len(x_shape), "expected", 4, Rel.EQ, self.name)
validator.check("boxes rank", len(boxes_shape), "expected", 2, Rel.EQ, self.name)
validator.check("box_index rank", len(box_index_shape), "expected", 1, Rel.EQ, self.name)
validator.check("crop_size rank", len(crop_size_shape), "expected", 1, Rel.EQ, self.name)
validator.check("crop_size size", len(crop_size_value), "expected", 2, Rel.EQ, self.name)
validator.check("boxes dim_0", boxes_shape[0], "box_index dim_0", box_index_shape[0], Rel.EQ, self.name)
validator.check("boxes dim_1", boxes_shape[1], "expected", 4, Rel.EQ, self.name)
# check crop_size_value
validator.check("crop_height", crop_size_value[0], "minimum", 0, Rel.GT, self.name)
validator.check("crop_width", crop_size_value[1], "minimum", 0, Rel.GT, self.name)
# check crop_size element type
validator.check("crop_height dtype", crop_size_dtype[0], mstype.int32, self.name)
validator.check("crop_width dtype", crop_size_dtype[1], mstype.int32, self.name)
num_boxes = boxes_shape[0]
crop_height = crop_size_value[0]
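With crop_size now a tuple, __infer__ no longer inspects a tensor's shape and dtype; it validates the constant Python value directly: a two-element tuple with both entries positive int32, from which the output shape [num_boxes, crop_height, crop_width, depth] is built. A standalone sketch of the equivalent checks (a hypothetical helper, not code from this commit):

def _check_crop_size(crop_size):
    # Mirrors the validator calls above: constant tuple, exactly two elements, both positive ints.
    if not isinstance(crop_size, tuple):
        raise ValueError("crop_size must be a tuple")
    if len(crop_size) != 2:
        raise ValueError("crop_size must be (crop_height, crop_width)")
    crop_height, crop_width = crop_size
    if not (isinstance(crop_height, int) and isinstance(crop_width, int)):
        raise ValueError("crop_height and crop_width must be int")
    if crop_height <= 0 or crop_width <= 0:
        raise ValueError("crop_height and crop_width must be positive")
    return crop_height, crop_width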
@@ -43,7 +43,7 @@ def test_net_float32():
image = np.random.normal(size=[batch_size, image_height, image_width, channels]).astype(np.float32)
boxes = np.random.uniform(size=[num_boxes, 4]).astype(np.float32)
box_index = np.random.uniform(size=[num_boxes], low=0, high=batch_size).astype(np.int32)
crop_size = np.array([24, 24]).astype(np.int32)
net = Net(crop_size=Tensor(crop_size))
crop_size = (24, 24)
net = Net(crop_size=crop_size)
output = net(Tensor(image), Tensor(boxes), Tensor(box_index))
print(output.asnumpy())
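Given the output contract stated in the docstring, the test could also assert the resulting shape; a possible addition, not part of this diff:

# Output shape follows [num_boxes, crop_height, crop_width, channels] per the docstring.
assert output.asnumpy().shape == (num_boxes, 24, 24, channels)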