From 6198ff24985bb1f76e669a74ffa48ec589f1c2d6 Mon Sep 17 00:00:00 2001 From: wanghuancoder Date: Tue, 26 Jul 2022 12:53:20 +0800 Subject: [PATCH] add reverse yaml (#44518) * add reverse yaml --- paddle/phi/api/yaml/legacy_api.yaml | 18 ++++++++++++++++++ paddle/phi/api/yaml/legacy_backward.yaml | 17 +++++++++++++++++ paddle/phi/infermeta/unary.cc | 19 +++++++++++++++++++ paddle/phi/infermeta/unary.h | 4 ++++ python/paddle/fluid/layers/tensor.py | 5 +++++ .../fluid/tests/unittests/test_reverse_op.py | 5 +++-- 6 files changed, 66 insertions(+), 2 deletions(-) diff --git a/paddle/phi/api/yaml/legacy_api.yaml b/paddle/phi/api/yaml/legacy_api.yaml index 2a1b307c2b0..ea52ffd9605 100644 --- a/paddle/phi/api/yaml/legacy_api.yaml +++ b/paddle/phi/api/yaml/legacy_api.yaml @@ -1802,6 +1802,24 @@ intermediate : xshape backward: reshape_grad +- api : reverse + args : (Tensor x, int[] axis) + output : Tensor + infer_meta : + func : ReverseInferMeta + kernel : + func : reverse + backward : reverse_grad + +- api : reverse_array + args : (Tensor[] x, int[] axis) + output : Tensor[]{x.size()} + infer_meta : + func : ReverseArrayInferMeta + kernel : + func : reverse_array + backward : reverse_array_grad + - api : roi_align args : (Tensor x, Tensor boxes, Tensor boxes_num, int pooled_height, int pooled_width, float spatial_scale, int sampling_ratio, bool aligned) output : Tensor diff --git a/paddle/phi/api/yaml/legacy_backward.yaml b/paddle/phi/api/yaml/legacy_backward.yaml index 8bbdc5766b4..abbb23cc253 100644 --- a/paddle/phi/api/yaml/legacy_backward.yaml +++ b/paddle/phi/api/yaml/legacy_backward.yaml @@ -1708,6 +1708,23 @@ backward : reshape_double_grad inplace : (out_grad -> x_grad) +- backward_api : reverse_array_grad + forward : reverse_array (Tensor[] x, int[] axis) -> Tensor[](out) + args : (Tensor[] out_grad, int[] axis) + output : Tensor[](x_grad){out_grad.size()} + infer_meta : + func : ReverseArrayInferMeta + kernel : + func : reverse + +- backward_api : reverse_grad + 
forward : reverse (Tensor x, int[] axis) -> Tensor(out) + args : (Tensor out_grad, int[] axis) + output : Tensor(x_grad) + infer_meta : + func : ReverseInferMeta + invoke : reverse(out_grad, axis) + - backward_api : roi_align_grad forward : roi_align (Tensor x, Tensor boxes, Tensor boxes_num, int pooled_height, int pooled_width, float spatial_scale, int sampling_ratio, bool aligned) -> Tensor(out) args : (Tensor x, Tensor boxes, Tensor boxes_num, Tensor out_grad, int pooled_height, int pooled_width, float spatial_scale, int sampling_ratio, bool aligned) diff --git a/paddle/phi/infermeta/unary.cc b/paddle/phi/infermeta/unary.cc index 80f5da68295..edc455225e4 100644 --- a/paddle/phi/infermeta/unary.cc +++ b/paddle/phi/infermeta/unary.cc @@ -2165,6 +2165,25 @@ void ReverseInferMeta(const MetaTensor& x, out->share_meta(x); } +void ReverseArrayInferMeta(const std::vector<const MetaTensor*>& x, + const std::vector<int>& axis, + std::vector<MetaTensor*> out) { + PADDLE_ENFORCE_EQ( + axis.size(), + 1, + phi::errors::InvalidArgument( + "The size of axis must be 1 when the Input(X) is LoDTensorArray, " + "but received %d.", + axis.size())); + PADDLE_ENFORCE_EQ( + axis[0], + 0, + phi::errors::InvalidArgument("The value of axis should be 0 when " + "the Input(X) is LoDTensorArray, " + "but received %d.", + axis[0])); +} + void RollInferMeta(const MetaTensor& x, const IntArray& shifts, const std::vector<int64_t>& axis, diff --git a/paddle/phi/infermeta/unary.h b/paddle/phi/infermeta/unary.h index a9cb1c0b610..1449e8cfe19 100644 --- a/paddle/phi/infermeta/unary.h +++ b/paddle/phi/infermeta/unary.h @@ -298,6 +298,10 @@ void ReverseInferMeta(const MetaTensor& x, const std::vector<int>& axis, MetaTensor* out); +void ReverseArrayInferMeta(const std::vector<const MetaTensor*>& x, + const std::vector<int>& axis, + std::vector<MetaTensor*> out); + void RollInferMeta(const MetaTensor& x, const IntArray& shifts, const std::vector<int64_t>& axis, diff --git a/python/paddle/fluid/layers/tensor.py b/python/paddle/fluid/layers/tensor.py index 3c2b0442776..359e44e859e 100644 ---
a/python/paddle/fluid/layers/tensor.py +++ b/python/paddle/fluid/layers/tensor.py @@ -1271,6 +1271,11 @@ def reverse(x, axis): check_type(axis, 'axis', (int, tuple, list), 'reverse') if isinstance(axis, int): axis = [axis] + if in_dygraph_mode(): + if x.type == core.VarDesc.VarType.LOD_TENSOR_ARRAY: + return _C_ops.final_state_reverse_array(x, axis) + else: + return _C_ops.final_state_reverse(x, axis) helper = LayerHelper("reverse", **locals()) out = helper.create_variable_for_type_inference(dtype=x.dtype) helper.append_op(type='reverse', diff --git a/python/paddle/fluid/tests/unittests/test_reverse_op.py b/python/paddle/fluid/tests/unittests/test_reverse_op.py index 263fecc619e..adc9e513eaa 100644 --- a/python/paddle/fluid/tests/unittests/test_reverse_op.py +++ b/python/paddle/fluid/tests/unittests/test_reverse_op.py @@ -31,6 +31,7 @@ class TestReverseOp(OpTest): def setUp(self): self.initTestCase() self.op_type = "reverse" + self.python_api = fluid.layers.reverse self.inputs = {"X": self.x} self.attrs = {'axis': self.axis} out = self.x @@ -39,10 +40,10 @@ class TestReverseOp(OpTest): self.outputs = {'Out': out} def test_check_output(self): - self.check_output() + self.check_output(check_eager=True) def test_check_grad(self): - self.check_grad(['X'], 'Out') + self.check_grad(['X'], 'Out', check_eager=True) class TestCase0(TestReverseOp): -- GitLab