From aa4a56fce54f51c10fb6e362e57f7f11d9e00748 Mon Sep 17 00:00:00 2001
From: zhulei <563755780@qq.com>
Date: Wed, 19 May 2021 17:50:58 +0800
Subject: [PATCH] [Rocm] fix test of random_crop_op & logsumexp (#32824)

* [Rocm] fix test of random_crop_op

* [Rocm] fix test of random_crop_op

* [Rocm] fix test of random_crop_op & simple_rnn_op

* [Rocm] fix test of random_crop_op & simple_rnn_op & logsumexp

* [Rocm] fix test of random_crop_op & simple_rnn_op & logsumexp

* [Rocm] fix test of random_crop_op & simple_rnn_op & logsumexp

* [Rocm] fix test of random_crop_op & logsumexp
---
 paddle/fluid/operators/random_crop_op.h      | 10 -------
 .../fluid/tests/unittests/test_logsumexp.py  | 27 ++++++++++++++++++-
 2 files changed, 26 insertions(+), 11 deletions(-)

diff --git a/paddle/fluid/operators/random_crop_op.h b/paddle/fluid/operators/random_crop_op.h
index ee111a0ec7c..0ebfb2f1bcd 100644
--- a/paddle/fluid/operators/random_crop_op.h
+++ b/paddle/fluid/operators/random_crop_op.h
@@ -59,16 +59,6 @@ HOSTDEVICE inline void StridedMemcpy(const T* x, const size_t* x_dims, T* out,
   size_t offset_i = offsets[i];
 
   if (i == rank - 1) {
-    PADDLE_ENFORCE(x_stride == 1,
-                   "When i:%d == rank:%d - 1, x_stride of random_crop_op "
-                   "expected to be 1, but got %ld. Please check input "
-                   "value.",
-                   i, rank, x_stride);
-    PADDLE_ENFORCE(out_stride == 1,
-                   "When i:%d == rank:%d - 1, out_stride of random_crop_op "
-                   "expected to be 1, but got %ld. Please check input "
-                   "value.",
-                   i, rank, out_stride);
     x += offset_i;
     for (size_t j = 0; j < out_dim_i; ++j) {
       *out++ = *x++;
diff --git a/python/paddle/fluid/tests/unittests/test_logsumexp.py b/python/paddle/fluid/tests/unittests/test_logsumexp.py
index c48ec2a4fb4..31c68b88b86 100644
--- a/python/paddle/fluid/tests/unittests/test_logsumexp.py
+++ b/python/paddle/fluid/tests/unittests/test_logsumexp.py
@@ -50,15 +50,30 @@ class TestLogsumexp(OpTest):
             'keepdim': self.keepdim,
             'reduce_all': self.reduce_all
         }
+        self.user_defined_grads = None
+        self.user_defined_grad_outputs = None
+        self.set_attrs_addition()
 
     def set_attrs(self):
         pass
 
+    def set_attrs_addition(self):
+        pass
+
     def test_check_output(self):
         self.check_output()
 
     def test_check_grad(self):
-        self.check_grad(['X'], ['Out'])
+        self.check_grad(
+            ['X'], ['Out'],
+            user_defined_grads=self.user_defined_grads,
+            user_defined_grad_outputs=self.user_defined_grad_outputs)
+
+    def calc_grad(self):
+        dy = np.ones(1, dtype=self.dtype)
+        x = self.inputs['X']
+        y = self.outputs['Out']
+        return dy * np.exp(x - y)
 
 
 class TestLogsumexp_shape(TestLogsumexp):
@@ -75,6 +90,11 @@ class TestLogsumexp_axis_all(TestLogsumexp):
     def set_attrs(self):
         self.axis = [0, 1, 2, 3]
 
+    def set_attrs_addition(self):
+        if paddle.fluid.core.is_compiled_with_rocm():
+            self.user_defined_grads = [self.calc_grad()]
+            self.user_defined_grad_outputs = [np.ones(1, dtype=self.dtype)]
+
 
 class TestLogsumexp_keepdim(TestLogsumexp):
     def set_attrs(self):
         self.keepdim = True
@@ -85,6 +105,11 @@ class TestLogsumexp_reduce_all(TestLogsumexp):
     def set_attrs(self):
         self.reduce_all = True
 
+    def set_attrs_addition(self):
+        if paddle.fluid.core.is_compiled_with_rocm():
+            self.user_defined_grads = [self.calc_grad()]
+            self.user_defined_grad_outputs = [np.ones(1, dtype=self.dtype)]
+
 
 class TestLogsumexpError(unittest.TestCase):
     def test_errors(self):
--
GitLab
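
Note (not part of the patch): the calc_grad helper added to the test relies on the analytic gradient of logsumexp. For y = log(sum(exp(x))), the derivative is dy/dx_i = exp(x_i - y), so with an upstream gradient of ones the user-defined gradient is exactly dy * np.exp(x - y). The standalone sketch below, which assumes only NumPy and uses an arbitrary shape, seed, and tolerance, checks that identity against a central finite difference:

# Standalone check of the identity used by calc_grad():
#   d/dx_i logsumexp(x) = exp(x_i - logsumexp(x))
# Shape, seed, and tolerances here are illustrative only.
import numpy as np

np.random.seed(0)
x = np.random.uniform(-1, 1, (2, 3)).astype('float64')
y = np.log(np.sum(np.exp(x)))      # logsumexp reduced over all elements
analytic = np.exp(x - y)           # gradient, upstream gradient of ones

eps = 1e-6
numeric = np.empty_like(x)
for idx in np.ndindex(x.shape):
    xp, xm = x.copy(), x.copy()
    xp[idx] += eps
    xm[idx] -= eps
    numeric[idx] = (np.log(np.sum(np.exp(xp))) -
                    np.log(np.sum(np.exp(xm)))) / (2 * eps)

assert np.allclose(analytic, numeric, atol=1e-6)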