From fd5b8eea463ba14d86c12c327deae0475aa10f0f Mon Sep 17 00:00:00 2001
From: RedContritio
Date: Wed, 1 Feb 2023 17:15:42 +0800
Subject: [PATCH] Fix Python IndexError of case2-3 (#49986)

* add shape check for fused_multi_head_attention

* use raise for coverage test

* add unittest

* remove unnecessary pass

* add unittest
---
 .../test_fused_attention_no_dropout.py        | 12 +++++++++
 .../test_fused_multi_transformer_op.py        | 26 +++++++++++++++++++
 .../nn/functional/fused_transformer.py        |  5 ++++
 3 files changed, 43 insertions(+)

diff --git a/python/paddle/fluid/tests/unittests/test_fused_attention_no_dropout.py b/python/paddle/fluid/tests/unittests/test_fused_attention_no_dropout.py
index c459f2dbb2..4f18abd79e 100644
--- a/python/paddle/fluid/tests/unittests/test_fused_attention_no_dropout.py
+++ b/python/paddle/fluid/tests/unittests/test_fused_attention_no_dropout.py
@@ -192,5 +192,17 @@ class TestFusedAttentionNormalizeBefore(TestFusedAttention):
         self.normalize_before = True
 
 
+class TestFusedAttentionAPIError(unittest.TestCase):
+    def test_invalid_x_rank(self):
+        def test_x_rank_1():
+            with paddle.fluid.dygraph.guard():
+                layer = FusedMultiHeadAttention(embed_dim=1, num_heads=1)
+                array = np.array([1.9], dtype=np.float32)
+                x = paddle.to_tensor(np.reshape(array, [1]), dtype='float32')
+                out = layer(x)
+
+        self.assertRaises(ValueError, test_x_rank_1)
+
+
 if __name__ == "__main__":
     unittest.main()
diff --git a/python/paddle/fluid/tests/unittests/test_fused_multi_transformer_op.py b/python/paddle/fluid/tests/unittests/test_fused_multi_transformer_op.py
index e3da925a01..8068387cfd 100644
--- a/python/paddle/fluid/tests/unittests/test_fused_multi_transformer_op.py
+++ b/python/paddle/fluid/tests/unittests/test_fused_multi_transformer_op.py
@@ -1051,5 +1051,31 @@ class TestFusedMultiTransformerOpPreCacheStatic(TestFusedMultiTransformerOp):
         )
 
 
+class TestFusedMultiAttentionAPIError(unittest.TestCase):
+    def test_errors(self):
+        def test_invalid_input_dim():
+            array = np.array([1.9], dtype=np.float32)
+            x = paddle.to_tensor(np.reshape(array, [1]), dtype='float32')
+            layer = paddle.incubate.nn.FusedMultiHeadAttention(
+                embed_dim=1, num_heads=1
+            )
+            out = layer(x)
+
+        self.assertRaises(ValueError, test_invalid_input_dim)
+
+
+class TestFusedMultiTransformerAPIError(unittest.TestCase):
+    def test_errors(self):
+        def test_invalid_input_dim():
+            array = np.array([], dtype=np.float32)
+            x = paddle.to_tensor(np.reshape(array, [0]), dtype='int32')
+            layer = paddle.incubate.nn.FusedTransformerEncoderLayer(
+                108, 108, 108, 0.0, 'relu'
+            )
+            out = layer(x)
+
+        self.assertRaises(ValueError, test_invalid_input_dim)
+
+
 if __name__ == "__main__":
     unittest.main()
diff --git a/python/paddle/incubate/nn/functional/fused_transformer.py b/python/paddle/incubate/nn/functional/fused_transformer.py
index 01d2161b22..19ec0ad245 100644
--- a/python/paddle/incubate/nn/functional/fused_transformer.py
+++ b/python/paddle/incubate/nn/functional/fused_transformer.py
@@ -615,6 +615,11 @@ def fused_multi_head_attention(
         'downgrade_in_infer' if mode == 'downscale_in_infer' else mode
     )  # semantic transfer
 
+    if x.ndim != 3:
+        raise ValueError(
+            f"The rank of the x should be 3, but received {x.ndim}."
+        )
+
     if _non_static_mode():
         if default_main_program().random_seed != 0:
             seed = default_main_program().random_seed
-- 
GitLab
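
For reference, and not part of the patch itself: a minimal dygraph sketch of how
the new rank check surfaces to callers. It mirrors the added unit tests and
assumes a PaddlePaddle build in which paddle.incubate.nn.FusedMultiHeadAttention
can be constructed; the variable names are illustrative only.

    import numpy as np
    import paddle
    from paddle.incubate.nn import FusedMultiHeadAttention

    # Same tiny configuration as the new tests.
    layer = FusedMultiHeadAttention(embed_dim=1, num_heads=1)

    # Valid input is rank 3: [batch_size, sequence_length, embed_dim].
    # A rank-1 tensor now fails fast with a ValueError from
    # fused_multi_head_attention instead of an IndexError deep inside
    # the op, which is what this patch fixes.
    x_bad = paddle.to_tensor(np.array([1.9], dtype=np.float32))
    try:
        layer(x_bad)
    except ValueError as e:
        print(e)  # The rank of the x should be 3, but received 1.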