From 5d970b586bd2a0b0c07de2033f6da5a782e22ce4 Mon Sep 17 00:00:00 2001
From: zhupengyang
Date: Fri, 10 Apr 2020 11:55:09 +0800
Subject: [PATCH] Op(leaky_relu) error message enhancement (#23627)

---
 python/paddle/fluid/layers/nn.py                        |  3 +++
 .../fluid/tests/unittests/test_activation_op.py         | 14 ++++++++++++++
 2 files changed, 17 insertions(+)

diff --git a/python/paddle/fluid/layers/nn.py b/python/paddle/fluid/layers/nn.py
index aece48f5a71..ca84ffe5f72 100644
--- a/python/paddle/fluid/layers/nn.py
+++ b/python/paddle/fluid/layers/nn.py
@@ -9238,6 +9238,9 @@ def leaky_relu(x, alpha=0.02, name=None):
     if in_dygraph_mode():
         return core.ops.leaky_relu(x, 'alpha', alpha)
 
+    check_variable_and_dtype(x, 'x', ['float16', 'float32', 'float64'],
+                             'leaky_relu')
+
     inputs = {'X': [x]}
     attrs = {'alpha': alpha}
     helper = LayerHelper('leaky_relu', **locals())
diff --git a/python/paddle/fluid/tests/unittests/test_activation_op.py b/python/paddle/fluid/tests/unittests/test_activation_op.py
index d37bb3e81cf..033e1dafc1e 100644
--- a/python/paddle/fluid/tests/unittests/test_activation_op.py
+++ b/python/paddle/fluid/tests/unittests/test_activation_op.py
@@ -450,6 +450,20 @@ class TestLeakyRelu(TestActivation):
         self.check_grad(['X'], 'Out')
 
 
+class TestLeakyReluOpError(unittest.TestCase):
+    def test_errors(self):
+        with program_guard(Program()):
+            # The input type must be Variable.
+            self.assertRaises(TypeError, fluid.layers.leaky_relu, 1)
+            # The input dtype must be float16, float32, float64.
+            x_int32 = fluid.data(name='x_int32', shape=[12, 10], dtype='int32')
+            self.assertRaises(TypeError, fluid.layers.leaky_relu, x_int32)
+            # support the input dtype is float32
+            x_fp16 = fluid.layers.data(
+                name='x_fp16', shape=[12, 10], dtype='float32')
+            fluid.layers.leaky_relu(x_fp16)
+
+
 def gelu(x, approximate):
     if approximate:
         y_ref = 0.5 * x * (1.0 + np.tanh(
-- 
GitLab