diff --git a/python/paddle/fluid/layers/nn.py b/python/paddle/fluid/layers/nn.py
index ca84ffe5f7291f3a784ce36a50da6e58a90b64ed..5d41729f631b4f267aff683b7bd7882c4784f7cb 100644
--- a/python/paddle/fluid/layers/nn.py
+++ b/python/paddle/fluid/layers/nn.py
@@ -8201,6 +8201,8 @@ def relu(x, name=None):
     if in_dygraph_mode():
         return core.ops.relu(x)
 
+    check_variable_and_dtype(x, 'x', ['float16', 'float32', 'float64'], 'relu')
+
     inputs = {'X': [x]}
     helper = LayerHelper('relu', **locals())
     dtype = helper.input_dtype(input_param_name='x')
diff --git a/python/paddle/fluid/tests/unittests/test_activation_op.py b/python/paddle/fluid/tests/unittests/test_activation_op.py
index 033e1dafc1ed4e19a2b6c96ae0a8d695d2e613b4..5dc89591e7716727deba0569f8e9ae2c8321c1aa 100644
--- a/python/paddle/fluid/tests/unittests/test_activation_op.py
+++ b/python/paddle/fluid/tests/unittests/test_activation_op.py
@@ -431,6 +431,20 @@ class TestRelu(TestActivation):
         self.check_grad(['X'], 'Out')
 
 
+class TestReluOpError(unittest.TestCase):
+    def test_errors(self):
+        with program_guard(Program()):
+            # The input type must be Variable.
+            self.assertRaises(TypeError, fluid.layers.relu, 1)
+            # The input dtype must be float16, float32 or float64.
+            x_int32 = fluid.data(name='x_int32', shape=[12, 10], dtype='int32')
+            self.assertRaises(TypeError, fluid.layers.relu, x_int32)
+            # The float16 input dtype is supported.
+            x_fp16 = fluid.layers.data(
+                name='x_fp16', shape=[12, 10], dtype='float16')
+            fluid.layers.relu(x_fp16)
+
+
 class TestLeakyRelu(TestActivation):
     def setUp(self):
         self.op_type = "leaky_relu"
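
For reference, below is a minimal standalone sketch (not part of the patch, and assuming a Paddle 1.x `fluid` install) of the behaviour the new `check_variable_and_dtype` call enforces in static-graph mode, mirroring the added `TestReluOpError` case:

```python
# Minimal sketch of the new relu input validation; assumes Paddle 1.x (fluid).
import paddle.fluid as fluid
from paddle.fluid import Program, program_guard

with program_guard(Program()):
    # A non-Variable input (a plain Python int) is rejected with TypeError.
    try:
        fluid.layers.relu(1)
    except TypeError as e:
        print("non-Variable input rejected:", e)

    # A Variable with an unsupported dtype (int32) is also rejected.
    x_int32 = fluid.data(name='x_int32', shape=[12, 10], dtype='int32')
    try:
        fluid.layers.relu(x_int32)
    except TypeError as e:
        print("int32 input rejected:", e)

    # float16, float32 and float64 inputs remain accepted.
    x_fp16 = fluid.data(name='x_fp16', shape=[12, 10], dtype='float16')
    out = fluid.layers.relu(x_fp16)
```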