diff --git a/python/paddle/fluid/layers/nn.py b/python/paddle/fluid/layers/nn.py
index 73f8dd32fc57974f462a5280353a244a763df790..bb4b8e707f477699f4a482da9feee72e8905d85c 100755
--- a/python/paddle/fluid/layers/nn.py
+++ b/python/paddle/fluid/layers/nn.py
@@ -10883,6 +10883,17 @@ def elu(x, alpha=1.0, name=None):
             y = fluid.layers.elu(x, alpha=0.2)
     """
     helper = LayerHelper('elu', **locals())
+    if not isinstance(x, Variable):
+        raise TypeError(
+            "The type of 'x' in elu must be Variable, but received %s" %
+            (type(x)))
+    if convert_dtype(x.dtype) in ['float16']:
+        warnings.warn(
+            "The data type of 'x' in elu only support float16 in GPU now.")
+    if convert_dtype(x.dtype) not in ['float16', 'float32', 'float64']:
+        raise TypeError(
+            "The data type of 'x' in elu must be float16 (only support on GPU), float32 or float64, but received %s."
+            % (convert_dtype(x.dtype)))
     out = helper.create_variable_for_type_inference(dtype=x.dtype)
     helper.append_op(
         type='elu',
diff --git a/python/paddle/fluid/tests/unittests/test_activation_op.py b/python/paddle/fluid/tests/unittests/test_activation_op.py
index 012d0401b05b56154c90dd6bec56f4fdbe0a3a49..c8e6cbc314ca4018d43c0100f444fb9280ac10e8 100644
--- a/python/paddle/fluid/tests/unittests/test_activation_op.py
+++ b/python/paddle/fluid/tests/unittests/test_activation_op.py
@@ -19,6 +19,8 @@ import numpy as np
 import paddle.fluid.core as core
 from op_test import OpTest
 from scipy.special import expit, erf
+import paddle.fluid as fluid
+from paddle.fluid import compiler, Program, program_guard
 
 
 class TestActivation(OpTest):
@@ -519,6 +521,18 @@ class TestELU(TestActivation):
         self.check_grad(['X'], 'Out', max_relative_error=0.02)
 
 
+class TestELUOpError(OpTest):
+    def test_errors(self):
+        with program_guard(Program(), Program()):
+            # The input type of elu_op must be Variable.
+            x1 = fluid.create_lod_tensor(
+                np.array([[-1]]), [[1]], fluid.CPUPlace())
+            self.assertRaises(TypeError, fluid.layers.elu, x1)
+            # The input dtype of elu_op must be float16 float32 or float64.
+            x2 = fluid.layers.data(name='x2', shape=[4], dtype="int32")
+            self.assertRaises(TypeError, fluid.layers.elu, x2)
+
+
 class TestReciprocal(TestActivation):
     def setUp(self):
         self.op_type = "reciprocal"
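
For reference, a minimal usage sketch (not part of the patch) of how the new checks surface to callers, assuming a Paddle build that includes this change; the variable names below (x, x_int) are illustrative only:

# Sketch: the new validation in fluid.layers.elu rejects non-Variable
# inputs and unsupported dtypes with a TypeError.
import numpy as np
import paddle.fluid as fluid
from paddle.fluid import Program, program_guard

with program_guard(Program(), Program()):
    # A float32 Variable is accepted as before.
    x = fluid.layers.data(name='x', shape=[4], dtype='float32')
    y = fluid.layers.elu(x, alpha=0.2)

    # A non-Variable input (e.g. a LoDTensor) now raises TypeError.
    try:
        fluid.layers.elu(
            fluid.create_lod_tensor(
                np.array([[-1.0]]), [[1]], fluid.CPUPlace()))
    except TypeError as e:
        print("non-Variable input rejected:", e)

    # An unsupported dtype such as int32 also raises TypeError.
    try:
        x_int = fluid.layers.data(name='x_int', shape=[4], dtype='int32')
        fluid.layers.elu(x_int)
    except TypeError as e:
        print("int32 input rejected:", e)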