From 318d5bba64433b377aac3243f6a07c48c67d8004 Mon Sep 17 00:00:00 2001
From: GaoWei8 <53294385+GaoWei8@users.noreply.github.com>
Date: Wed, 9 Oct 2019 21:28:28 +0800
Subject: [PATCH] add input type and dtype check for elu_op (#20106)

* elu input check test=develop

* test=develop
---
 python/paddle/fluid/layers/nn.py                      | 11 +++++++++++
 .../fluid/tests/unittests/test_activation_op.py       | 14 ++++++++++++++
 2 files changed, 25 insertions(+)

diff --git a/python/paddle/fluid/layers/nn.py b/python/paddle/fluid/layers/nn.py
index 73f8dd32fc..bb4b8e707f 100755
--- a/python/paddle/fluid/layers/nn.py
+++ b/python/paddle/fluid/layers/nn.py
@@ -10883,6 +10883,17 @@ def elu(x, alpha=1.0, name=None):
         y = fluid.layers.elu(x, alpha=0.2)
     """
     helper = LayerHelper('elu', **locals())
+    if not isinstance(x, Variable):
+        raise TypeError(
+            "The type of 'x' in elu must be Variable, but received %s" %
+            (type(x)))
+    if convert_dtype(x.dtype) in ['float16']:
+        warnings.warn(
+            "The data type float16 of 'x' in elu is only supported on GPU now.")
+    if convert_dtype(x.dtype) not in ['float16', 'float32', 'float64']:
+        raise TypeError(
+            "The data type of 'x' in elu must be float16 (only supported on GPU), float32 or float64, but received %s."
+            % (convert_dtype(x.dtype)))
     out = helper.create_variable_for_type_inference(dtype=x.dtype)
     helper.append_op(
         type='elu',
diff --git a/python/paddle/fluid/tests/unittests/test_activation_op.py b/python/paddle/fluid/tests/unittests/test_activation_op.py
index 012d0401b0..c8e6cbc314 100644
--- a/python/paddle/fluid/tests/unittests/test_activation_op.py
+++ b/python/paddle/fluid/tests/unittests/test_activation_op.py
@@ -19,6 +19,8 @@ import numpy as np
 import paddle.fluid.core as core
 from op_test import OpTest
 from scipy.special import expit, erf
+import paddle.fluid as fluid
+from paddle.fluid import compiler, Program, program_guard


 class TestActivation(OpTest):
@@ -519,6 +521,18 @@ class TestELU(TestActivation):
         self.check_grad(['X'], 'Out', max_relative_error=0.02)


+class TestELUOpError(OpTest):
+    def test_errors(self):
+        with program_guard(Program(), Program()):
+            # The input type of elu_op must be Variable.
+            x1 = fluid.create_lod_tensor(
+                np.array([[-1]]), [[1]], fluid.CPUPlace())
+            self.assertRaises(TypeError, fluid.layers.elu, x1)
+            # The input dtype of elu_op must be float16, float32 or float64.
+            x2 = fluid.layers.data(name='x2', shape=[4], dtype="int32")
+            self.assertRaises(TypeError, fluid.layers.elu, x2)
+
+
 class TestReciprocal(TestActivation):
     def setUp(self):
         self.op_type = "reciprocal"
-- 
GitLab
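
For context, a minimal sketch (not part of the patch) of how the new checks surface to user
code. It assumes a Paddle 1.x fluid installation; the names used below mirror the patched API.

    # Sketch: exercising the new input checks added to fluid.layers.elu.
    import numpy as np
    import paddle.fluid as fluid

    with fluid.program_guard(fluid.Program(), fluid.Program()):
        # A float32 Variable passes both checks and builds the op as before.
        x = fluid.layers.data(name='x', shape=[4], dtype='float32')
        y = fluid.layers.elu(x, alpha=0.2)

        # A non-Variable input now fails fast with a TypeError instead of
        # erroring later inside append_op.
        try:
            fluid.layers.elu(np.ones([4], dtype='float32'))
        except TypeError as e:
            print(e)  # "The type of 'x' in elu must be Variable, ..."

        # An unsupported dtype such as int32 is also rejected with a TypeError.
        x_int = fluid.layers.data(name='x_int', shape=[4], dtype='int32')
        try:
            fluid.layers.elu(x_int)
        except TypeError as e:
            print(e)  # "The data type of 'x' in elu must be float16 ..."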