From 17bee1d9a0a8da491f1f2af6f579d1544fe3e366 Mon Sep 17 00:00:00 2001 From: zhupengyang Date: Thu, 9 Apr 2020 19:10:56 +0800 Subject: [PATCH] Op(brelu) error message enhancement (#23606) --- python/paddle/fluid/layers/nn.py | 2 ++ .../fluid/tests/unittests/test_activation_op.py | 14 ++++++++++++++ 2 files changed, 16 insertions(+) diff --git a/python/paddle/fluid/layers/nn.py b/python/paddle/fluid/layers/nn.py index 9d1c1a1c8c..f0d741a8cd 100644 --- a/python/paddle/fluid/layers/nn.py +++ b/python/paddle/fluid/layers/nn.py @@ -9179,6 +9179,8 @@ def brelu(x, t_min=0.0, t_max=24.0, name=None): #[[ 1. 6.] #[ 1. 10.]] """ + check_variable_and_dtype(x, 'x', ['float16', 'float32', 'float64'], 'brelu') + helper = LayerHelper('brelu', **locals()) out = helper.create_variable_for_type_inference(dtype=x.dtype) helper.append_op( diff --git a/python/paddle/fluid/tests/unittests/test_activation_op.py b/python/paddle/fluid/tests/unittests/test_activation_op.py index e24818c32a..d37bb3e81c 100644 --- a/python/paddle/fluid/tests/unittests/test_activation_op.py +++ b/python/paddle/fluid/tests/unittests/test_activation_op.py @@ -520,6 +520,20 @@ class TestBRelu(TestActivation): self.check_grad(['X'], 'Out') +class TestBReluOpError(unittest.TestCase): + def test_errors(self): + with program_guard(Program()): + # The input type must be Variable. + self.assertRaises(TypeError, fluid.layers.brelu, 1) + # The input dtype must be float16, float32, or float64. + x_int32 = fluid.data(name='x_int32', shape=[12, 10], dtype='int32') + self.assertRaises(TypeError, fluid.layers.brelu, x_int32) + # float16 input dtype is supported + x_fp16 = fluid.layers.data( + name='x_fp16', shape=[12, 10], dtype='float16') + fluid.layers.brelu(x_fp16) + + class TestRelu6(TestActivation): def setUp(self): self.op_type = "relu6" -- GitLab