未验证 提交 cae9340e 编写于 作者: S songyouwei 提交者: GitHub

API(GRUUnit) error message enhancement (#23535)

* err msg enhance for GRUUnit

* add ut
test=develop
上级 475de6da
...@@ -1911,6 +1911,10 @@ class GRUUnit(layers.Layer): ...@@ -1911,6 +1911,10 @@ class GRUUnit(layers.Layer):
self.activation, 'gate_activation', self.gate_activation) self.activation, 'gate_activation', self.gate_activation)
return updated_hidden, reset_hidden_pre, gate return updated_hidden, reset_hidden_pre, gate
check_variable_and_dtype(input, 'input', ['float32', 'float64'],
'GRUUnit')
check_variable_and_dtype(hidden, 'hidden', ['float32', 'float64'],
'GRUUnit')
inputs = { inputs = {
'Input': [input], 'Input': [input],
'HiddenPrev': [hidden], 'HiddenPrev': [hidden],
...@@ -1918,10 +1922,6 @@ class GRUUnit(layers.Layer): ...@@ -1918,10 +1922,6 @@ class GRUUnit(layers.Layer):
} }
if self.bias is not None: if self.bias is not None:
inputs['Bias'] = [self.bias] inputs['Bias'] = [self.bias]
attrs = {
'activation': self.activation,
'gate_activation': self.gate_activation,
}
gate = self._helper.create_variable_for_type_inference(self._dtype) gate = self._helper.create_variable_for_type_inference(self._dtype)
reset_hidden_pre = self._helper.create_variable_for_type_inference( reset_hidden_pre = self._helper.create_variable_for_type_inference(
self._dtype) self._dtype)
......
...@@ -17,9 +17,25 @@ from __future__ import print_function ...@@ -17,9 +17,25 @@ from __future__ import print_function
import math import math
import unittest import unittest
import numpy as np import numpy as np
import paddle.fluid as fluid
from op_test import OpTest from op_test import OpTest
class TestGRUUnitAPIError(unittest.TestCase):
    """Verify GRUUnit's input validation: non-Variable inputs and
    unsupported dtypes must raise TypeError."""

    def test_errors(self):
        # Run inside fresh main/startup programs so nothing leaks into
        # the global default programs.
        with fluid.program_guard(fluid.Program(), fluid.Program()):
            hidden_dim = 5
            gru_unit = fluid.dygraph.nn.GRUUnit(size=hidden_dim * 3)

            # A raw LoDTensor is not a Variable, so the variable check
            # in GRUUnit.forward must reject it.
            bad_input = fluid.create_lod_tensor(
                np.array([-1, 3, 5, 5]), [[1, 1, 1, 1]], fluid.CPUPlace())
            self.assertRaises(TypeError, gru_unit, bad_input)

            # float16 is outside the supported {float32, float64} dtypes,
            # so the dtype check must reject it.
            x = fluid.data(
                name='x', shape=[-1, hidden_dim * 3], dtype='float16')
            hidden = fluid.data(
                name='hidden', shape=[-1, hidden_dim], dtype='float32')
            self.assertRaises(TypeError, gru_unit, x, hidden)
class GRUActivationType(OpTest): class GRUActivationType(OpTest):
identity = 0 identity = 0
sigmoid = 1 sigmoid = 1
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册